Compare commits


12 commits
build ... main

Author SHA1 Message Date
Frank Villaro-Dixon 6c5f2d05ca clippy
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-21 15:18:24 +02:00
Frank Villaro-Dixon c00c8bdebc fmt
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-21 15:17:51 +02:00
Frank Villaro-Dixon f976e5578a remove useless css
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-21 14:11:55 +02:00
Frank Villaro-Dixon 032b735a41 improve doc
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-21 14:11:37 +02:00
Frank Villaro-Dixon 8b85c31411 Add js endpoint
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-21 14:05:46 +02:00
Frank Villaro-Dixon cd4ccee05c website: make moar better
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-19 13:29:10 +02:00
Frank Villaro-Dixon ffc00dd2e6 Dockerfile: fix add website
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-19 13:29:01 +02:00
Frank Villaro-Dixon 61259dc4fb API: handle multi pts
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-19 13:28:40 +02:00
Frank Villaro-Dixon ce3610bf45 assets
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-16 00:32:29 +02:00
Frank Villaro-Dixon 1c448a240d smore shit
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-16 00:22:06 +02:00
Frank Villaro-Dixon e4657c87e6 add some dumb logs
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-15 23:44:57 +02:00
Frank Villaro-Dixon f34e8ba309 refactor cache
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
2024-04-15 23:23:18 +02:00
8 changed files with 455 additions and 94 deletions


@@ -1,30 +0,0 @@
#
# .gitea/gitea-ci.yaml
#

name: Build
run-name: ${{ gitea.actor }} is runs ci pipeline
on: [ push ]

jobs:
  build:
    runs-on: ubuntu-latest
    needs: build
    if: gitea.ref == 'refs/heads/build' #XXX
    steps:
      - uses: https://github.com/actions/checkout@v4
      - name: Set up Docker Buildx
        uses: https://github.com/docker/setup-buildx-action@v3
        #uses: https://github.com/Frankkkkk/setup-buildx-action@master
        with:
          config-inline: |
            [registry."<my-private-unsecure-git-repository-ip-address>:5000"]
              http = true
              insecure = true
      - name: Build and push Docker image
        uses: https://github.com/docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: "<my-private-unsecure-git-repository-ip-address>:5000/<my-docker-image>:${{gitea.sha}},<my-private-unsecure-git-repository-ip-address>:5000/<my-docker-image>:latest"


@@ -1,9 +0,0 @@
on:
  push: '*'

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Show env
        run: env

Cargo.lock generated

@@ -36,7 +36,10 @@ dependencies = [
  "moka",
  "serde",
  "serde_json",
+ "serde_qs",
  "tokio",
+ "tower",
+ "tower-http",
  "tracing",
  "tracing-subscriber",
 ]
@@ -359,6 +362,21 @@ dependencies = [
  "percent-encoding",
 ]
 
+[[package]]
+name = "futures"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
 [[package]]
 name = "futures-channel"
 version = "0.3.30"
@@ -366,6 +384,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78"
 dependencies = [
  "futures-core",
+ "futures-sink",
 ]
 
 [[package]]
@@ -374,6 +393,23 @@ version = "0.3.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
 
+[[package]]
+name = "futures-executor"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
+
 [[package]]
 name = "futures-macro"
 version = "0.3.30"
@@ -385,6 +421,12 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "futures-sink"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"
+
 [[package]]
 name = "futures-task"
 version = "0.3.30"
@@ -397,9 +439,13 @@ version = "0.3.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
 dependencies = [
+ "futures-channel",
  "futures-core",
+ "futures-io",
  "futures-macro",
+ "futures-sink",
  "futures-task",
+ "memchr",
  "pin-project-lite",
  "pin-utils",
  "slab",
@@ -522,6 +568,12 @@ dependencies = [
  "pin-project-lite",
 ]
 
+[[package]]
+name = "http-range-header"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ce4ef31cda248bbdb6e6820603b82dfcd9e833db65a43e997a0ccec777d11fe"
+
 [[package]]
 name = "httparse"
 version = "1.8.0"
@@ -658,6 +710,16 @@ version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
 
+[[package]]
+name = "mime_guess"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+dependencies = [
+ "mime",
+ "unicase",
+]
+
 [[package]]
 name = "minimal-lexical"
 version = "0.2.1"
@@ -1054,6 +1116,19 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "serde_qs"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd34f36fe4c5ba9654417139a9b3a20d2e1de6012ee678ad14d240c22c78d8d6"
+dependencies = [
+ "axum",
+ "futures",
+ "percent-encoding",
+ "serde",
+ "thiserror",
+]
+
 [[package]]
 name = "serde_urlencoded"
 version = "0.7.1"
@@ -1231,6 +1306,19 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "tokio-util"
+version = "0.7.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "pin-project-lite",
+ "tokio",
+]
+
 [[package]]
 name = "tower"
 version = "0.4.13"
@@ -1247,6 +1335,31 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "tower-http"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5"
+dependencies = [
+ "bitflags 2.5.0",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "http-range-header",
+ "httpdate",
+ "mime",
+ "mime_guess",
+ "percent-encoding",
+ "pin-project-lite",
+ "tokio",
+ "tokio-util",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
 [[package]]
 name = "tower-layer"
 version = "0.3.2"

Cargo.toml

@@ -13,6 +13,9 @@ gdal = { version = "0.16.0", features = ["bindgen"] }
 moka = { version = "0.12.5", features = ["future"] }
 serde = { version = "1.0.197", features = ["derive"] }
 serde_json = "1.0.115"
+serde_qs = { version = "0.13.0", features = ["axum"] }
 tokio = { version = "1.37.0", features = ["full"] }
+tower = "0.4.13"
+tower-http = { version = "0.5.2", features = ["fs", "trace"]}
 tracing = "0.1.40"
 tracing-subscriber = "0.3.18"

Dockerfile

@@ -17,7 +17,8 @@ RUN cargo build --release --bin api-server
 
 # We do not need the Rust toolchain to run the binary!
 FROM debian:bookworm-slim AS runtime
-RUN apt update && apt install -y libgdal-dev
+RUN apt update && apt install -y gdal-bin
 WORKDIR /app
-COPY --from=builder /app/target/release/api-server /usr/local/bin
-ENTRYPOINT ["/usr/local/bin/api-server"]
+COPY --from=builder /app/target/release/api-server .
+COPY assets assets
+ENTRYPOINT ["/app/api-server"]

assets/index.html Normal file

@@ -0,0 +1,105 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Free Elevation API Service</title>
<style>
body {
font-family: Arial, sans-serif;
margin: 0;
padding: 0;
background: #f4f4f4;
color: #454545;
}
header {
background: #007BFF;
color: #ffffff;
padding: 10px 20px;
text-align: center;
}
section {
padding: 20px;
margin: 0 10px;
background: #ffffff;
border-radius: 8px;
box-shadow: 0 2px 5px rgba(0, 0, 0, 0.1);
}
footer {
text-align: center;
padding: 10px 0;
background: #007BFF;
color: white;
/*position: fixed;*/
bottom: 0;
width: 100%;
}
code {
background: #eee;
padding: 2px 5px;
border-radius: 3px;
}
a {
color: #07a;
}
</style>
</head>
<body>
<header>
<h1>Free Elevation API Service</h1>
<p>Get the elevation of every point on Earth.</p>
</header>
<section>
<h2>About This Service</h2>
<p>This API provides you with access to elevation data across the entire globe, absolutely free of charge.</p>
</section>
<section>
<h2>API Usage</h2>
<p>To use the API, make a GET request to this endpoint with the required parameters. Here is an example request:
</p>
<code>https://www.elevation-api.eu/v1/elevation/:lat/:lon[?json]</code>
<h3>Parameters:</h3>
<ul>
<li><strong>lat</strong> - Latitude of the location, WGS-84</li>
<li><strong>lon</strong> - Longitude of the location, WGS-84</li>
<li><strong>?json</strong> - Append this to have the result returned as JSON</li>
</ul>
<h3>Response:</h3>
<p>The API will return the elevation in meters. For example:</p>
<code>42</code>
<p>If the point is not in the DEM dataset, the response code will be 501 (or <code>Null</code> in JSON mode).
</p>
<h3>Try it out:</h3>
<a href="http://www.elevation-api.eu/v1/elevation/46.24566/6.17081">Get the elevation of the Lake Leman</a>
</section>
<section>
<h2>JSON API endpoint (multi coordinates)</h2>
You can also request many coordinates at once by using the following endpoint:
<code>https://www.elevation-api.eu/v1/elevation?pts=[[lat1,lon1],[lat2,lon2],[lat3,lon3]]</code>
<h4>Example</h4>
<code><a href="https://www.elevation-api.eu/v1/elevation?pts=[[46.24566,6.17081],[46.85499,6.78134]]">
https://www.elevation-api.eu/v1/elevation?pts=[[46.24566,6.17081],[46.85499,6.78134]]
</a></code>
</section>
<section>
<h2>Misc</h2>
<h3>Data source</h3>
<p>The digital elevation data comes from ESA's Copernicus program.</p>
<h3>Rate limits</h3>
<p>Don't be an idiot. Less than 10/s is okay</p>
<h3>Contact</h3>
<p>For more information, you can reach me at f@vi-di.fr</p>
</section>
<footer>
<p>© 2024 Frank Villaro-Dixon. No rights reserved</p>
</footer>
</body>
</html>
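For readers who want to exercise the endpoints documented in the page above from code, here is a minimal client sketch in Rust. It is not part of this change set: the reqwest and tokio dependencies, the crate versions, and the error handling are assumptions; only the URLs are taken from the page.

// Hypothetical client sketch, not part of this change set.
// Assumes a separate Cargo project with reqwest = "0.12" and tokio = { version = "1", features = ["full"] }.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Single-point endpoint: plain-text elevation in metres.
    let single = reqwest::get("https://www.elevation-api.eu/v1/elevation/46.24566/6.17081")
        .await?
        .text()
        .await?;
    println!("single point: {single} m");

    // Multi-point endpoint: pts is a JSON array of [lat, lon] pairs.
    let multi = reqwest::get(
        "https://www.elevation-api.eu/v1/elevation?pts=[[46.24566,6.17081],[46.85499,6.78134]]",
    )
    .await?
    .text()
    .await?;
    println!("multi point: {multi}"); // JSON body of the form {"elevations":[...]}
    Ok(())
}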

src/dem.rs

@@ -1,52 +1,92 @@
 use std::sync::Arc;
-use std::env;
 
 use gdal::errors::GdalError;
 use gdal::Dataset;
-use tracing::{info, debug_span, debug};
+use tracing::{debug, debug_span, error, info};
 use moka::future::Cache;
 
-pub struct MyDataset{pub ds: Dataset}
-unsafe impl Send for MyDataset{}
-unsafe impl Sync for MyDataset{}
+pub struct MyDataset {
+    pub ds: Dataset,
+}
+unsafe impl Send for MyDataset {}
+unsafe impl Sync for MyDataset {}
 
-pub type DSC = Cache<String, Arc<MyDataset>>;
-
-pub fn new_cache(max_elems: u64) -> DSC {
-    Cache::builder()
-        // Up to 10,000 entries.
-        .max_capacity(max_elems)
-        // Create the cache.
-        .build()
+pub struct DatasetRepository {
+    cache: Cache<String, Arc<MyDataset>>,
+    basedir: String,
 }
 
-pub async fn elevation_from_coordinates(dsc: DSC, lat: f64, lon: f64) -> f64 {
+unsafe impl Send for DatasetRepository {}
+unsafe impl Sync for DatasetRepository {}
+
+impl DatasetRepository {
+    pub fn new(basedir: String) -> Self {
+        let c = Cache::builder()
+            .max_capacity(100)
+            // Create the cache.
+            .build();
+        DatasetRepository { cache: c, basedir }
+    }
+
+    async fn get(&self, filename: String) -> Option<Arc<MyDataset>> {
+        let full_filename = format!("{}/{filename}", self.basedir);
+        if !self.cache.contains_key(&full_filename) {
+            info!("Will open {full_filename} because not in cache!");
+            let ds = Dataset::open(full_filename.clone());
+            match ds {
+                Err(x) => {
+                    error!("File not present");
+                    return None;
+                }
+                Ok(ds) => {
+                    let mds = Arc::new(MyDataset { ds });
+                    self.cache.insert(full_filename.clone(), mds).await;
+                }
+            }
+        }
+        self.cache.get(&full_filename).await
+    }
+}
+
+impl Clone for DatasetRepository {
+    fn clone(&self) -> Self {
+        Self {
+            basedir: self.basedir.clone(),
+            cache: self.cache.clone(),
+        }
+    }
+}
+
+pub async fn elevation_from_coordinates(
+    dr: &DatasetRepository,
+    lat: f64,
+    lon: f64,
+) -> Result<Option<f64>, GdalError> {
     let span = debug_span!("req", lat=%lat, lon=%lon);
     let _guard = span.enter();
 
     let filename = get_filename_from_latlon(lat, lon);
     debug!(filename, "filename");
 
-    if !dsc.contains_key(&filename) {
-        info!("Will open {filename} because not in cache!");
-        let ds = Arc::new(MyDataset{ds:Dataset::open(filename.clone()).unwrap()});
-        dsc.insert(filename.clone(), ds).await;
-    }
-
-    let ds = &dsc.get(&filename).await.unwrap().ds;
-    let (px, py) = geo_to_pixel(ds, lat, lon).unwrap();
+    let ds = &match dr.get(filename).await {
+        Some(x) => x,
+        None => return Ok(None),
+    }
+    .ds;
 
-    let raster_band = ds.rasterband(1).unwrap();
-    let raster_value = raster_band.read_as::<f64>((px, py), (1, 1), (1, 1), None).unwrap();
-    raster_value.data[0]
+    let (px, py) = geo_to_pixel(ds, lat, lon)?;
+    let raster_band = ds.rasterband(1)?;
+    let raster_value = raster_band.read_as::<f64>((px, py), (1, 1), (1, 1), None)?;
+    Ok(Some(raster_value.data[0]))
 }
 
 fn get_filename_from_latlon(lat: f64, lon: f64) -> String {
     // Calculate the rounded values for latitude and longitude
     let rounded_lat = lat.floor();
     let rounded_lon = lon.floor();
@@ -58,16 +98,13 @@ fn get_filename_from_latlon(lat: f64, lon: f64) -> String {
     let lat_prefix = if rounded_lat >= 0.0 { "N" } else { "S" };
     let lon_prefix = if rounded_lon >= 0.0 { "E" } else { "W" };
 
-    let filename_prefix = env::var("DEM_LOCATION").unwrap_or("/data".to_string());
-
     // Construct the filename
     let filename = format!(
-        "{filename_prefix}/Copernicus_DSM_30_{}{}_00_{}{}_00_DEM.tif",
+        "Copernicus_DSM_30_{}{}_00_{}{}_00_DEM.tif",
        lat_prefix, lat_deg, lon_prefix, lon_deg
     );
 
     filename
 }
 
 fn geo_to_pixel(dataset: &Dataset, lat: f64, lon: f64) -> Result<(isize, isize), GdalError> {
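For reference, a minimal usage sketch of the refactored dem API shown in the diff above. It is not part of the change set: the mod dem; layout of the caller, the /data tile directory, and the sample coordinates are assumptions taken from elsewhere in this compare view.

// Sketch only: exercises the public surface of the refactored dem module,
// e.g. from a small test binary in the same crate that declares `mod dem;`.
mod dem;

use dem::{elevation_from_coordinates, DatasetRepository};

#[tokio::main]
async fn main() {
    // The repository lazily opens GDAL datasets and keeps them in a moka cache.
    let repo = DatasetRepository::new("/data".to_string());

    // Sample point from the website docs; Ok(None) means no DEM tile covers it.
    match elevation_from_coordinates(&repo, 46.24566, 6.17081).await {
        Ok(Some(elevation)) => println!("elevation: {elevation} m"),
        Ok(None) => println!("point not covered by the DEM dataset"),
        Err(e) => eprintln!("GDAL error: {e}"),
    }
}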

src/main.rs

@@ -1,47 +1,188 @@
 mod dem;
 
-use std::env;
-use axum_macros::debug_handler;
 use axum::{
-    extract::{Path, State}, routing::{get}, Router
+    extract::{Extension, Path, Query, State},
+    http::StatusCode,
+    response::{IntoResponse, Response},
+    routing::get,
+    Json, Router,
 };
-use dem::{DSC, new_cache};
-use tracing::info;
+use axum_macros::debug_handler;
+use std::env;
+
+use serde::ser::{SerializeSeq, Serializer};
+use serde::{Deserialize, Deserializer, Serialize};
+use serde_qs::axum::{QsQueryConfig, QsQueryRejection};
+use tower_http::trace::TraceLayer;
+use tower_http::{
+    services::ServeDir,
+    trace::{self},
+};
+use tracing::{error, info, Level};
+
+use dem::DatasetRepository;
+
+const DEFAULT_DATA_DIR: &str = "/data";
+const DEFAULT_PORT: &str = "3000";
 
-#[tokio::main(flavor = "current_thread")]
+#[derive(Deserialize, Debug)]
+struct Opts {
+    #[serde(default, deserialize_with = "empty_string_as_none")]
+    json: bool,
+}
+
+#[derive(Deserialize, Debug)]
+struct JsParams {
+    #[serde(default, deserialize_with = "deserialize_array")]
+    pts: Vec<(f64, f64)>,
+}
+
+#[derive(Serialize, Debug)]
+struct JsResult {
+    #[serde(serialize_with = "serialize_vec_round")]
+    elevations: Vec<Option<f64>>,
+}
+
+fn serialize_vec_round<S>(v: &Vec<Option<f64>>, s: S) -> Result<S::Ok, S::Error>
+where
+    S: serde::Serializer,
+{
+    let mut sv = s.serialize_seq(Some(v.len()))?;
+    for e in v {
+        match e {
+            None => sv.serialize_element(&e)?,
+            Some(x) => {
+                // Round the f64 to 1 decimal place. This is ugly as shit.
+                let fmt = format!("{:.1}", x);
+                let xx = fmt.parse::<f64>().unwrap();
+                sv.serialize_element(&xx)?;
+            }
+        };
+    }
+    sv.end()
+}
+
+fn deserialize_array<'de, D>(deserializer: D) -> Result<Vec<(f64, f64)>, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s = String::deserialize(deserializer)?;
+    let result: Result<Vec<(f64, f64)>, serde_json::Error> = serde_json::from_str(&s);
+    match result {
+        Ok(x) => Ok(x),
+        Err(e) => Err(serde::de::Error::custom(
+            "Invalid array: ".to_string() + &e.to_string(),
+        )),
+    }
+}
+
+fn empty_string_as_none<'de, D>(de: D) -> Result<bool, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let opt = Option::<String>::deserialize(de)?;
+    match opt.as_deref() {
+        None => Ok(false),
+        Some("") => Ok(true),
+        Some(x) => Ok(x != "false"),
+    }
+}
+
+#[tokio::main]
 async fn main() {
     // initialize tracing
     tracing_subscriber::fmt::init();
 
-    let cache = new_cache(1000);
-    // build our application with a route
-    let app = Router::new()
-        .route("/elevation/:lat/:lon", get(get_elevation))
-        .with_state(cache);
+    let config = load_config().unwrap();
+    let cache = DatasetRepository::new(config.basedir);
+
+    let serve_dir = ServeDir::new("assets");
+
+    let app = Router::<DatasetRepository>::new()
+        .route("/v1/elevation/:lat/:lon", get(get_elevation))
+        .route("/v1/elevation", get(get_elevation_js))
+        .nest_service("/", serve_dir)
+        .with_state(cache)
+        .layer(
+            TraceLayer::new_for_http()
+                .make_span_with(trace::DefaultMakeSpan::new().level(Level::DEBUG))
+                .on_response(trace::DefaultOnResponse::new().level(Level::DEBUG)),
+        )
+        .layer(Extension(QsQueryConfig::new(5, false).error_handler(
+            |err| {
+                QsQueryRejection::new(
+                    format!("Get fucked: {}", err),
+                    StatusCode::UNPROCESSABLE_ENTITY,
+                )
+            },
+        )));
 
-    let port = env::var("HTTP_PORT").unwrap_or("3000".to_string());
-    let host = format!("[::]:{port}");
+    let host = format!("[::]:{}", config.port);
     info!("Will start server on {host}");
 
     let listener = tokio::net::TcpListener::bind(host).await.unwrap();
     axum::serve(listener, app).await.unwrap();
 }
 
 #[debug_handler]
-async fn get_elevation(State(shared): State<DSC>, Path((lat, lon)): Path<(f64, f64)>) -> String{
-    let ele = dem::elevation_from_coordinates(shared, lat, lon);
-    let myele = ele.await;
-    format!("{myele}")
+async fn get_elevation(
+    State(dsr): State<DatasetRepository>,
+    query_opts: Query<Opts>,
+    Path((lat, lon)): Path<(f64, f64)>,
+) -> Response {
+    println!("lat: {}, lon: {}", lat, lon);
+    println!("query_opts: {:?}", query_opts);
+
+    let ele;
+    match dem::elevation_from_coordinates(&dsr, lat, lon).await {
+        Ok(x) => match x {
+            Some(el) => ele = el,
+            None => ele = 0.,
+        },
+        Err(e) => {
+            return e.to_string().into_response();
+        }
+    }
+
+    #[derive(Serialize)]
+    struct Ele {
+        elevation: f64,
+    };
+
+    if query_opts.json {
+        let r = Ele { elevation: ele };
+        Json(r).into_response()
+    } else {
+        format!("{}", ele).into_response()
+    }
 }
+
+#[debug_handler]
+async fn get_elevation_js(
+    State(dsr): State<DatasetRepository>,
+    Query(params): Query<JsParams>,
+) -> Response {
+    let mut response = JsResult { elevations: vec![] };
+    for pt in params.pts {
+        let ele = dem::elevation_from_coordinates(&dsr, pt.0, pt.1).await;
+        match ele {
+            Ok(x) => response.elevations.push(x),
+            Err(e) => {
+                error!("Error: {e}");
+                response.elevations.push(None);
+            }
+        }
+    }
+    Json(response).into_response()
+}
+
+fn load_config() -> Result<Config, env::VarError> {
+    Ok(Config {
+        basedir: env::var("DEM_LOCATION").unwrap_or_else(|_| DEFAULT_DATA_DIR.to_string()),
+        port: env::var("HTTP_PORT").unwrap_or_else(|_| DEFAULT_PORT.to_string()),
+    })
+}
+
+struct Config {
+    basedir: String,
+    port: String,
+}
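To make the pts handling in get_elevation_js and deserialize_array above concrete: the query parameter arrives as a string and is decoded with serde_json into (lat, lon) tuples, because serde maps a two-element JSON array onto a Rust tuple. The standalone sketch below mirrors that idea; the helper name and the main wrapper are illustrative, not part of the repository.

// Standalone illustration of the pts parsing used by get_elevation_js above.
// "[[46.24566,6.17081],[46.85499,6.78134]]" maps onto Vec<(f64, f64)> via serde_json.
fn parse_pts(raw: &str) -> Result<Vec<(f64, f64)>, serde_json::Error> {
    serde_json::from_str(raw)
}

fn main() {
    // The same value the website example passes as ?pts=...
    let pts = parse_pts("[[46.24566,6.17081],[46.85499,6.78134]]").unwrap();
    for (lat, lon) in &pts {
        println!("lat={lat}, lon={lon}");
    }
    assert_eq!(pts.len(), 2);
}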