Compare commits
7 commits
1cbc72ea34
d13a05751f
ed95a7f84a
47239f51bc
76d29851e7
53705cc4f8
c97663d0c3
9 changed files with 96 additions and 488 deletions
.forgejo/workflows/build-docker.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
#
# .gitea/gitea-ci.yaml
#

name: Build
run-name: ${{ gitea.actor }} is runs ci pipeline
on: [ push ]

jobs:
  build:
    runs-on: ubuntu-latest
    needs: build
    if: gitea.ref == 'refs/heads/build' #XXX
    steps:
      - uses: https://github.com/actions/checkout@v4
      - name: Set up Docker Buildx
        uses: https://github.com/docker/setup-buildx-action@v3
        #uses: https://github.com/Frankkkkk/setup-buildx-action@master
        with:
          config-inline: |
            [registry."<my-private-unsecure-git-repository-ip-address>:5000"]
            http = true
            insecure = true
      - name: Build and push Docker image
        uses: https://github.com/docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: "<my-private-unsecure-git-repository-ip-address>:5000/<my-docker-image>:${{gitea.sha}},<my-private-unsecure-git-repository-ip-address>:5000/<my-docker-image>:latest"
Deleted file (path not shown in this view): previous CI workflow, 31 lines removed

@@ -1,31 +0,0 @@
name: ci

on:
  push:
    branches:
      - "cicd"
      - main

jobs:
  docker:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          registry: forge.k3s.fr
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}
      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          tags: forge.k3s.fr/elevation-api-eu/server:latest
.forgejo/workflows/test.yaml (new file, 9 lines)
@@ -0,0 +1,9 @@
on:
  push: '*'

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Show env
        run: env
Cargo.lock (generated, 113 lines changed)
@@ -36,10 +36,7 @@ dependencies = [
 "moka",
 "serde",
 "serde_json",
 "serde_qs",
 "tokio",
 "tower",
 "tower-http",
 "tracing",
 "tracing-subscriber",
]

@@ -362,21 +359,6 @@ dependencies = [
 "percent-encoding",
]

[[package]]
name = "futures"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0"
dependencies = [
 "futures-channel",
 "futures-core",
 "futures-executor",
 "futures-io",
 "futures-sink",
 "futures-task",
 "futures-util",
]

[[package]]
name = "futures-channel"
version = "0.3.30"

@@ -384,7 +366,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78"
dependencies = [
 "futures-core",
 "futures-sink",
]

[[package]]

@@ -393,23 +374,6 @@ version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"

[[package]]
name = "futures-executor"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d"
dependencies = [
 "futures-core",
 "futures-task",
 "futures-util",
]

[[package]]
name = "futures-io"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"

[[package]]
name = "futures-macro"
version = "0.3.30"

@@ -421,12 +385,6 @@ dependencies = [
 "syn",
]

[[package]]
name = "futures-sink"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"

[[package]]
name = "futures-task"
version = "0.3.30"

@@ -439,13 +397,9 @@ version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
dependencies = [
 "futures-channel",
 "futures-core",
 "futures-io",
 "futures-macro",
 "futures-sink",
 "futures-task",
 "memchr",
 "pin-project-lite",
 "pin-utils",
 "slab",

@@ -568,12 +522,6 @@ dependencies = [
 "pin-project-lite",
]

[[package]]
name = "http-range-header"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ce4ef31cda248bbdb6e6820603b82dfcd9e833db65a43e997a0ccec777d11fe"

[[package]]
name = "httparse"
version = "1.8.0"

@@ -710,16 +658,6 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"

[[package]]
name = "mime_guess"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
dependencies = [
 "mime",
 "unicase",
]

[[package]]
name = "minimal-lexical"
version = "0.2.1"

@@ -1116,19 +1054,6 @@ dependencies = [
 "serde",
]

[[package]]
name = "serde_qs"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd34f36fe4c5ba9654417139a9b3a20d2e1de6012ee678ad14d240c22c78d8d6"
dependencies = [
 "axum",
 "futures",
 "percent-encoding",
 "serde",
 "thiserror",
]

[[package]]
name = "serde_urlencoded"
version = "0.7.1"

@@ -1306,19 +1231,6 @@ dependencies = [
 "syn",
]

[[package]]
name = "tokio-util"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15"
dependencies = [
 "bytes",
 "futures-core",
 "futures-sink",
 "pin-project-lite",
 "tokio",
]

[[package]]
name = "tower"
version = "0.4.13"

@@ -1335,31 +1247,6 @@ dependencies = [
 "tracing",
]

[[package]]
name = "tower-http"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5"
dependencies = [
 "bitflags 2.5.0",
 "bytes",
 "futures-util",
 "http",
 "http-body",
 "http-body-util",
 "http-range-header",
 "httpdate",
 "mime",
 "mime_guess",
 "percent-encoding",
 "pin-project-lite",
 "tokio",
 "tokio-util",
 "tower-layer",
 "tower-service",
 "tracing",
]

[[package]]
name = "tower-layer"
version = "0.3.2"
Cargo.toml

@@ -13,9 +13,6 @@ gdal = { version = "0.16.0", features = ["bindgen"] }
moka = { version = "0.12.5", features = ["future"] }
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.115"
serde_qs = { version = "0.13.0", features = ["axum"] }
tokio = { version = "1.37.0", features = ["full"] }
tower = "0.4.13"
tower-http = { version = "0.5.2", features = ["fs", "trace"]}
tracing = "0.1.40"
tracing-subscriber = "0.3.18"
Dockerfile (10 lines changed)
@@ -2,8 +2,7 @@ FROM docker.io/lukemathwalker/cargo-chef:latest-rust-1 AS chef
WORKDIR /app

FROM chef AS planner
COPY Cargo.* .
ADD src src
COPY . .
RUN cargo chef prepare --recipe-path recipe.json

FROM chef AS builder

@@ -18,8 +17,7 @@ RUN cargo build --release --bin api-server

# We do not need the Rust toolchain to run the binary!
FROM debian:bookworm-slim AS runtime
RUN apt update && apt install -y gdal-bin
RUN apt update && apt install -y libgdal-dev
WORKDIR /app
COPY --from=builder /app/target/release/api-server .
COPY assets assets
ENTRYPOINT ["/app/api-server"]
COPY --from=builder /app/target/release/api-server /usr/local/bin
ENTRYPOINT ["/usr/local/bin/api-server"]
Deleted file (path not shown in this view): HTML landing page, 107 lines removed

@@ -1,107 +0,0 @@
<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Free Elevation API Service</title>
    <style>
        body {
            font-family: Arial, sans-serif;
            margin: 0;
            padding: 0;
            background: #f4f4f4;
            color: #454545;
        }

        header {
            background: #1CD4FF;
            color: #ffffff;
            padding: 10px 20px;
            text-align: center;
        }

        section {
            padding: 20px;
            margin: 0 10px;
            background: #ffffff;
            border-radius: 8px;
            box-shadow: 0 2px 5px rgba(0, 0, 0, 0.1);
        }

        footer {
            text-align: center;
            padding: 10px 0;
            background: #1CD4FF;
            color: white;
            /*position: fixed;*/
            bottom: 0;
            width: 100%;
        }

        code {
            background: #eee;
            padding: 2px 5px;
            border-radius: 3px;
        }

        a {
            color: #07a;
        }
    </style>
</head>

<body>
    <header>
        <h1>Free Elevation API Service</h1>
        <p>Get the elevation of every point on Earth.</p>
    </header>
    <section>
        <h2>About This Service</h2>
        <p>This API provides you with access to elevation data across Earth's globe, absolutely free of charge.</p>
    </section>
    <section>
        <h2>API Usage</h2>
        <p>To use the API, make a GET request to this endpoint with the required parameters. Here is an example request:
        </p>
        <code>https://www.elevation-api.eu/v1/elevation/:lat/:lon[?json]</code>
        <h3>Parameters:</h3>
        <ul>
            <li><strong>lat</strong> - Latitude of the location, WGS-84</li>
            <li><strong>lon</strong> - Longitude of the location, WGS-84</li>
            <li><strong>?json</strong> - Append this to have the result retuned as JSON</li>
        </ul>
        <h3>Response:</h3>
        <p>The API will return the elevation in meters. For example:</p>
        <code>42</code>
        <p>If the point is not into the DEM dataset, the response code will be 501 (or <code>Null</code> in JSON mode)
        </p>
        <h3>Try it out:</h3>
        <a href="http://www.elevation-api.eu/v1/elevation/46.24566/6.17081">Get the elevation of the Lake Leman</a>
    </section>
    <section>
        <h2>JSON API endpoint (multi coordinates)</h2>
        You can also request many coordinates at once by using the following endpoint:
        <code>https://www.elevation-api.eu/v1/elevation?pts=[[lat1,lon1],[lat2,lon2],[lat3,lon3]]</code>
        <h4>Example</h4>
        <code><a href="https://www.elevation-api.eu/v1/elevation?pts=[[46.24566,6.17081],[46.85499,6.78134]]">
            https://www.elevation-api.eu/v1/elevation?pts=[[46.24566,6.17081],[46.85499,6.78134]]
        </a></code>
    </section>
    <section>
        <h2>Misc</h2>
        <h3>Data source</h3>
        <p>The digital elevation data comes from ESA's Copernicus program.</p>
        <h3>Rate limits</h3>
        <p>Don't be an idiot. Less than 10/s is okay</p>
        <h3>Source code</h3>
        <p><a href="https://forge.k3s.fr/elevation-api-eu/api-server">https://forge.k3s.fr/elevation-api-eu/api-server</a></p>
        <h3>Contact</h3>
        <p>For more information, you can reach me at f@vi-di.fr. Patches welcome!</p>
    </section>
    <footer>
        <p>© 2024 <a href="https://frank.villaro-dixon.eu">Frank Villaro-Dixon</a>. No rights reserved. Made with love in 🇨🇭 and 🇫🇷</p>
    </footer>
</body>

</html>
src/dem.rs (97 lines changed)
@@ -1,92 +1,52 @@
use std::sync::Arc;
use std::env;

use gdal::errors::GdalError;
use gdal::Dataset;

use tracing::{debug, debug_span, error, info};
use tracing::{info, debug_span, debug};

use moka::future::Cache;

pub struct MyDataset {
    pub ds: Dataset,
}
unsafe impl Send for MyDataset {}
unsafe impl Sync for MyDataset {}
pub struct MyDataset{pub ds: Dataset}
unsafe impl Send for MyDataset{}
unsafe impl Sync for MyDataset{}

pub struct DatasetRepository {
    cache: Cache<String, Arc<MyDataset>>,
    basedir: String,
pub type DSC = Cache<String, Arc<MyDataset>>;
pub fn new_cache(max_elems: u64) -> DSC {
    Cache::builder()
        // Up to 10,000 entries.
        .max_capacity(max_elems)
        // Create the cache.
        .build()
}

unsafe impl Send for DatasetRepository {}
unsafe impl Sync for DatasetRepository {}
impl DatasetRepository {
    pub fn new(basedir: String) -> Self {
        let c = Cache::builder()
            .max_capacity(100)
            // Create the cache.
            .build();

        DatasetRepository { cache: c, basedir }
    }

    async fn get(&self, filename: String) -> Option<Arc<MyDataset>> {
        let full_filename = format!("{}/{filename}", self.basedir);

        if !self.cache.contains_key(&full_filename) {
            info!("Will open {full_filename} because not in cache!");
            let ds = Dataset::open(full_filename.clone());
            match ds {
                Err(_) => {
                    error!("File not present");
                    return None;
                }
                Ok(ds) => {
                    let mds = Arc::new(MyDataset { ds });
                    self.cache.insert(full_filename.clone(), mds).await;
                }
            }
        }

        self.cache.get(&full_filename).await
    }
}

impl Clone for DatasetRepository {
    fn clone(&self) -> Self {
        Self {
            basedir: self.basedir.clone(),
            cache: self.cache.clone(),
        }
    }
}

pub async fn elevation_from_coordinates(
    dr: &DatasetRepository,
    lat: f64,
    lon: f64,
) -> Result<Option<f64>, GdalError> {
pub async fn elevation_from_coordinates(dsc: DSC, lat: f64, lon: f64) -> f64 {
    let span = debug_span!("req", lat=%lat, lon=%lon);
    let _guard = span.enter();

    let filename = get_filename_from_latlon(lat, lon);
    debug!(filename, "filename");

    let ds = &match dr.get(filename).await {
        Some(x) => x,
        None => return Ok(None),

    if !dsc.contains_key(&filename) {
        info!("Will open {filename} because not in cache!");
        let ds = Arc::new(MyDataset{ds:Dataset::open(filename.clone()).unwrap()});
        dsc.insert(filename.clone(), ds).await;
    }
    .ds;

    let (px, py) = geo_to_pixel(ds, lat, lon)?;
    let ds = &dsc.get(&filename).await.unwrap().ds;

    let raster_band = ds.rasterband(1)?;
    let raster_value = raster_band.read_as::<f64>((px, py), (1, 1), (1, 1), None)?;
    Ok(Some(raster_value.data[0]))
    let (px, py) = geo_to_pixel(ds, lat, lon).unwrap();

    let raster_band = ds.rasterband(1).unwrap();
    let raster_value = raster_band.read_as::<f64>((px, py), (1, 1), (1, 1), None).unwrap();
    raster_value.data[0]
}


fn get_filename_from_latlon(lat: f64, lon: f64) -> String {
    // Calculate the rounded values for latitude and longitude
    // Calculate the rounded values for latitude and longitude
    let rounded_lat = lat.floor();
    let rounded_lon = lon.floor();

@@ -98,13 +58,16 @@ fn get_filename_from_latlon(lat: f64, lon: f64) -> String {
    let lat_prefix = if rounded_lat >= 0.0 { "N" } else { "S" };
    let lon_prefix = if rounded_lon >= 0.0 { "E" } else { "W" };

    let filename_prefix = env::var("DEM_LOCATION").unwrap_or("/data".to_string());
    // Construct the filename
    let filename = format!(
        "Copernicus_DSM_30_{}{}_00_{}{}_00_DEM.tif",
        "{filename_prefix}/Copernicus_DSM_30_{}{}_00_{}{}_00_DEM.tif",
        lat_prefix, lat_deg, lon_prefix, lon_deg
    );

    filename
}

fn geo_to_pixel(dataset: &Dataset, lat: f64, lon: f64) -> Result<(isize, isize), GdalError> {
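For reference, a minimal standalone sketch of how the cache-based variant of src/dem.rs shown above (MyDataset, DSC, new_cache) can be exercised, assuming the moka, gdal and tokio crates from Cargo.toml; the concrete tile path below is an assumed example, not taken from the diff.

// Standalone sketch (not part of the diff): exercises the DSC cache type and
// MyDataset wrapper from the simpler variant of src/dem.rs shown above.
// The tile path is an assumed example; real tiles live under DEM_LOCATION.
use std::sync::Arc;

use gdal::Dataset;
use moka::future::Cache;

pub struct MyDataset { pub ds: Dataset }
unsafe impl Send for MyDataset {}
unsafe impl Sync for MyDataset {}

pub type DSC = Cache<String, Arc<MyDataset>>;

pub fn new_cache(max_elems: u64) -> DSC {
    Cache::builder().max_capacity(max_elems).build()
}

#[tokio::main]
async fn main() {
    let cache: DSC = new_cache(1000);
    let filename = "/data/Copernicus_DSM_30_N46_00_E006_00_DEM.tif".to_string();

    // Same open-once-then-cache pattern as elevation_from_coordinates above.
    if !cache.contains_key(&filename) {
        let ds = Arc::new(MyDataset { ds: Dataset::open(&filename).unwrap() });
        cache.insert(filename.clone(), ds).await;
    }

    let ds = &cache.get(&filename).await.unwrap().ds;
    println!("raster bands: {}", ds.raster_count());
}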
src/main.rs (182 lines changed)
@@ -1,185 +1,47 @@
mod dem;

use axum::{
    extract::{Extension, Path, Query, State},
    http::StatusCode,
    response::{IntoResponse, Response},
    routing::get,
    Json, Router,
};
use axum_macros::debug_handler;

use std::env;

use serde::ser::SerializeSeq;
use serde::{Deserialize, Deserializer, Serialize};
use serde_qs::axum::{QsQueryConfig, QsQueryRejection};
use tower_http::trace::TraceLayer;
use tower_http::{
    services::ServeDir,
    trace::{self},
use axum_macros::debug_handler;
use axum::{
    extract::{Path, State}, routing::{get}, Router
};
use tracing::{error, info, Level};
use dem::{DSC, new_cache};

use dem::DatasetRepository;
use tracing::info;

const DEFAULT_DATA_DIR: &str = "/data";
const DEFAULT_PORT: &str = "3000";

#[derive(Deserialize, Debug)]
struct Opts {
    #[serde(default, deserialize_with = "empty_string_as_none")]
    json: bool,
}

#[derive(Deserialize, Debug)]
struct JsParams {
    #[serde(default, deserialize_with = "deserialize_array")]
    pts: Vec<(f64, f64)>,
}

#[derive(Serialize, Debug)]
struct JsResult {
    #[serde(serialize_with = "serialize_vec_round")]
    elevations: Vec<Option<f64>>,
}

fn serialize_vec_round<S>(v: &Vec<Option<f64>>, s: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    let mut sv = s.serialize_seq(Some(v.len()))?;
    for e in v {
        match e {
            None => sv.serialize_element(&e)?,
            Some(x) => {
                // Round the f64 to 1 decimal place. This is ugly as shit.
                let fmt = format!("{:.1}", x);
                let xx = fmt.parse::<f64>().unwrap();
                sv.serialize_element(&xx)?;
            }
        };
    }
    sv.end()
}

fn deserialize_array<'de, D>(deserializer: D) -> Result<Vec<(f64, f64)>, D::Error>
where
    D: Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    let result: Result<Vec<(f64, f64)>, serde_json::Error> = serde_json::from_str(&s);
    match result {
        Ok(x) => Ok(x),
        Err(e) => Err(serde::de::Error::custom(
            "Invalid array: ".to_string() + &e.to_string(),
        )),
    }
}

fn empty_string_as_none<'de, D>(de: D) -> Result<bool, D::Error>
where
    D: Deserializer<'de>,
{
    let opt = Option::<String>::deserialize(de)?;
    match opt.as_deref() {
        None => Ok(false),
        Some("") => Ok(true),
        Some(x) => Ok(x != "false"),
    }
}

#[tokio::main]
#[tokio::main(flavor = "current_thread")]
async fn main() {
    // initialize tracing
    tracing_subscriber::fmt::init();

    let config = load_config().unwrap();
    let cache = DatasetRepository::new(config.basedir);

    let serve_dir = ServeDir::new("assets");
    let cache = new_cache(1000);

    let app = Router::<DatasetRepository>::new()
        .route("/v1/elevation/:lat/:lon", get(get_elevation))
        .route("/v1/elevation", get(get_elevation_js))
        .nest_service("/", serve_dir)
        .with_state(cache)
        .layer(
            TraceLayer::new_for_http()
                .make_span_with(trace::DefaultMakeSpan::new().level(Level::INFO))
                .on_response(trace::DefaultOnResponse::new().level(Level::INFO)),
        )
        .layer(Extension(QsQueryConfig::new(5, false).error_handler(
            |err| {
                QsQueryRejection::new(
                    format!("Get fucked: {}", err),
                    StatusCode::UNPROCESSABLE_ENTITY,
                )
            },
        )));
    // build our application with a route
    let app = Router::new()
        .route("/elevation/:lat/:lon", get(get_elevation))
        .with_state(cache);

    let host = format!("[::]:{}", config.port);

    let port = env::var("HTTP_PORT").unwrap_or("3000".to_string());
    let host = format!("[::]:{port}");
    info!("Will start server on {host}");

    let listener = tokio::net::TcpListener::bind(host).await.unwrap();
    axum::serve(listener, app).await.unwrap();
}

#[debug_handler]
async fn get_elevation(
    State(dsr): State<DatasetRepository>,
    query_opts: Query<Opts>,
    Path((lat, lon)): Path<(f64, f64)>,
) -> Response {
    let ele;
    match dem::elevation_from_coordinates(&dsr, lat, lon).await {
        Ok(x) => match x {
            Some(el) => ele = el,
            None => ele = 0.,
        },
        Err(e) => {
            return e.to_string().into_response();
        }
    }

    #[derive(Serialize)]
    struct Ele {
        elevation: f64,
    }

    if query_opts.json {
        let r = Ele { elevation: ele };
        Json(r).into_response()
    } else {
        format!("{}", ele).into_response()
    }
}

#[debug_handler]
async fn get_elevation_js(
    State(dsr): State<DatasetRepository>,
    Query(params): Query<JsParams>,
) -> Response {
    let mut response = JsResult { elevations: vec![] };
    for pt in params.pts {
        let ele = dem::elevation_from_coordinates(&dsr, pt.0, pt.1).await;
        match ele {
            Ok(x) => response.elevations.push(x),
            Err(e) => {
                error!("Error: {e}");
                response.elevations.push(None);
            }
        }
    }
    Json(response).into_response()
async fn get_elevation(State(shared): State<DSC>, Path((lat, lon)): Path<(f64, f64)>) -> String{

    let ele = dem::elevation_from_coordinates(shared, lat, lon);
    let myele = ele.await;
    format!("{myele}")
}

fn load_config() -> Result<Config, env::VarError> {
    Ok(Config {
        basedir: env::var("DEM_LOCATION").unwrap_or_else(|_| DEFAULT_DATA_DIR.to_string()),
        port: env::var("HTTP_PORT").unwrap_or_else(|_| DEFAULT_PORT.to_string()),
    })
}
struct Config {
    basedir: String,
    port: String,
}
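For reference, a minimal standalone sketch of the simplified router wiring shown in the shorter variant of src/main.rs above, assuming an axum version that supports axum::serve with a tokio TcpListener (as the diff itself uses); the handler body is a placeholder instead of the real dem::elevation_from_coordinates call.

// Standalone sketch (not part of the diff): the /elevation/:lat/:lon route and
// HTTP_PORT handling from the simpler variant of src/main.rs, with a stub
// handler instead of the real DEM lookup.
use axum::{extract::Path, routing::get, Router};

async fn get_elevation(Path((lat, lon)): Path<(f64, f64)>) -> String {
    // Placeholder: the real handler awaits dem::elevation_from_coordinates.
    format!("lat={lat} lon={lon}")
}

#[tokio::main(flavor = "current_thread")]
async fn main() {
    let app = Router::new().route("/elevation/:lat/:lon", get(get_elevation));

    let port = std::env::var("HTTP_PORT").unwrap_or("3000".to_string());
    let host = format!("[::]:{port}");
    println!("Will start server on {host}");

    let listener = tokio::net::TcpListener::bind(host).await.unwrap();
    axum::serve(listener, app).await.unwrap();
}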