From bf4413e58dc23ded80879c49f54248fc5a7507ac Mon Sep 17 00:00:00 2001
From: Aliaksandr Valialkin <valyala@gmail.com>
Date: Mon, 23 Dec 2019 13:40:16 +0200
Subject: [PATCH] README.md: document how to export and import gzipped data

---
 README.md                             | 23 +++++++++++++++++++++--
 docs/Single-server-VictoriaMetrics.md | 23 +++++++++++++++++++++--
 2 files changed, 42 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index c8f372521c..729e0234e9 100644
--- a/README.md
+++ b/README.md
@@ -544,6 +544,15 @@ Each JSON line would contain data for a single time series. An example output:
 Optional `start` and `end` args may be added to the request in order to limit the time frame for the exported data. These args may contain either
 unix timestamp in seconds or [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) values.
 
+Pass `Accept-Encoding: gzip` HTTP header in the request to `/api/v1/export` in order to reduce network bandwidth when exporting large amounts
+of time series data. This enables gzip compression for the exported data. Example for exporting gzipped data:
+
+```
+curl -H 'Accept-Encoding: gzip' http://localhost:8428/api/v1/export -d 'match[]={__name__!=""}' > data.jsonl.gz
+```
+
+The maximum duration for each request to `/api/v1/export` is limited by the `-search.maxExportDuration` command-line flag.
+
 Exported data can be imported via POST'ing it to [/api/v1/import](#how-to-import-time-series-data).
 
 
@@ -562,10 +571,20 @@ The most efficient protocol for importing data into VictoriaMetrics is `/api/v1/
 
 ```
 # Export the data from <source-victoriametrics>:
-curl -s 'http://source-victoriametrics:8428/api/v1/export' -d 'match={__name__!=""}' > exported_data.jsonl
+curl http://source-victoriametrics:8428/api/v1/export -d 'match={__name__!=""}' > exported_data.jsonl
 
 # Import the data to <destination-victoriametrics>:
-curl -X POST 'http://destination-victoriametrics:8428/api/v1/import' -T exported_data.jsonl
+curl -X POST http://destination-victoriametrics:8428/api/v1/import -T exported_data.jsonl
+```
+
+Pass `Content-Encoding: gzip` HTTP request header to `/api/v1/import` for importing gzipped data:
+
+```
+# Export gzipped data from <source-victoriametrics>:
+curl -H 'Accept-Encoding: gzip' http://source-victoriametrics:8428/api/v1/export -d 'match={__name__!=""}' > exported_data.jsonl.gz
+
+# Import gzipped data to <destination-victoriametrics>:
+curl -X POST -H 'Content-Encoding: gzip' http://destination-victoriametrics:8428/api/v1/import -T exported_data.jsonl.gz
 ```
 
 Each request to `/api/v1/import` can load up to a single vCPU core on VictoriaMetrics. Import speed can be improved by splitting the original file into smaller parts
diff --git a/docs/Single-server-VictoriaMetrics.md b/docs/Single-server-VictoriaMetrics.md
index b311e19511..b6cbf14106 100644
--- a/docs/Single-server-VictoriaMetrics.md
+++ b/docs/Single-server-VictoriaMetrics.md
@@ -534,6 +534,15 @@ Each JSON line would contain data for a single time series. An example output:
 Optional `start` and `end` args may be added to the request in order to limit the time frame for the exported data. These args may contain either
 unix timestamp in seconds or [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) values.
 
+Pass `Accept-Encoding: gzip` HTTP header in the request to `/api/v1/export` in order to reduce network bandwidth when exporting large amounts
+of time series data. This enables gzip compression for the exported data. Example for exporting gzipped data:
+
+```
+curl -H 'Accept-Encoding: gzip' http://localhost:8428/api/v1/export -d 'match[]={__name__!=""}' > data.jsonl.gz
+```
+
+The maximum duration for each request to `/api/v1/export` is limited by the `-search.maxExportDuration` command-line flag.
+
 Exported data can be imported via POST'ing it to [/api/v1/import](#how-to-import-time-series-data).
 
 
@@ -552,10 +561,20 @@ The most efficient protocol for importing data into VictoriaMetrics is `/api/v1/
 
 ```
 # Export the data from <source-victoriametrics>:
-curl -s 'http://source-victoriametrics:8428/api/v1/export' -d 'match={__name__!=""}' > exported_data.jsonl
+curl http://source-victoriametrics:8428/api/v1/export -d 'match={__name__!=""}' > exported_data.jsonl
 
 # Import the data to <destination-victoriametrics>:
-curl -X POST 'http://destination-victoriametrics:8428/api/v1/import' -T exported_data.jsonl
+curl -X POST http://destination-victoriametrics:8428/api/v1/import -T exported_data.jsonl
+```
+
+Pass `Content-Encoding: gzip` HTTP request header to `/api/v1/import` for importing gzipped data:
+
+```
+# Export gzipped data from <source-victoriametrics>:
+curl -H 'Accept-Encoding: gzip' http://source-victoriametrics:8428/api/v1/export -d 'match={__name__!=""}' > exported_data.jsonl.gz
+
+# Import gzipped data to <destination-victoriametrics>:
+curl -X POST -H 'Content-Encoding: gzip' http://destination-victoriametrics:8428/api/v1/import -T exported_data.jsonl.gz
 ```
 
 Each request to `/api/v1/import` can load up to a single vCPU core on VictoriaMetrics. Import speed can be improved by splitting the original file into smaller parts