lib/httpserver: add handler to serve /robots.txt and deny search indexing (#4143)
This handler instructs search engines that the exposed content must not be indexed. This should help to address issues like #4128, where instances are exposed to the internet without authentication.
parent 1103f36c6e
commit 2ca92aaa34
2 changed files with 7 additions and 0 deletions
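The served body is the standard deny-all robots.txt: "User-agent: *" followed by "Disallow: /" asks every crawler to skip the whole site. For illustration only, a minimal standalone sketch of the same mechanism on a plain net/http server is shown below; the robotsHandler name and the :8080 listen address are made up for the example and are not part of the VictoriaMetrics code, which appears in the diff further down.

package main

import (
	"fmt"
	"log"
	"net/http"
)

// robotsHandler serves a deny-all robots.txt, mirroring the response body
// added to VictoriaMetrics in this commit. The handler name is illustrative.
func robotsHandler(w http.ResponseWriter, r *http.Request) {
	// "Disallow: /" for every user agent asks crawlers not to index anything.
	fmt.Fprintf(w, "User-agent: *\nDisallow: /\n")
}

func main() {
	http.HandleFunc("/robots.txt", robotsHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}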
@@ -15,6 +15,8 @@ The following tip changes can be tested by building VictoriaMetrics components f
 
 ## v1.79.x long-time support release (LTS)
 
+* SECURITY: serve `/robots.txt` content to disallow indexing of the exposed instances by search engines. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4128) for details.
+
 ## [v1.79.12](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.79.12)
 
 Released at 2023-04-06
@@ -316,6 +316,11 @@ func handlerWrapper(s *server, w http.ResponseWriter, r *http.Request, rh Reques
 		// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1833
 		fmt.Fprintf(w, "VictoriaMetrics is Ready.\n")
 		return
+	case "/robots.txt":
+		// This prevents search engines from indexing contents
+		// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4128
+		fmt.Fprintf(w, "User-agent: *\nDisallow: /\n")
+		return
 	default:
 		if strings.HasPrefix(r.URL.Path, "/debug/pprof/") {
 			pprofRequests.Inc()
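As a quick sanity check, a handler of the same shape can be exercised with net/http/httptest. The sketch below does not touch VictoriaMetrics internals: denyAll is a hypothetical stand-in for the /robots.txt case inside handlerWrapper (which is not exported), and only the response body is asserted.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"testing"
)

// denyAll is a hypothetical stand-in for the /robots.txt branch added in
// this commit; the real code lives inside handlerWrapper.
func denyAll(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "User-agent: *\nDisallow: /\n")
}

func TestRobotsTxtDeniesIndexing(t *testing.T) {
	req := httptest.NewRequest(http.MethodGet, "/robots.txt", nil)
	rec := httptest.NewRecorder()

	denyAll(rec, req)

	want := "User-agent: *\nDisallow: /\n"
	if got := rec.Body.String(); got != want {
		t.Fatalf("unexpected robots.txt body; got %q; want %q", got, want)
	}
}

Fetching /robots.txt on a running instance should return the same two lines, which is what tells crawlers to keep the exposed endpoints out of their indexes.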