lib/streamaggr: limit the number of concurrent flushes of the aggregate data to the exact number of available CPUs

This should reduce the maximum memory usage during concurrent flushes of the aggregate data
Aliaksandr Valialkin 2023-01-07 00:18:49 -08:00
parent 0a14b7bb82
commit c630115be0


@@ -356,7 +356,7 @@ func (a *aggregator) runFlusher(interval time.Duration) {
 	}
 }
 
-var flushConcurrencyCh = make(chan struct{}, 2*cgroup.AvailableCPUs())
+var flushConcurrencyCh = make(chan struct{}, cgroup.AvailableCPUs())
 
 func (a *aggregator) flush() {
 	ctx := &flushCtx{
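
For illustration, here is a minimal, self-contained sketch of the counting-semaphore pattern the changed line relies on: a buffered channel whose capacity equals the available CPU count bounds how many flushes run at once, which in turn bounds the memory held by in-flight flushes. This is not the VictoriaMetrics implementation; runtime.NumCPU() stands in for cgroup.AvailableCPUs() (which also honors cgroup CPU limits), and the flush body is a placeholder.

package main

import (
	"fmt"
	"runtime"
	"sync"
	"time"
)

// flushConcurrencyCh acts as a counting semaphore: its capacity is the
// maximum number of flushes allowed to run concurrently.
// runtime.NumCPU() is a stand-in for cgroup.AvailableCPUs().
var flushConcurrencyCh = make(chan struct{}, runtime.NumCPU())

// flush simulates a memory-heavy flush of aggregate data. Acquiring a slot
// from flushConcurrencyCh before doing the work caps the number of
// in-flight flushes, and therefore the peak memory they can hold.
func flush(id int) {
	flushConcurrencyCh <- struct{}{}        // acquire a slot; blocks when all slots are taken
	defer func() { <-flushConcurrencyCh }() // release the slot when done

	fmt.Printf("flush %d started\n", id)
	time.Sleep(100 * time.Millisecond) // placeholder for the real flush work
	fmt.Printf("flush %d finished\n", id)
}

func main() {
	var wg sync.WaitGroup
	// Start many more flushes than CPUs; only NumCPU of them run at any moment.
	for i := 0; i < 4*runtime.NumCPU(); i++ {
		wg.Add(1)
		go func(id int) {
			defer wg.Done()
			flush(id)
		}(i)
	}
	wg.Wait()
}

Lowering the channel capacity from 2*AvailableCPUs() to AvailableCPUs(), as the diff does, halves the number of flushes that may be buffered in memory at the same time without changing this acquire/release pattern.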