I have test go channel memory usage and found that it differs from channel input frequency while the amount of goroutines are the same.
As the code below, I create thousands of goroutines, which produce data to its own channel and consume data from same channel.
By changing only the variable "interval" of producer, I can see the virtual memory and resident memory varies too by monitoring with command "top".
And the shorter is the interval, the usage of memory is larger.
Does anyone know what happen?
package main
import (
"fmt"
"os"
"os/signal"
"syscall"
"time"
)
type Session struct {
KeepAlive chan bool
}
var count = 1024 * 8 * 4
var interval = 250 * time.Millisecond //3718.0m 3.587g 1.2m S 224.0 23.1
// var interval = 500 * time.Millisecond //2011.2m 1.923g 1.2m S 118.8 12.4
// var interval = 1 * time.Second //1124.0m 1.059g 1.1m S 73.0 6.8
func main() {
var gracefulStop = make(chan os.Signal, 1)
signal.Notify(gracefulStop, syscall.SIGTERM, syscall.SIGINT, syscall.SIGKILL)
for i := 0; i < count; i++ {
go Loop()
}
<-gracefulStop
fmt.Println("gracefulStop")
}
func Loop() (err error) {
var se *Session
se = NewSession()
se.Serve()
return
}
func NewSession() (s *Session) {
fmt.Println("NewSession")
s = &Session{
KeepAlive: make(chan bool, 1),
}
return
}
func (s *Session) Serve() {
fmt.Println("Serve")
go s.sendLoop()
s.readLoop()
s.Close()
return
}
func (s *Session) Close() {
close(s.KeepAlive)
fmt.Println("Close")
}
// local-------------------------------------------------------
func (s *Session) readLoop() {
fmt.Println("readLoop")
sec := time.Duration(1 * time.Minute)
ServerHandlerLoop:
for {
select {
case alive := <-s.KeepAlive:
if alive == false {
break ServerHandlerLoop
}
case <-time.After(sec):
fmt.Println("Timeout")
break ServerHandlerLoop
}
}
fmt.Println("readLoop EXIT")
}
func (s *Session) sendLoop() {
for {
s.KeepAlive <- true
time.Sleep(interval)
}
s.KeepAlive <- false
fmt.Println("ReadMessage EXIT")
}