I have a simple code that prints GET response time for each URL listed in a text file (url_list.txt).
When the requests are fired sequentially the returned times correspond to the expected response times of individual URLs.
However, when the same code is executed concurrently the returned response times are typically higher than expected.
It seems that the time_start I capture before http.Get(url) is called is not the time at which the request is actually sent. I guess the execution of http.Get(url) is queued to some extent.
Is there a better way to capture URL response time when using goroutines?
Here is my code:
Sequential requests:
package main
import ("fmt"
"net/http"
"io/ioutil"
"time"
"strings"
)
// get_resp_time performs a GET against url and prints how long it took
// to receive the response headers, followed by the URL.
//
// NOTE(review): idiomatic Go naming would be getRespTime, but the name
// is kept unchanged so existing callers keep working.
func get_resp_time(url string) {
	time_start := time.Now()
	resp, err := http.Get(url)
	if err != nil {
		// Report and continue instead of panicking: one bad URL
		// should not abort timing of all the remaining ones.
		fmt.Println("error:", url, err)
		return
	}
	defer resp.Body.Close()
	// Duration is time-to-headers, matching the original behavior.
	fmt.Println(time.Since(time_start), url)
	// Drain the body so the transport can reuse the underlying TCP
	// connection; otherwise later requests pay a fresh connection-setup
	// cost, which skews the measured times.
	ioutil.ReadAll(resp.Body)
}
func main() {
content, _ := ioutil.ReadFile("url_list.txt")
urls := strings.Split(string(content), "
")
for _, url := range urls {
get_resp_time(url)
//go get_resp_time(url)
}
//time.Sleep(20 * time.Second)
}
Concurrent requests:
package main
import (
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"
	"sync"
	"time"
)
// get_resp_time performs a GET against url and prints how long it took
// to receive the response headers, followed by the URL.
//
// When run from many goroutines at once, the measured duration also
// includes DNS lookup and TCP/TLS connection setup that the requests
// contend for — which is why concurrent timings come out higher than
// sequential ones.
//
// NOTE(review): idiomatic Go naming would be getRespTime, but the name
// is kept unchanged so existing callers keep working.
func get_resp_time(url string) {
	time_start := time.Now()
	resp, err := http.Get(url)
	if err != nil {
		// Report and continue instead of panicking: a panic in any
		// goroutine would crash the whole program.
		fmt.Println("error:", url, err)
		return
	}
	defer resp.Body.Close()
	// Duration is time-to-headers, matching the original behavior.
	fmt.Println(time.Since(time_start), url)
	// Drain the body so the transport can reuse the underlying TCP
	// connection instead of opening a new one per request.
	ioutil.ReadAll(resp.Body)
}
func main() {
content, _ := ioutil.ReadFile("url_list.txt")
urls := strings.Split(string(content), "
")
for _, url := range urls {
//get_resp_time(url)
go get_resp_time(url)
}
time.Sleep(20 * time.Second)
}