I have a CSV file with ~10k URLs that I need to HTTP GET. What is the simplest way to limit the concurrency of goroutines to no more than 16 at a time?
// getUrl performs a single HTTP GET against the given url using gorequest.
// The response, body, and any errors are currently discarded; wire them up
// if callers need the results.
func getUrl(url string) {
	request := gorequest.New()
	// Fix: fetch the url parameter — the original referenced `each[1]`,
	// which is not in scope inside this function.
	resp, body, errs := request.Get(url).End()
	_ = resp
	_ = body
	_ = errs // NOTE(review): errors are silently dropped — consider logging them
}
// main reads urls.csv and fetches every URL concurrently, allowing at most
// 16 requests in flight at any time. A buffered channel is used as a
// counting semaphore so no extra imports are required.
func main() {
	csvfile, err := os.Open("urls.csv")
	if err != nil {
		panic(err) // cannot proceed without the input file
	}
	defer csvfile.Close()

	reader := csv.NewReader(csvfile)
	reader.FieldsPerRecord = -1 // records may have varying column counts

	rawCSVdata, err := reader.ReadAll()
	if err != nil {
		panic(err)
	}

	// sem is a counting semaphore: each goroutine must acquire a slot
	// before fetching, so at most 16 HTTP requests run concurrently.
	sem := make(chan struct{}, 16)
	// done signals one completed fetch per receive, letting main wait for
	// all work before exiting (the original returned immediately, killing
	// the in-flight goroutines).
	done := make(chan struct{})

	for _, each := range rawCSVdata {
		// Pass the URL as an argument so each goroutine gets its own copy.
		go func(u string) {
			sem <- struct{}{} // acquire a slot
			getUrl(u)
			<-sem // release the slot
			done <- struct{}{}
		}(each[1]) // NOTE(review): assumes column 1 holds the URL — confirm against the CSV layout
	}

	// completed now counts finished requests, not merely launched goroutines.
	completed := 0
	for range rawCSVdata {
		<-done
		completed++
	}
}