Following the hints in the Crawl exercise of 'A Tour of Go', I modified the Crawl function, but I wonder why 'go Crawl' fails to spawn more goroutines: only one "found: ..." line is ever printed.
Is there anything wrong with my modification?
My modification is listed below:
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
    // TODO: Fetch URLs in parallel.
    // TODO: Don't fetch the same URL twice.
    // This implementation doesn't do either:
    if depth <= 0 {
        fmt.Printf("depth <= 0 return")
        return
    }
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    crawled.mux.Lock()
    crawled.c[url]++
    crawled.mux.Unlock()
    for _, u := range urls {
        //crawled.mux.Lock()
        if cnt, ok := crawled.c[u]; ok {
            cnt++
        } else {
            fmt.Println("go ...", u)
            go Crawl(u, depth-1, fetcher)
        }
        //crawled.mux.Unlock()
        //Crawl(u, depth-1, fetcher)
    }
    return
}
type crawledUrl struct {
    c   map[string]int
    mux sync.Mutex
}

var crawled = crawledUrl{c: make(map[string]int)}
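
My current suspicion is that the spawning itself works, but that main exits before the spawned goroutines ever get scheduled: the tour's main just calls Crawl and returns, and a Go program does not wait for outstanding goroutines. The top-level Crawl prints its own "found" line synchronously, which would explain why exactly one URL appears. Below is a minimal sketch of that theory, using sync.WaitGroup to keep main alive; the stubFetcher type and its data are my own stand-in for the tour's fakeFetcher, not the tour code (dedup is omitted to keep the sketch focused on the waiting problem):

package main

import (
    "fmt"
    "sync"
)

// Fetcher matches the interface from the tour exercise.
type Fetcher interface {
    Fetch(url string) (body string, urls []string, err error)
}

// stubFetcher is a hypothetical stand-in for the tour's fakeFetcher.
type stubFetcher map[string][]string

func (f stubFetcher) Fetch(url string) (string, []string, error) {
    if urls, ok := f[url]; ok {
        return "body of " + url, urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

var wg sync.WaitGroup

func Crawl(url string, depth int, fetcher Fetcher) {
    if depth <= 0 {
        return
    }
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    for _, u := range urls {
        wg.Add(1) // register the child before spawning it
        go func(u string) {
            defer wg.Done()
            Crawl(u, depth-1, fetcher)
        }(u)
    }
}

func main() {
    Crawl("a", 4, stubFetcher{
        "a": {"b", "c"},
        "b": {"a"},
        "c": {},
    })
    wg.Wait() // without this, main likely exits before any child goroutine prints
}

If I drop the wg.Wait() at the end, this sketch likely also prints only the first URL, which would match what I am seeing.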
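Separately, while re-reading my listing I noticed two things I am unsure about: cnt++ in the loop only increments a local copy returned by the map lookup, not crawled.c[u], and the Lock/Unlock around that lookup is commented out, so concurrent goroutines would race on the map. Here is a sketch of the check-and-mark pattern I think is intended, as a hypothetical helper markIfNew (my own naming; it uses the crawled variable from the listing above):

// markIfNew reports whether url is seen for the first time,
// recording the visit atomically under the mutex.
func markIfNew(url string) bool {
    crawled.mux.Lock()
    defer crawled.mux.Unlock()
    if _, ok := crawled.c[url]; ok {
        crawled.c[url]++ // already seen: bump the count in the map itself
        return false
    }
    crawled.c[url] = 1
    return true
}

The loop body would then shrink to "if markIfNew(u) { go Crawl(u, depth-1, fetcher) }", so the test and the update happen under a single lock and no URL is crawled twice.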