I'm new to Go and trying to implement a web crawler. It should asynchronously parse web pages and save their contents to files, one file per new page. But it gets stuck after I added these lines:
u, _ := url.Parse(uri)
fileName := u.Host + u.RawQuery + ".html"
body, err := ioutil.ReadAll(resp.Body)
writes <- writer{fileName: fileName, body: body}
Can anyone help me fix this? Basically I want to read the data from the response body, push it onto the writes channel, and then receive it from the channel and write it to a file.
It looks like the writes channel is initialized (it is created with make), but nothing ever receives from it: main is stuck in the for uri := range filteredQueue loop, which never ends because filteredQueue is never closed, so the select that drains writes is never reached and every send on the unbuffered writes channel blocks forever.
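As a minimal, self-contained illustration of that behavior (separate from the crawler): a send on an unbuffered channel blocks until some other goroutine is ready to receive, and with no receiver at all the program hangs (the runtime reports "all goroutines are asleep - deadlock!" when nothing else is running).

package main

func main() {
    ch := make(chan int) // unbuffered, like the writes channel
    ch <- 1              // blocks forever: no goroutine ever receives from ch
}

The full program in question is below; a sketch of one possible fix follows the code.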
package main

import (
    "crypto/tls"
    "flag"
    "fmt"
    "io/ioutil"
    "net/http"
    "net/url"
    "os"
    "runtime"

    "./linksCollector"
)

// writer pairs a fetched page body with the file name it should be saved under.
type writer struct {
    fileName string
    body     []byte
}

// writes carries fetched pages to the file-writing loop in main.
var writes = make(chan writer)

func usage() {
    fmt.Fprintf(os.Stderr, "usage: crawl http://example.com/")
    flag.PrintDefaults()
    os.Exit(2)
}

// check panics on any non-nil error.
func check(e error) {
    if e != nil {
        panic(e)
    }
}

func main() {
    runtime.GOMAXPROCS(8)
    flag.Usage = usage
    flag.Parse()
    args := flag.Args()
    fmt.Println(args)
    if len(args) < 1 {
        usage()
        fmt.Println("Please specify start page")
        os.Exit(1)
    }

    queue := make(chan string)
    filteredQueue := make(chan string)

    go func() { queue <- args[0] }()
    go filterQueue(queue, filteredQueue)

    // Feed every deduplicated URL to a new enqueue goroutine.
    for uri := range filteredQueue {
        go enqueue(uri, queue)
    }

    // Write each fetched page to its own file.
    for {
        select {
        case data := <-writes:
            f, err := os.Create(data.fileName)
            check(err)
            defer f.Close()
            _, err = f.Write(data.body)
            check(err)
        }
    }
}

// filterQueue forwards each URL from in to out only the first time it is seen.
func filterQueue(in chan string, out chan string) {
    var seen = make(map[string]bool)
    for val := range in {
        if !seen[val] {
            seen[val] = true
            out <- val
        }
    }
}

// enqueue fetches uri, sends its body on writes, and queues every link it finds.
func enqueue(uri string, queue chan string) {
    fmt.Println("fetching", uri)
    transport := &http.Transport{
        TLSClientConfig: &tls.Config{
            InsecureSkipVerify: true,
        },
    }
    client := http.Client{Transport: transport}
    resp, err := client.Get(uri)
    check(err)
    defer resp.Body.Close()

    u, _ := url.Parse(uri)
    fileName := u.Host + u.RawQuery + ".html"
    body, err := ioutil.ReadAll(resp.Body)
    writes <- writer{fileName: fileName, body: body}

    links := collectlinks.All(resp.Body)
    for _, link := range links {
        absolute := fixURL(link, uri)
        if uri != "" {
            go func() { queue <- absolute }()
        }
    }
}

// fixURL resolves href against base and returns an absolute URL, or "" if parsing fails.
func fixURL(href, base string) string {
    uri, err := url.Parse(href)
    if err != nil {
        return ""
    }
    baseURL, err := url.Parse(base)
    if err != nil {
        return ""
    }
    uri = baseURL.ResolveReference(uri)
    return uri.String()
}
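One way to get it unstuck, sketched below, is to drain writes in a dedicated goroutine started before the range over filteredQueue, so that enqueue's sends always have a receiver. This is only a sketch under the assumption that the rest of the code stays as above: it reuses writer, writes, usage, check, filterQueue, and enqueue from the question, so it is not a standalone program, and it still runs until interrupted because filteredQueue is never closed.

func main() {
    flag.Usage = usage
    flag.Parse()
    args := flag.Args()
    if len(args) < 1 {
        usage()
    }

    queue := make(chan string)
    filteredQueue := make(chan string)

    go func() { queue <- args[0] }()
    go filterQueue(queue, filteredQueue)

    // Drain writes in its own goroutine so that enqueue's sends
    // always have a receiver and can never block forever.
    go func() {
        for data := range writes {
            f, err := os.Create(data.fileName)
            check(err)
            _, err = f.Write(data.body)
            check(err)
            f.Close() // close each file right away instead of defer-ing inside a loop
        }
    }()

    // The main goroutine keeps feeding the crawler as before.
    for uri := range filteredQueue {
        go enqueue(uri, queue)
    }
}

Separately, resp.Body is a stream that can only be read once: after ioutil.ReadAll(resp.Body) drains it, collectlinks.All(resp.Body) sees an empty reader and finds no links, so the crawl would stop after the first page even with the channel issue fixed. Assuming collectlinks.All accepts any io.Reader (the local ./linksCollector package isn't shown, so that is an assumption), the bytes already read can simply be re-read after importing "bytes":

    body, err := ioutil.ReadAll(resp.Body)
    check(err)
    writes <- writer{fileName: fileName, body: body}
    links := collectlinks.All(bytes.NewReader(body)) // re-read the bytes already in memory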