douti0687 2015-08-07 14:31
44 views

How to close a channel

I am trying to adapt this example: https://gobyexample.com/worker-pools

But I don't know how to stop the channel, because the program doesn't exit at the end of the channel loop.

Can you explain how to exit the program?

package main

import (
    "github.com/SlyMarbo/rss"
    "bufio"
    "fmt"
    "log"
    "os"
)

func readLines(path string) ([]string, error) {
    file, err := os.Open(path)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    var lines []string
    scanner := bufio.NewScanner(file)
    for scanner.Scan() {
        lines = append(lines, scanner.Text())
    }
    return lines, scanner.Err()
}


func worker(id int, jobs <-chan string, results chan<- string) {
    for url := range jobs {
        fmt.Println("worker", id, "processing job", url)
        feed, err := rss.Fetch(url)
        if err != nil {
            fmt.Println("Error on: ", url)
            continue
        }
        borne := 0
        for _, value := range feed.Items {
            if borne < 5 {
                results <- value.Link
                borne = borne +1
            } else {
                continue
            }
        }
    }
}


func main() {
    jobs := make(chan string)
    results := make(chan string)

    for w := 1; w <= 16; w++ {
        go worker(w, jobs, results)
    }


    urls, err := readLines("flux.txt")
    if err != nil { 
        log.Fatalf("readLines: %s", err) 
    }

    for _, url := range urls {
        jobs <- url
    }

    close(jobs)

    // it seems program runs over...
    for msg := range results {
        fmt.Println(msg)
    }
}

The flux.txt file is a flat text file with one feed URL per line.


1 answer

  • duanche4578 2015-08-07 18:32

    The problem is that, in the example you are referring to, the main function reads from the results channel exactly 9 times:

    for a := 1; a <= 9; a++ {
        <-results
    }
    

    Your program, on the other hand, does a range loop over results, which has different semantics in Go: the range loop does not stop until the channel is closed.

    for msg := range results {
        fmt.Println(msg)
    }
    

    To fix your problem you need to close the results channel. However, if you just call close(results) before the for loop, you will most probably get a panic, because the workers might still be writing to results.
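
    (For illustration only, this snippet is not part of the fix: a send on a channel that has already been closed always panics at runtime, which is exactly what a still-running worker would do if main closed results too early.)

    ch := make(chan string, 1)
    close(ch)
    ch <- "x" // panic: send on closed channel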

    To fix this, you need a way to be notified when all the workers are done, so that you know when it is safe to close results. You can do this either with a sync.WaitGroup:

    const (
        workers = 16
    )
    
    func main() {
        jobs := make(chan string, 100)
        results := make(chan string, 100)
        var wg sync.WaitGroup
    
        for w := 0; w < workers; w++ {
            wg.Add(1) // add to the WaitGroup before starting the goroutine, so wg.Wait() cannot return too early
            go func(id int) {
                defer wg.Done()
                worker(id, jobs, results)
            }(w) // pass w as an argument so each goroutine keeps its own worker id
        }
    
        urls, err := readLines("flux.txt")
        if err != nil {
            log.Fatalf("readLines: %s", err)
        }
    
        for _, url := range urls {
            jobs <- url
        }
    
        close(jobs)
    
        wg.Wait()
    
        close(results)
    
        // results has been closed, so this range loop terminates once it is drained
        for msg := range results {
            fmt.Println(msg)
        }
    }
    

    Or with a done channel per worker:

    package main
    
    import (
        "bufio"
        "fmt"
        "github.com/SlyMarbo/rss"
        "log"
        "os"
    )
    
    func readLines(path string) ([]string, error) {
        file, err := os.Open(path)
        if err != nil {
            return nil, err
        }
        defer file.Close()
    
        var lines []string
        scanner := bufio.NewScanner(file)
        for scanner.Scan() {
            lines = append(lines, scanner.Text())
        }
        return lines, scanner.Err()
    }
    
    func worker(id int, jobs <-chan string, results chan<- string, done chan struct{}) {
        for url := range jobs {
            fmt.Println("worker", id, "processing job", url)
            feed, err := rss.Fetch(url)
            if err != nil {
                fmt.Println("Error on: ", url)
                continue
            }
            borne := 0
            for _, value := range feed.Items {
                if borne < 5 {
                    results <- value.Link
                    borne = borne + 1
                } else {
                    continue
                }
            }
        }
        close(done)
    }
    
    const (
        workers = 16
    )
    
    func main() {
        jobs := make(chan string, 100)
        results := make(chan string, 100)
        dones := make([]chan struct{}, workers)
    
        for w := 0; w < workers; w++ {
            dones[w] = make(chan struct{})
            go worker(w, jobs, results, dones[w])
        }
    
        urls, err := readLines("flux.txt")
        if err != nil {
            log.Fatalf("readLines: %s", err)
        }
    
        for _, url := range urls {
            jobs <- url
        }
    
    
        close(jobs)
    
        for _, done := range dones {
            <-done
        }
    
        close(results)
    
        // results has been closed, so this range loop terminates once it is drained
        for msg := range results {
            fmt.Println(msg)
        }
    }
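
    As a further variation (not in the original answer), the sync.WaitGroup approach can also hand the closing of results over to a small extra goroutine: main then simply ranges over results, and once every worker has returned the goroutine closes results and the loop ends on its own. A minimal sketch of main under that assumption, reusing worker and readLines from the question and additionally importing "sync":

    const workers = 16

    func main() {
        jobs := make(chan string)
        results := make(chan string)
        var wg sync.WaitGroup

        for w := 1; w <= workers; w++ {
            wg.Add(1) // register before starting, so wg.Wait() cannot return too early
            go func(id int) {
                defer wg.Done()
                worker(id, jobs, results)
            }(w)
        }

        urls, err := readLines("flux.txt")
        if err != nil {
            log.Fatalf("readLines: %s", err)
        }

        // Feed the jobs from a separate goroutine so that main can start
        // draining results right away and no channel ever fills up.
        go func() {
            for _, url := range urls {
                jobs <- url
            }
            close(jobs) // the workers' range over jobs ends here
        }()

        // Close results once all workers are done; this is what makes the
        // range loop below terminate.
        go func() {
            wg.Wait()
            close(results)
        }()

        for msg := range results {
            fmt.Println(msg)
        }
    }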
    
