5

我正试图了解 goroutines。我写了一个简单的程序,可以跨多个搜索引擎并行执行相同的搜索。目前,为了知道所有搜索何时完成,我手动统计已收到的回复数量。不过这种做法似乎不太专业。

有没有更好的方法来知道我何时收到了以下代码中所有 goroutine 的响应?

package main

import (
    "fmt"
    "net/http"
    "log"
)

// Query is the result of one search request: the URL that was fetched
// and the HTTP status line returned for it (e.g. "200 OK").
type Query struct {
    url string    // full request URL (engine prefix + search term)
    status string // resp.Status text from the HTTP response
}

// search fetches url and reports its HTTP status line on out.
// It always sends exactly one Query — even when the request fails —
// so a receiver can count one response per goroutine without hanging.
func search(url string, out chan Query) {
    fmt.Printf("Fetching URL %s\n", url)
    resp, err := http.Get(url)
    if err != nil {
        // Don't log.Fatal here: calling os.Exit from inside one goroutine
        // would abort every other in-flight search. Log it and report the
        // failure as the status instead.
        log.Printf("fetching %s: %v", url, err)
        out <- Query{url, "error: " + err.Error()}
        return
    }
    defer resp.Body.Close()

    out <- Query{url, resp.Status}
}

// main fires one search goroutine per engine and then receives exactly
// one result per engine from the shared channel.
func main() {
    searchTerm := "carrot"

    fmt.Println("Hello world! Searching for ", searchTerm)

    searchEngines := []string{
        "http://www.bing.co.uk/?q=",
        "http://www.google.co.uk/?q=",
        "http://www.yahoo.co.uk/?q="}

    out := make(chan Query)

    // The argument is evaluated when the go statement runs, so each
    // goroutine gets its own URL — no loop-variable capture issue.
    for _, engine := range searchEngines {
        go search(engine+searchTerm, out)
    }

    // Each goroutine sends exactly one Query, so receiving
    // len(searchEngines) values is the termination condition — no
    // manual progress counter or polling loop needed.
    for range searchEngines {
        query := <-out
        fmt.Printf("Status from %s was %s\n", query.url, query.status)
    }
}
4

1 回答 1

12

请使用 sync.WaitGroup,它的包文档(pkg doc)中有一个示例:

searchEngines := []string{
    "http://www.bing.co.uk/?q=",
    "http://www.google.co.uk/?q=",
    "http://www.yahoo.co.uk/?q="}
var wg sync.WaitGroup
out := make(chan Query)

for _, engine := range searchEngines {
    wg.Add(1)
    go func(url string) {
        defer wg.Done()
        fmt.Printf("Fetching URL %s\n", url)
        resp, err := http.Get(url)
        if err != nil {
            // log.Fatal here would kill the whole process from inside a
            // goroutine; log and return instead.
            log.Printf("fetching %s: %v", url, err)
            return
        }
        defer resp.Body.Close()
        out <- Query{url, resp.Status}
    }(engine + searchTerm)
}

// Close out once every worker is done so the range loop below terminates.
// Without a receiver this snippet deadlocks: out is unbuffered, each
// goroutine blocks on its send, wg.Done never runs, and wg.Wait blocks
// forever. Only the sender side closes the channel.
go func() {
    wg.Wait()
    close(out)
}()

for query := range out {
    fmt.Printf("Status from %s was %s\n", query.url, query.status)
}
于 2013-02-23T09:48:51.510 回答