Simple method for limiting concurrency in Go
I have a CSV file containing ~10k URLs that I need to fetch over HTTP. What is the simplest way to limit goroutines to no more than 16 concurrent at a time?
package main

import (
	"encoding/csv"
	"os"

	"github.com/parnurzeal/gorequest"
)

func getUrl(url string) {
	request := gorequest.New()
	resp, body, errs := request.Get(url).End()
	_ = resp
	_ = body
	_ = errs
}

func main() {
	csvfile, err := os.Open("urls.csv")
	if err != nil {
		panic(err)
	}
	defer csvfile.Close()

	reader := csv.NewReader(csvfile)
	reader.FieldsPerRecord = -1
	rawCSVdata, err := reader.ReadAll()
	if err != nil {
		panic(err)
	}
	completed := 0
	for _, each := range rawCSVdata {
		// Unbounded: one goroutine per URL, and main never waits for them.
		go getUrl(each[1])
		completed++
	}
}
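For comparison, the simplest bound is often a counting semaphore built from a buffered channel, where the channel capacity is the concurrency limit. A minimal sketch, reusing getUrl and rawCSVdata from the question (requires the sync import):

	// Inside main, replacing the unbounded loop above:
	sem := make(chan struct{}, 16) // capacity = max concurrent fetches
	var wg sync.WaitGroup
	for _, each := range rawCSVdata {
		wg.Add(1)
		sem <- struct{}{} // acquire a slot; blocks while 16 fetches are in flight
		go func(url string) {
			defer wg.Done()
			defer func() { <-sem }() // release the slot
			getUrl(url)
		}(each[1])
	}
	wg.Wait() // don't exit main until all fetches finish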
Producer-consumer pattern:
package main

import (
	"encoding/csv"
	"os"
	"sync"

	"github.com/parnurzeal/gorequest"
)

const workersCount = 16

// getUrlWorker consumes URLs from urlChan until the channel is closed.
func getUrlWorker(urlChan chan string) {
	for url := range urlChan {
		request := gorequest.New()
		resp, body, errs := request.Get(url).End()
		_ = resp
		_ = body
		_ = errs
	}
}

func main() {
	csvfile, err := os.Open("urls.csv")
	if err != nil {
		panic(err)
	}
	defer csvfile.Close()

	reader := csv.NewReader(csvfile)
	reader.FieldsPerRecord = -1
	rawCSVdata, err := reader.ReadAll()
	if err != nil {
		panic(err)
	}

	var wg sync.WaitGroup
	urlChan := make(chan string)

	// Start a fixed pool of workers; concurrency never exceeds workersCount.
	wg.Add(workersCount)
	for i := 0; i < workersCount; i++ {
		go func() {
			getUrlWorker(urlChan)
			wg.Done()
		}()
	}

	// Feed URLs to the pool; the unbuffered channel blocks the
	// producer whenever all workers are busy.
	completed := 0
	for _, each := range rawCSVdata {
		urlChan <- each[1]
		completed++
	}

	close(urlChan) // ends each worker's range loop
	wg.Wait()      // wait for in-flight requests to finish
}
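The unbuffered urlChan is what enforces the limit: a send blocks until one of the 16 workers is free to receive, so the producer can never get ahead of the pool. close(urlChan) ends each worker's range loop, and wg.Wait() lets in-flight requests drain before main returns.

If pulling in golang.org/x/sync is acceptable, errgroup gives the same bound with less plumbing. A minimal sketch, assuming the same gorequest fetch as above (SetLimit caps the number of concurrent Go calls; further calls block until a slot frees):

package main

import (
	"encoding/csv"
	"os"

	"github.com/parnurzeal/gorequest"
	"golang.org/x/sync/errgroup"
)

func main() {
	csvfile, err := os.Open("urls.csv")
	if err != nil {
		panic(err)
	}
	defer csvfile.Close()

	reader := csv.NewReader(csvfile)
	reader.FieldsPerRecord = -1
	rawCSVdata, err := reader.ReadAll()
	if err != nil {
		panic(err)
	}

	var g errgroup.Group
	g.SetLimit(16) // at most 16 fetches run at once
	for _, each := range rawCSVdata {
		url := each[1] // capture per iteration (needed before Go 1.22)
		g.Go(func() error {
			_, _, _ = gorequest.New().Get(url).End()
			return nil
		})
	}
	_ = g.Wait() // no fetch returns an error here, but Wait still blocks until all finish
}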