问:
我需要不停地 GET 一个 URL,
但是不管我开启 20 个还是 100 个 goroutine 进行 http.Get,
最终都是每秒能请求 10 次左右。
能不能修改这个限制?
我的需求和这个比较类似
http://www.cnblogs.com/eaglet/archive/2012/05/18/2507179.html
答:
不像有这个限制啊,跑跑下面的代码看看。
package main
import (
"flag"
"fmt"
"io/ioutil"
"net/http"
"os"
"strconv"
"time"
)
// Command-line flags for the ad-hoc load tester:
//
//	-n   total number of requests to issue
//	-c   number of concurrent worker goroutines
//	-url target to GET
//	-p   "true" to print each response body
var (
	ns  = flag.String("n", "", "-n 100 | -n 1000 | ...")
	cs  = flag.String("c", "", "-c 10 | -c 100 | ...")
	url = flag.String("url", "", "-url http://localhost:8080/")
	ps  = flag.String("p", "", "true|false")
)
// main drives a simple ab-style load test: it fires n GET requests at
// the target URL using c concurrent workers, prints progress roughly
// every n/10 completed requests, and reports the achieved request rate.
func main() {
	flag.Parse()
	n, err := strconv.Atoi(*ns) // total number of requests
	if err != nil {
		fmt.Println("cant parse n", err, *ns)
		os.Exit(1)
	}
	c, err := strconv.Atoi(*cs) // concurrency level (worker count)
	if err != nil {
		fmt.Println("cant parse c", err, *cs)
		os.Exit(1)
	}
	if *url == "" {
		fmt.Println("please input url")
		os.Exit(1)
	}
	fmt.Println(n, c, *url)

	// Fill the task channel up front, then close it so workers can simply
	// range over it. The original checked `len(tasks) > 0` and then
	// received, which races: with one task left, two workers can both
	// pass the check and one blocks forever on the receive, so the
	// completion count never reaches n and the program hangs.
	tasks := make(chan bool, n)
	for i := 0; i < n; i++ {
		tasks <- true
	}
	close(tasks)

	begin := time.Now()
	cntChan := make(chan bool)
	for i := 0; i < c; i++ {
		go func() {
			for range tasks { // exits cleanly when tasks is drained
				fetchOnce(*url, *ps == "true")
				cntChan <- true
			}
		}()
	}

	// Progress reporting: every n/10 requests plus a guaranteed final
	// line. Guarding step > 0 fixes the original's division by zero in
	// `cnt % (n/10)` when n < 10.
	step := n / 10
	cnt := 0
	for cnt < n {
		<-cntChan
		cnt++
		if cnt == n || (step > 0 && cnt%step == 0) {
			fmt.Printf("finished %d request \n", cnt)
		}
	}

	ts := time.Since(begin).Seconds()
	fmt.Printf("%.3f requests/second \n", float64(cnt)/ts)
}

// fetchOnce performs a single GET against target and panics on any
// transport or read error (acceptable for a throwaway benchmark tool).
// Closing the body once per request — instead of the original's defer
// inside a loop, which postponed every Close until goroutine exit —
// releases each connection promptly so the transport can reuse it.
func fetchOnce(target string, printBody bool) {
	res, err := http.Get(target)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()
	content, err := ioutil.ReadAll(res.Body)
	if err != nil {
		panic(err)
	}
	if printBody {
		fmt.Println(string(content))
	}
}