如何使用 Go 限制下载速度?
How to limit download speed with Go?
我目前正在用 Go 开发一个下载服务器。我需要限制用户的下载速度为100KB/s。
这是我的代码:
func serveFile(w http.ResponseWriter, r *http.Request) {
fileID := r.URL.Query().Get("fileID")
if len(fileID) != 0 {
w.Header().Set("Content-Disposition", "attachment; filename=filename.txt")
w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
w.Header().Set("Content-Length", r.Header.Get("Content-Length"))
file, err := os.Open(fmt.Sprintf("../../bin/files/test.txt"))
defer file.Close()
if err != nil {
http.NotFound(w, r)
return
}
io.Copy(w, file)
} else {
io.WriteString(w, "Invalid request.")
}
}
然后我在github上找到了一个包,我的代码变成了下面这样:
func serveFile(w http.ResponseWriter, r *http.Request) {
fileID := r.URL.Query().Get("fileID")
if len(fileID) != 0 {
w.Header().Set("Content-Disposition", "attachment; filename=Wiki.png")
w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
w.Header().Set("Content-Length", r.Header.Get("Content-Length"))
file, err := os.Open(fmt.Sprintf("../../bin/files/test.txt"))
defer file.Close()
if err != nil {
http.NotFound(w, r)
return
}
bucket := ratelimit.NewBucketWithRate(100*1024, 100*1024)
reader := bufio.NewReader(file)
io.Copy(w, ratelimit.Reader(reader, bucket))
} else {
io.WriteString(w, "Invalid request.")
}
}
但是我收到这个错误:
Corrupted Content Error
The page you are trying to view cannot be shown because an error in
the data transmission was detected.
这是我在 Go Playground 上的代码:http://play.golang.org/p/ulgXQl4eQO
我没有看到错误,但我确实注意到代码存在一些问题。为此:
w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
您应该使用 mime package 的:
func TypeByExtension(ext string) string
确定内容类型。(如果最终得到的是空字符串,则默认使用 application/octet-stream。)
对于:
w.Header().Set("Content-Length", r.Header.Get("Content-Length"))
您需要从文件本身获取内容长度。如果照搬请求里的 Content-Length,对于 GET 请求这基本上等于没设置(请求本身不带该头),而对于 POST 请求你会发回错误的长度——这大概就是你看到那个错误的原因。打开文件后,执行以下操作:
fi, err := file.Stat()
if err != nil {
http.Error(w, err.Error(), 500)
return
}
w.Header().Set("Content-Length", fmt.Sprint(fi.Size()))
最后一点,当你打开文件时,如果出现错误,你不需要关闭文件句柄。改为这样做:
file, err := os.Open(...)
if err != nil {
http.NotFound(w, r)
return
}
defer file.Close()
与其自己搞定正确的内容类型和长度 header,不如使用 http.ServeContent
更好,它会为您做这些(以及支持"If-Modified-Since"、范围请求等。如果您可以提供 "ETag" header,它也可以处理 "If-Range" 和 "If-None-Match" 请求)。
如前所述,通常最好在写端进行限制,但包装 http.ResponseWriter
很尴尬,因为各种 http 函数也会检查可选接口,例如 http.Flusher
和 http.Hijacker
.包装 ServeContent
需要的 io.ReadSeeker
更容易。
例如,可能是这样的:
// pathFromID maps a fileID to a path on disk.
// Placeholder: substitute your own lookup logic here.
func pathFromID(fileID string) string {
	const testPath = "../../bin/files/test.txt"
	return testPath
}
// lrs ("limitedReadSeeker") combines an io.ReadSeeker with a throttled
// reader: Seek goes straight to the embedded ReadSeeker, while Read is
// routed through the rate-limited wrapper.
type lrs struct {
	io.ReadSeeker
	// limited must not buffer — it may only delay Read calls while
	// passing them through to the embedded ReadSeeker.
	limited io.Reader
}

// Read satisfies io.Reader via the rate-limited wrapper.
func (l lrs) Read(p []byte) (int, error) {
	return l.limited.Read(p)
}

// newLRS wraps r so that reads are throttled by bucket. We rely on the
// fact that a ratelimit.Reader only adds delays to Read calls and never
// consumes data itself, so Seek on the underlying ReadSeeker stays
// consistent with what has been read.
func newLRS(r io.ReadSeeker, bucket *ratelimit.Bucket) io.ReadSeeker {
	return lrs{r, ratelimit.Reader(r, bucket)}
}
// serveFile serves the file identified by the fileID query parameter,
// throttled to ~100KB/s. http.ServeContent supplies Content-Type,
// Content-Length, range-request and If-Modified-Since handling for us.
func serveFile(w http.ResponseWriter, req *http.Request) {
	fileID := req.URL.Query().Get("fileID")
	if len(fileID) == 0 {
		http.Error(w, "invalid request", http.StatusBadRequest)
		return
	}
	path := pathFromID(fileID)
	file, err := os.Open(path)
	if err != nil {
		http.NotFound(w, req)
		return
	}
	defer file.Close()
	fi, err := file.Stat()
	if err != nil {
		// Report the real error instead of the "blah" placeholder,
		// and use the named status constant instead of a bare 500.
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	const (
		rate     = 100 << 10 // bytes added to the bucket per second
		capacity = 100 << 10 // maximum burst size in bytes
	)
	// Normally we'd prefer to limit the writer, but it's awkward to wrap
	// an http.ResponseWriter since it may optionally also implement
	// http.Flusher or http.Hijacker.
	bucket := ratelimit.NewBucketWithRate(rate, capacity)
	http.ServeContent(w, req, path, fi.ModTime(), newLRS(file, bucket))
}
我目前正在用 Go 开发一个下载服务器。我需要限制用户的下载速度为100KB/s。
这是我的代码:
func serveFile(w http.ResponseWriter, r *http.Request) {
fileID := r.URL.Query().Get("fileID")
if len(fileID) != 0 {
w.Header().Set("Content-Disposition", "attachment; filename=filename.txt")
w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
w.Header().Set("Content-Length", r.Header.Get("Content-Length"))
file, err := os.Open(fmt.Sprintf("../../bin/files/test.txt"))
defer file.Close()
if err != nil {
http.NotFound(w, r)
return
}
io.Copy(w, file)
} else {
io.WriteString(w, "Invalid request.")
}
}
然后我在github上找到了一个包,我的代码变成了下面这样:
func serveFile(w http.ResponseWriter, r *http.Request) {
fileID := r.URL.Query().Get("fileID")
if len(fileID) != 0 {
w.Header().Set("Content-Disposition", "attachment; filename=Wiki.png")
w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
w.Header().Set("Content-Length", r.Header.Get("Content-Length"))
file, err := os.Open(fmt.Sprintf("../../bin/files/test.txt"))
defer file.Close()
if err != nil {
http.NotFound(w, r)
return
}
bucket := ratelimit.NewBucketWithRate(100*1024, 100*1024)
reader := bufio.NewReader(file)
io.Copy(w, ratelimit.Reader(reader, bucket))
} else {
io.WriteString(w, "Invalid request.")
}
}
但是我收到这个错误:
Corrupted Content Error
The page you are trying to view cannot be shown because an error in the data transmission was detected.
这是我在 Go Playground 上的代码:http://play.golang.org/p/ulgXQl4eQO
我没有看到错误,但我确实注意到代码存在一些问题。为此:
w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
您应该使用 mime package 的:
func TypeByExtension(ext string) string
确定内容类型。(如果最终得到的是空字符串,则默认使用 application/octet-stream。)
对于:
w.Header().Set("Content-Length", r.Header.Get("Content-Length"))
您需要从文件本身获取内容长度。如果照搬请求里的 Content-Length,对于 GET 请求这基本上等于没设置(请求本身不带该头),而对于 POST 请求你会发回错误的长度——这大概就是你看到那个错误的原因。打开文件后,执行以下操作:
fi, err := file.Stat()
if err != nil {
http.Error(w, err.Error(), 500)
return
}
w.Header().Set("Content-Length", fmt.Sprint(fi.Size()))
最后一点,当你打开文件时,如果出现错误,你不需要关闭文件句柄。改为这样做:
file, err := os.Open(...)
if err != nil {
http.NotFound(w, r)
return
}
defer file.Close()
与其自己搞定正确的内容类型和长度 header,不如使用 http.ServeContent
更好,它会为您做这些(以及支持"If-Modified-Since"、范围请求等。如果您可以提供 "ETag" header,它也可以处理 "If-Range" 和 "If-None-Match" 请求)。
如前所述,通常最好在写端进行限制,但包装 http.ResponseWriter
很尴尬,因为各种 http 函数也会检查可选接口,例如 http.Flusher
和 http.Hijacker
.包装 ServeContent
需要的 io.ReadSeeker
更容易。
例如,可能是这样的:
// pathFromID maps a fileID to a path on disk.
// Placeholder: substitute your own lookup logic here.
func pathFromID(fileID string) string {
	const testPath = "../../bin/files/test.txt"
	return testPath
}
// lrs ("limitedReadSeeker") combines an io.ReadSeeker with a throttled
// reader: Seek goes straight to the embedded ReadSeeker, while Read is
// routed through the rate-limited wrapper.
type lrs struct {
	io.ReadSeeker
	// limited must not buffer — it may only delay Read calls while
	// passing them through to the embedded ReadSeeker.
	limited io.Reader
}

// Read satisfies io.Reader via the rate-limited wrapper.
func (l lrs) Read(p []byte) (int, error) {
	return l.limited.Read(p)
}

// newLRS wraps r so that reads are throttled by bucket. We rely on the
// fact that a ratelimit.Reader only adds delays to Read calls and never
// consumes data itself, so Seek on the underlying ReadSeeker stays
// consistent with what has been read.
func newLRS(r io.ReadSeeker, bucket *ratelimit.Bucket) io.ReadSeeker {
	return lrs{r, ratelimit.Reader(r, bucket)}
}
// serveFile serves the file identified by the fileID query parameter,
// throttled to ~100KB/s. http.ServeContent supplies Content-Type,
// Content-Length, range-request and If-Modified-Since handling for us.
func serveFile(w http.ResponseWriter, req *http.Request) {
	fileID := req.URL.Query().Get("fileID")
	if len(fileID) == 0 {
		http.Error(w, "invalid request", http.StatusBadRequest)
		return
	}
	path := pathFromID(fileID)
	file, err := os.Open(path)
	if err != nil {
		http.NotFound(w, req)
		return
	}
	defer file.Close()
	fi, err := file.Stat()
	if err != nil {
		// Report the real error instead of the "blah" placeholder,
		// and use the named status constant instead of a bare 500.
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	const (
		rate     = 100 << 10 // bytes added to the bucket per second
		capacity = 100 << 10 // maximum burst size in bytes
	)
	// Normally we'd prefer to limit the writer, but it's awkward to wrap
	// an http.ResponseWriter since it may optionally also implement
	// http.Flusher or http.Hijacker.
	bucket := ratelimit.NewBucketWithRate(rate, capacity)
	http.ServeContent(w, req, path, fi.ModTime(), newLRS(file, bucket))
}