I'd like to know whether anyone can point out how to optimize this further. I don't like the fact that I have to read the entire file into memory and create a byte slice the length of the file.
Here is the code:
import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
	"mime/multipart"
	"net/http"
	"os"
)

func newfileUploadRequestWithGzip(uri string, paramName, path string) (*http.Request, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	fi, err := f.Stat()
	if err != nil {
		return nil, err
	}

	body := new(bytes.Buffer)
	writer := multipart.NewWriter(body)
	part, err := writer.CreateFormFile(paramName, fi.Name())
	if err != nil {
		return nil, err
	}

	// Read the whole file into a slice of exactly the file's length.
	filebuffer := make([]byte, fi.Size())
	if _, err := io.ReadFull(f, filebuffer); err != nil {
		return nil, err
	}

	// Gzip the slice into an intermediate in-memory buffer.
	var gzbuffer bytes.Buffer
	gw, err := gzip.NewWriterLevel(&gzbuffer, gzip.DefaultCompression)
	if err != nil {
		return nil, err
	}
	n, err := gw.Write(filebuffer)
	if err != nil {
		return nil, err
	}
	if err := gw.Close(); err != nil {
		return nil, err
	}
	// Log original vs. compressed size as a ratio.
	fmt.Printf("%d:%d => %.2f%%\n", n, gzbuffer.Len(), float32(gzbuffer.Len())/float32(n)*100.0)

	// Copy the compressed bytes into the multipart part.
	if _, err := io.Copy(part, &gzbuffer); err != nil {
		return nil, err
	}
	if err := writer.Close(); err != nil {
		return nil, err
	}

	request, err := http.NewRequest("POST", uri, body)
	if err != nil {
		return nil, err
	}
	request.Header.Add("Content-Type", writer.FormDataContentType())
	return request, nil
}
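One way to avoid the file-sized byte slice would be to let the gzip writer write straight into the multipart part and stream the file through it with io.Copy. Below is a minimal sketch of that idea (the function name newfileUploadRequestStreaming is made up for illustration, and it uses filepath.Base(path) for the form file name instead of a Stat call); the multipart body is still assembled in a bytes.Buffer, but neither the raw file contents nor the compressed data needs a full in-memory copy.

import (
	"bytes"
	"compress/gzip"
	"io"
	"mime/multipart"
	"net/http"
	"os"
	"path/filepath"
)

func newfileUploadRequestStreaming(uri string, paramName, path string) (*http.Request, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	body := new(bytes.Buffer)
	writer := multipart.NewWriter(body)
	part, err := writer.CreateFormFile(paramName, filepath.Base(path))
	if err != nil {
		return nil, err
	}

	// Wrap the multipart part in a gzip writer and stream the file through it
	// in chunks; no file-sized slice or intermediate gzip buffer is allocated.
	gw := gzip.NewWriter(part)
	if _, err := io.Copy(gw, f); err != nil {
		return nil, err
	}
	if err := gw.Close(); err != nil {
		return nil, err
	}
	if err := writer.Close(); err != nil {
		return nil, err
	}

	request, err := http.NewRequest("POST", uri, body)
	if err != nil {
		return nil, err
	}
	request.Header.Add("Content-Type", writer.FormDataContentType())
	return request, nil
}

If holding the multipart body itself in memory is also a concern, the same copy chain could be driven through an io.Pipe, passing the pipe's read end to http.NewRequest so the body is produced while the request is being sent.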