Improve memory usage of dump by streaming files into the zip instead of loading them all into memory first
This commit is contained in:
parent
c12bac0c96
commit
db0126968a
2 changed files with 17 additions and 10 deletions
|
@ -16,27 +16,25 @@
|
|||
|
||||
package files
|
||||
|
||||
import "bytes"
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// Dump dumps all saved files
|
||||
// This only includes the raw files, no db entries.
|
||||
func Dump() (allFiles map[int64][]byte, err error) {
|
||||
func Dump() (allFiles map[int64]io.ReadCloser, err error) {
|
||||
files := []*File{}
|
||||
err = x.Find(&files)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
allFiles = make(map[int64][]byte, len(files))
|
||||
allFiles = make(map[int64]io.ReadCloser, len(files))
|
||||
for _, file := range files {
|
||||
if err := file.LoadFileByID(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
if _, err := buf.ReadFrom(file.File); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
allFiles[file.ID] = buf.Bytes()
|
||||
allFiles[file.ID] = file.File
|
||||
}
|
||||
|
||||
return
|
||||
|
|
|
@ -80,11 +80,20 @@ func Dump(filename string) error {
|
|||
if err != nil {
|
||||
return fmt.Errorf("error saving file: %s", err)
|
||||
}
|
||||
for fid, fcontent := range allFiles {
|
||||
err = writeBytesToZip("files/"+strconv.FormatInt(fid, 10), fcontent, dumpWriter)
|
||||
for fid, file := range allFiles {
|
||||
header := &zip.FileHeader{
|
||||
Name: "files/" + strconv.FormatInt(fid, 10),
|
||||
Method: compressionUsed,
|
||||
}
|
||||
w, err := dumpWriter.CreateHeader(header)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = io.Copy(w, file)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error writing file %d: %s", fid, err)
|
||||
}
|
||||
_ = file.Close()
|
||||
}
|
||||
log.Infof("Dumped files")
|
||||
|
||||
|
|
Loading…
Reference in a new issue