lib/model: Use same batch size constants in db updater as for protocol

This commit is contained in:
Jakob Borg 2017-08-31 10:47:39 +02:00
parent 4b6e7e7867
commit e33fa10115

View File

@@ -1479,13 +1479,10 @@ func (f *sendReceiveFolder) Jobs() ([]string, []string) {
 // dbUpdaterRoutine aggregates db updates and commits them in batches no
 // larger than 1000 items, and no more delayed than 2 seconds.
 func (f *sendReceiveFolder) dbUpdaterRoutine() {
-	const (
-		maxBatchSize = 1000
-		maxBatchTime = 2 * time.Second
-	)
+	const maxBatchTime = 2 * time.Second
 
-	batch := make([]dbUpdateJob, 0, maxBatchSize)
-	files := make([]protocol.FileInfo, 0, maxBatchSize)
+	batch := make([]dbUpdateJob, 0, maxBatchSizeFiles)
+	files := make([]protocol.FileInfo, 0, maxBatchSizeFiles)
 	tick := time.NewTicker(maxBatchTime)
 	defer tick.Stop()
@@ -1545,6 +1542,7 @@ func (f *sendReceiveFolder) dbUpdaterRoutine() {
 		files = files[:0]
 	}
 
+	batchSizeBytes := 0
 loop:
 	for {
 		select {
@@ -1556,8 +1554,10 @@ loop:
 			job.file.Sequence = 0
 			batch = append(batch, job)
 
-			if len(batch) == maxBatchSize {
+			batchSizeBytes += job.file.ProtoSize()
+			if len(batch) == maxBatchSizeFiles || batchSizeBytes > maxBatchSizeBytes {
 				handleBatch()
+				batchSizeBytes = 0
 			}
 
 		case <-tick.C: