Initial implementation of unified export/import progress

Both export and import progress get updated over a unified channel.
Most importantly this allows updating the import total from latest
export results.
This commit is contained in:
Philipp Wolfer 2025-05-05 11:38:29 +02:00
parent 1f48abc284
commit b8e6ccffdb
No known key found for this signature in database
GPG key ID: 8FDF744D4919943B
18 changed files with 369 additions and 194 deletions

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Scotty is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
@ -77,7 +77,7 @@ func (b *DeezerApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
return nil
}
func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
// Choose a high offset, we attempt to search the loves backwards starting
// at the oldest one.
offset := math.MaxInt32
@ -88,13 +88,18 @@ func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan
totalDuration := startTime.Sub(oldestTimestamp)
p := models.Progress{Total: int64(totalDuration.Seconds())}
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(totalDuration.Seconds()),
},
}
out:
for {
result, err := b.client.UserHistory(offset, perPage)
if err != nil {
progress <- p.Abort()
p.Export.Abort()
progress <- p
results <- models.ListensResult{Error: err}
return
}
@ -102,7 +107,6 @@ out:
// The offset was higher than the actual number of tracks. Adjust the offset
// and continue.
if offset >= result.Total {
p.Total = int64(result.Total)
offset = max(result.Total-perPage, 0)
continue
}
@ -128,7 +132,8 @@ out:
}
remainingTime := startTime.Sub(minTime)
p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
p.Export.TotalItems += len(listens)
p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
progress <- p
results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}
@ -144,23 +149,29 @@ out:
}
results <- models.ListensResult{OldestTimestamp: minTime}
progress <- p.Complete()
p.Export.Complete()
progress <- p
}
func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
// Choose a high offset, we attempt to search the loves backwards starting
// at the oldest one.
offset := math.MaxInt32
perPage := MaxItemsPerGet
p := models.Progress{Total: int64(perPage)}
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(perPage),
},
}
var totalCount int
out:
for {
result, err := b.client.UserTracks(offset, perPage)
if err != nil {
progress <- p.Abort()
p.Export.Abort()
progress <- p
results <- models.LovesResult{Error: err}
return
}
@ -168,8 +179,8 @@ out:
// The offset was higher than the actual number of tracks. Adjust the offset
// and continue.
if offset >= result.Total {
p.Total = int64(result.Total)
totalCount = result.Total
p.Export.Total = int64(totalCount)
offset = max(result.Total-perPage, 0)
continue
}
@ -186,13 +197,14 @@ out:
loves = append(loves, love)
} else {
totalCount -= 1
break
}
}
sort.Sort(loves)
results <- models.LovesResult{Items: loves, Total: totalCount}
p.Elapsed += int64(count)
p.Export.TotalItems = totalCount
p.Export.Total = int64(totalCount)
p.Export.Elapsed += int64(count)
progress <- p
if offset <= 0 {
@ -206,7 +218,8 @@ out:
}
}
progress <- p.Complete()
p.Export.Complete()
progress <- p
}
func (t Listen) AsListen() models.Listen {