Initial implementation of unified export/import progress

Both export and import progress are now updated over a unified channel.
Most importantly, this allows updating the import total from the latest
export results.
Philipp Wolfer 2025-05-05 11:38:29 +02:00
parent 1f48abc284
commit b8e6ccffdb
GPG key ID: 8FDF744D4919943B
18 changed files with 369 additions and 194 deletions
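
The diff switches the progress channels from models.Progress to a combined models.TransferProgress. That type lives in the models package and is not part of this file; the following is only a rough sketch inferred from the call sites below (Export, Total, Elapsed, TotalItems, Abort, Complete, FromImportResult). The Aborted/Done flags and the trimmed ImportResult stub are assumptions added solely to keep the sketch self-contained.

// Hedged sketch of the unified progress types; the real definitions in the
// models package may differ.
package models

// Progress tracks a single phase (export or import) of a transfer.
type Progress struct {
	Total      int64 // expected number of steps (pages, batches, ...)
	Elapsed    int64 // steps finished so far
	TotalItems int   // items seen so far
	Aborted    bool  // assumption: set by Abort
	Done       bool  // assumption: set by Complete
}

// Abort and Complete mutate the receiver, matching the new
// "p.Export.Abort(); progress <- p" pattern used in the diff.
func (p *Progress) Abort()    { p.Aborted = true }
func (p *Progress) Complete() { p.Done = true }

// TransferProgress bundles export and import progress so both can be
// reported over one channel.
type TransferProgress struct {
	Export *Progress
	Import *Progress
}

// ImportResult is trimmed to the single field this sketch needs.
type ImportResult struct {
	ImportCount int
}

// FromImportResult fills the import side of the progress; the boolean
// presumably marks the import as finished (the diff passes false while
// batches are still being submitted).
func (p TransferProgress) FromImportResult(result ImportResult, completed bool) TransferProgress {
	p.Import = &Progress{
		Elapsed:    int64(result.ImportCount),
		TotalItems: result.ImportCount,
		Done:       completed,
	}
	return p
}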


@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@@ -88,13 +88,17 @@ func (b *LastfmApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
 	return nil
 }

-func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	page := MaxPage
 	minTime := oldestTimestamp
 	perPage := MaxItemsPerGet

 	// We need to gather the full list of listens in order to sort them
-	p := models.Progress{Total: int64(page)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(page),
+		},
+	}

 out:
 	for page > 0 {
@@ -108,7 +112,8 @@ out:
 		result, err := b.client.User.GetRecentTracks(args)
 		if err != nil {
 			results <- models.ListensResult{Error: err}
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			return
 		}
@@ -127,11 +132,12 @@ out:
 			timestamp, err := strconv.ParseInt(scrobble.Date.Uts, 10, 64)
 			if err != nil {
 				results <- models.ListensResult{Error: err}
-				progress <- p.Abort()
+				p.Export.Abort()
+				progress <- p
 				break out
 			}
 			if timestamp > oldestTimestamp.Unix() {
-				p.Elapsed += 1
+				p.Export.Elapsed += 1
 				listen := models.Listen{
 					ListenedAt: time.Unix(timestamp, 0),
 					UserName: b.username,
@@ -165,16 +171,18 @@ out:
 			Total: result.Total,
 			OldestTimestamp: minTime,
 		}
-		p.Total = int64(result.TotalPages)
-		p.Elapsed = int64(result.TotalPages - page)
+		p.Export.Total = int64(result.TotalPages)
+		p.Export.Elapsed = int64(result.TotalPages - page)
+		p.Export.TotalItems += len(listens)
 		progress <- p
 	}

 	results <- models.ListensResult{OldestTimestamp: minTime}
-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 }

-func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	total := len(export.Items)
 	for i := 0; i < total; i += MaxListensPerSubmission {
 		listens := export.Items[i:min(i+MaxListensPerSubmission, total)]
@@ -244,20 +252,24 @@ func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResu
 		importResult.UpdateTimestamp(listens[count-1].ListenedAt)
 		importResult.ImportCount += accepted
-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
 }

-func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	// Choose a high offset, we attempt to search the loves backwards starting
 	// at the oldest one.
 	page := 1
 	perPage := MaxItemsPerGet
 	loves := make(models.LovesList, 0, 2*MaxItemsPerGet)
-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}

 	var totalCount int

 out:
@@ -268,12 +280,12 @@ out:
 			"page": page,
 		})
 		if err != nil {
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			results <- models.LovesResult{Error: err}
 			return
 		}

-		p.Total = int64(result.Total)
 		count := len(result.Tracks)
 		if count == 0 {
 			break out
@@ -282,7 +294,8 @@ out:
 		for _, track := range result.Tracks {
 			timestamp, err := strconv.ParseInt(track.Date.Uts, 10, 64)
 			if err != nil {
-				progress <- p.Abort()
+				p.Export.Abort()
+				progress <- p
 				results <- models.LovesResult{Error: err}
 				return
 			}
@@ -308,18 +321,21 @@ out:
 			}
 		}

-		p.Elapsed += int64(count)
+		p.Export.Total += int64(perPage)
+		p.Export.TotalItems = totalCount
+		p.Export.Elapsed += int64(count)
 		progress <- p
 		page += 1
 	}

 	sort.Sort(loves)

+	p.Export.Complete()
+	progress <- p
 	results <- models.LovesResult{Items: loves, Total: totalCount}
-	progress <- p.Complete()
 }

-func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, love := range export.Items {
 		err := b.client.Track.Love(lastfm.P{
 			"track": love.TrackName,
@@ -335,7 +351,7 @@ func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult m
 			importResult.Log(models.Error, msg)
 		}

-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
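
For illustration only: a minimal sketch of how a coordinating routine might consume the unified channel, assuming the TransferProgress shape sketched above. monitorTransfer is not part of this commit and the real coordination code may differ; the point is that the latest export progress can now drive the import total.

// Sketch only: drain the unified channel and let the most recent export
// progress define the total the import phase is measured against.
func monitorTransfer(progress <-chan models.TransferProgress) (imported, total int64) {
	for p := range progress {
		if p.Export != nil {
			// The latest export numbers tell us how many items the
			// import will eventually have to process.
			total = int64(p.Export.TotalItems)
		}
		if p.Import != nil {
			imported = p.Import.Elapsed
		}
	}
	return imported, total
}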