Mirror of https://git.sr.ht/~phw/scotty, synced 2025-05-10 18:27:03 +02:00
Initial implementation of unified export/import progress
Both export and import progress get updated over a unified channel. Most importantly, this allows updating the import total from the latest export results.
parent 1f48abc284
commit b8e6ccffdb
18 changed files with 369 additions and 194 deletions
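The models.TransferProgress type the description refers to is defined outside the hunks shown on this page, so its exact definition is not visible here. The following is only a minimal Go sketch of a shape consistent with how the backends below use it; the field and method names are taken from that usage, while the struct internals, the flag fields, and the trimmed-down ImportResult are assumptions made for illustration.

package models

// ImportResult is reduced here to the single field this sketch needs
// (assumption; the real type carries more state).
type ImportResult struct {
	ImportCount int
}

// Progress tracks one direction of a transfer (export or import).
type Progress struct {
	TotalItems int   // items gathered so far
	Total      int64 // total units of work (items, pages or seconds)
	Elapsed    int64 // units of work already done
	Aborted    bool
	Completed  bool
}

func (p *Progress) Abort()    { p.Aborted = true }
func (p *Progress) Complete() { p.Completed = true }

// TransferProgress is the single value sent over the unified progress
// channel; a side is nil when that direction is not active.
type TransferProgress struct {
	Export *Progress
	Import *Progress
}

// FromImportResult fills the import side from an import result; the boolean
// marks the import as finished.
func (t TransferProgress) FromImportResult(result ImportResult, completed bool) TransferProgress {
	t.Import = &Progress{
		Elapsed:   int64(result.ImportCount),
		Completed: completed,
	}
	return t
}

With both directions carried by one value on a single channel, the import side can pick up the exporter's latest totals whenever an export update arrives, which is what makes the "updating the import total from the latest export results" above possible.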
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 This file is part of Scotty.

@@ -18,7 +18,6 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
 package backends_test

 import (
-	"reflect"
 	"testing"

 	"github.com/spf13/viper"
@@ -33,6 +32,7 @@ import (
 	"go.uploadedlobster.com/scotty/internal/backends/maloja"
 	"go.uploadedlobster.com/scotty/internal/backends/scrobblerlog"
 	"go.uploadedlobster.com/scotty/internal/backends/spotify"
+	"go.uploadedlobster.com/scotty/internal/backends/spotifyhistory"
 	"go.uploadedlobster.com/scotty/internal/backends/subsonic"
 	"go.uploadedlobster.com/scotty/internal/config"
 	"go.uploadedlobster.com/scotty/internal/i18n"
@@ -93,9 +93,9 @@ func TestImplementsInterfaces(t *testing.T) {
 	expectInterface[models.LovesExport](t, &funkwhale.FunkwhaleApiBackend{})
 	// expectInterface[models.LovesImport](t, &funkwhale.FunkwhaleApiBackend{})

-	// expectInterface[models.ListensExport](t, &jspf.JSPFBackend{})
+	expectInterface[models.ListensExport](t, &jspf.JSPFBackend{})
 	expectInterface[models.ListensImport](t, &jspf.JSPFBackend{})
-	// expectInterface[models.LovesExport](t, &jspf.JSPFBackend{})
+	expectInterface[models.LovesExport](t, &jspf.JSPFBackend{})
 	expectInterface[models.LovesImport](t, &jspf.JSPFBackend{})

 	// expectInterface[models.ListensExport](t, &lastfm.LastfmApiBackend{})
@@ -115,6 +115,8 @@ func TestImplementsInterfaces(t *testing.T) {
 	expectInterface[models.LovesExport](t, &spotify.SpotifyApiBackend{})
 	// expectInterface[models.LovesImport](t, &spotify.SpotifyApiBackend{})

+	expectInterface[models.ListensExport](t, &spotifyhistory.SpotifyHistoryBackend{})
+
 	expectInterface[models.ListensExport](t, &scrobblerlog.ScrobblerLogBackend{})
 	expectInterface[models.ListensImport](t, &scrobblerlog.ScrobblerLogBackend{})

@@ -125,6 +127,6 @@ func TestImplementsInterfaces(t *testing.T) {
 func expectInterface[T interface{}](t *testing.T, backend models.Backend) {
 	ok, name := backends.ImplementsInterface[T](&backend)
 	if !ok {
-		t.Errorf("%v expected to implement %v", reflect.TypeOf(backend).Name(), name)
+		t.Errorf("%v expected to implement %v", backend.Name(), name)
 	}
 }
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@@ -77,7 +77,7 @@ func (b *DeezerApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
 	return nil
 }

-func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	// Choose a high offset, we attempt to search the loves backwards starting
 	// at the oldest one.
 	offset := math.MaxInt32
@@ -88,13 +88,18 @@ func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan

 	totalDuration := startTime.Sub(oldestTimestamp)

-	p := models.Progress{Total: int64(totalDuration.Seconds())}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(totalDuration.Seconds()),
+		},
+	}

 out:
 	for {
 		result, err := b.client.UserHistory(offset, perPage)
 		if err != nil {
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			results <- models.ListensResult{Error: err}
 			return
 		}
@@ -102,7 +107,6 @@ out:
 		// The offset was higher then the actual number of tracks. Adjust the offset
 		// and continue.
 		if offset >= result.Total {
-			p.Total = int64(result.Total)
 			offset = max(result.Total-perPage, 0)
 			continue
 		}
@@ -128,7 +132,8 @@ out:
 		}

 		remainingTime := startTime.Sub(minTime)
-		p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
+		p.Export.TotalItems += len(listens)
+		p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
 		progress <- p
 		results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}

@@ -144,23 +149,29 @@ out:
 	}

 	results <- models.ListensResult{OldestTimestamp: minTime}
-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 }

-func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	// Choose a high offset, we attempt to search the loves backwards starting
 	// at the oldest one.
 	offset := math.MaxInt32
 	perPage := MaxItemsPerGet

-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}
 	var totalCount int

 out:
 	for {
 		result, err := b.client.UserTracks(offset, perPage)
 		if err != nil {
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			results <- models.LovesResult{Error: err}
 			return
 		}
@@ -168,8 +179,8 @@ out:
 		// The offset was higher then the actual number of tracks. Adjust the offset
 		// and continue.
 		if offset >= result.Total {
-			p.Total = int64(result.Total)
 			totalCount = result.Total
+			p.Export.Total = int64(totalCount)
 			offset = max(result.Total-perPage, 0)
 			continue
 		}
@@ -186,13 +197,14 @@ out:
 				loves = append(loves, love)
 			} else {
 				totalCount -= 1
-				break
 			}
 		}

 		sort.Sort(loves)
 		results <- models.LovesResult{Items: loves, Total: totalCount}
-		p.Elapsed += int64(count)
+		p.Export.TotalItems = totalCount
+		p.Export.Total = int64(totalCount)
+		p.Export.Elapsed += int64(count)
 		progress <- p

 		if offset <= 0 {
@@ -206,7 +218,8 @@ out:
 		}
 	}

-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 }

 func (t Listen) AsListen() models.Listen {
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 This file is part of Scotty.

@@ -36,27 +36,27 @@ func (b *DumpBackend) InitConfig(config *config.ServiceConfig) error {
 func (b *DumpBackend) StartImport() error { return nil }
 func (b *DumpBackend) FinishImport() error { return nil }

-func (b *DumpBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *DumpBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, listen := range export.Items {
 		importResult.UpdateTimestamp(listen.ListenedAt)
 		importResult.ImportCount += 1
 		msg := fmt.Sprintf("🎶 %v: \"%v\" by %v (%v)",
 			listen.ListenedAt, listen.TrackName, listen.ArtistName(), listen.RecordingMBID)
 		importResult.Log(models.Info, msg)
-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
 }

-func (b *DumpBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *DumpBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, love := range export.Items {
 		importResult.UpdateTimestamp(love.Created)
 		importResult.ImportCount += 1
 		msg := fmt.Sprintf("❤️ %v: \"%v\" by %v (%v)",
 			love.Created, love.TrackName, love.ArtistName(), love.RecordingMBID)
 		importResult.Log(models.Info, msg)
-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
@@ -23,7 +23,7 @@ import (

 type ExportProcessor[T models.ListensResult | models.LovesResult] interface {
 	ExportBackend() models.Backend
-	Process(oldestTimestamp time.Time, results chan T, progress chan models.Progress)
+	Process(oldestTimestamp time.Time, results chan T, progress chan models.TransferProgress)
 }

 type ListensExportProcessor struct {
@@ -34,9 +34,8 @@ func (p ListensExportProcessor) ExportBackend() models.Backend {
 	return p.Backend
 }

-func (p ListensExportProcessor) Process(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (p ListensExportProcessor) Process(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	defer close(results)
-	defer close(progress)
 	p.Backend.ExportListens(oldestTimestamp, results, progress)
 }

@@ -48,8 +47,7 @@ func (p LovesExportProcessor) ExportBackend() models.Backend {
 	return p.Backend
 }

-func (p LovesExportProcessor) Process(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (p LovesExportProcessor) Process(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	defer close(results)
-	defer close(progress)
 	p.Backend.ExportLoves(oldestTimestamp, results, progress)
 }
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 This file is part of Scotty.

@@ -60,19 +60,26 @@ func (b *FunkwhaleApiBackend) InitConfig(config *config.ServiceConfig) error {
 	return nil
 }

-func (b *FunkwhaleApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *FunkwhaleApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	page := 1
 	perPage := MaxItemsPerGet

 	// We need to gather the full list of listens in order to sort them
 	listens := make(models.ListensList, 0, 2*perPage)
-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}

 out:
 	for {
 		result, err := b.client.GetHistoryListenings(b.username, page, perPage)
 		if err != nil {
+			p.Export.Abort()
+			progress <- p
 			results <- models.ListensResult{Error: err}
+			return
 		}

 		count := len(result.Results)
@@ -83,7 +90,7 @@ out:
 		for _, fwListen := range result.Results {
 			listen := fwListen.AsListen()
 			if listen.ListenedAt.After(oldestTimestamp) {
-				p.Elapsed += 1
+				p.Export.Elapsed += 1
 				listens = append(listens, listen)
 			} else {
 				break out
@@ -92,34 +99,42 @@ out:

 		if result.Next == "" {
 			// No further results
-			p.Total = p.Elapsed
-			p.Total -= int64(perPage - count)
+			p.Export.Total = p.Export.Elapsed
+			p.Export.Total -= int64(perPage - count)
 			break out
 		}

-		p.Total += int64(perPage)
+		p.Export.TotalItems = len(listens)
+		p.Export.Total += int64(perPage)
 		progress <- p
 		page += 1
 	}

 	sort.Sort(listens)
-	progress <- p.Complete()
+	p.Export.TotalItems = len(listens)
+	p.Export.Complete()
+	progress <- p
 	results <- models.ListensResult{Items: listens}
 }

-func (b *FunkwhaleApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *FunkwhaleApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	page := 1
 	perPage := MaxItemsPerGet

 	// We need to gather the full list of listens in order to sort them
 	loves := make(models.LovesList, 0, 2*perPage)
-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}

 out:
 	for {
 		result, err := b.client.GetFavoriteTracks(page, perPage)
 		if err != nil {
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			results <- models.LovesResult{Error: err}
 			return
 		}
@@ -132,7 +147,7 @@ out:
 		for _, favorite := range result.Results {
 			love := favorite.AsLove()
 			if love.Created.After(oldestTimestamp) {
-				p.Elapsed += 1
+				p.Export.Elapsed += 1
 				loves = append(loves, love)
 			} else {
 				break out
@@ -144,13 +159,16 @@ out:
 			break out
 		}

-		p.Total += int64(perPage)
+		p.Export.TotalItems = len(loves)
+		p.Export.Total += int64(perPage)
 		progress <- p
 		page += 1
 	}

 	sort.Sort(loves)
-	progress <- p.Complete()
+	p.Export.TotalItems = len(loves)
+	p.Export.Complete()
+	progress <- p
 	results <- models.LovesResult{Items: loves}
 }

@@ -23,8 +23,8 @@ import (

 type ImportProcessor[T models.ListensResult | models.LovesResult] interface {
 	ImportBackend() models.ImportBackend
-	Process(results chan T, out chan models.ImportResult, progress chan models.Progress)
-	Import(export T, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error)
+	Process(results chan T, out chan models.ImportResult, progress chan models.TransferProgress)
+	Import(export T, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error)
 }

 type ListensImportProcessor struct {
@@ -35,11 +35,11 @@ func (p ListensImportProcessor) ImportBackend() models.ImportBackend {
 	return p.Backend
 }

-func (p ListensImportProcessor) Process(results chan models.ListensResult, out chan models.ImportResult, progress chan models.Progress) {
+func (p ListensImportProcessor) Process(results chan models.ListensResult, out chan models.ImportResult, progress chan models.TransferProgress) {
 	process(p, results, out, progress)
 }

-func (p ListensImportProcessor) Import(export models.ListensResult, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (p ListensImportProcessor) Import(export models.ListensResult, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	if export.Error != nil {
 		return result, export.Error
 	}
@@ -64,11 +64,11 @@ func (p LovesImportProcessor) ImportBackend() models.ImportBackend {
 	return p.Backend
 }

-func (p LovesImportProcessor) Process(results chan models.LovesResult, out chan models.ImportResult, progress chan models.Progress) {
+func (p LovesImportProcessor) Process(results chan models.LovesResult, out chan models.ImportResult, progress chan models.TransferProgress) {
 	process(p, results, out, progress)
 }

-func (p LovesImportProcessor) Import(export models.LovesResult, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (p LovesImportProcessor) Import(export models.LovesResult, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	if export.Error != nil {
 		return result, export.Error
 	}
@@ -85,11 +85,10 @@ func (p LovesImportProcessor) Import(export models.LovesResult, result models.Im
 	return importResult, nil
 }

-func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](processor P, results chan R, out chan models.ImportResult, progress chan models.Progress) {
+func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](processor P, results chan R, out chan models.ImportResult, progress chan models.TransferProgress) {
 	defer close(out)
-	defer close(progress)
 	result := models.ImportResult{}
-	p := models.Progress{}
+	p := models.TransferProgress{}

 	if err := processor.ImportBackend().StartImport(); err != nil {
 		out <- handleError(result, err, progress)
@@ -104,7 +103,7 @@ func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](
 			out <- handleError(result, err, progress)
 			return
 		}
-		progress <- p.FromImportResult(result)
+		progress <- p.FromImportResult(result, false)
 	}

 	if err := processor.ImportBackend().FinishImport(); err != nil {
@@ -112,12 +111,14 @@ func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](
 		return
 	}

-	progress <- p.FromImportResult(result).Complete()
+	progress <- p.FromImportResult(result, true)
 	out <- result
 }

-func handleError(result models.ImportResult, err error, progress chan models.Progress) models.ImportResult {
+func handleError(result models.ImportResult, err error, progress chan models.TransferProgress) models.ImportResult {
 	result.Error = err
-	progress <- models.Progress{}.FromImportResult(result).Abort()
+	p := models.TransferProgress{}.FromImportResult(result, false)
+	p.Import.Abort()
+	progress <- p
 	return result
 }
@@ -93,10 +93,15 @@ func (b *JSPFBackend) FinishImport() error {
 	return b.writeJSPF()
 }

-func (b *JSPFBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *JSPFBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	err := b.readJSPF()
+	p := models.TransferProgress{
+		Export: &models.Progress{},
+	}
+
 	if err != nil {
-		progress <- models.Progress{}.Abort()
+		p.Export.Abort()
+		progress <- p
 		results <- models.ListensResult{Error: err}
 		return
 	}
@@ -109,11 +114,13 @@ func (b *JSPFBackend) ExportListens(oldestTimestamp time.Time, results chan mode
 		}
 	}
 	sort.Sort(listens)
-	progress <- models.Progress{Total: int64(len(listens))}.Complete()
+	p.Export.Total = int64(len(listens))
+	p.Export.Complete()
+	progress <- p
 	results <- models.ListensResult{Items: listens}
 }

-func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, listen := range export.Items {
 		track := listenAsTrack(listen)
 		b.playlist.Tracks = append(b.playlist.Tracks, track)
@@ -121,14 +128,19 @@ func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult mo
 		importResult.UpdateTimestamp(listen.ListenedAt)
 	}

-	progress <- models.Progress{}.FromImportResult(importResult)
+	progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	return importResult, nil
 }

-func (b *JSPFBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *JSPFBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	err := b.readJSPF()
+	p := models.TransferProgress{
+		Export: &models.Progress{},
+	}
+
 	if err != nil {
-		progress <- models.Progress{}.Abort()
+		p.Export.Abort()
+		progress <- p
 		results <- models.LovesResult{Error: err}
 		return
 	}
@@ -141,11 +153,13 @@ func (b *JSPFBackend) ExportLoves(oldestTimestamp time.Time, results chan models
 		}
 	}
 	sort.Sort(loves)
-	progress <- models.Progress{Total: int64(len(loves))}.Complete()
+	p.Export.Total = int64(len(loves))
+	p.Export.Complete()
+	progress <- p
 	results <- models.LovesResult{Items: loves}
 }

-func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, love := range export.Items {
 		track := loveAsTrack(love)
 		b.playlist.Tracks = append(b.playlist.Tracks, track)
@@ -153,7 +167,7 @@ func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models
 		importResult.UpdateTimestamp(love.Created)
 	}

-	progress <- models.Progress{}.FromImportResult(importResult)
+	progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	return importResult, nil
 }

@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@@ -88,13 +88,17 @@ func (b *LastfmApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
 	return nil
 }

-func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	page := MaxPage
 	minTime := oldestTimestamp
 	perPage := MaxItemsPerGet

 	// We need to gather the full list of listens in order to sort them
-	p := models.Progress{Total: int64(page)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(page),
+		},
+	}

 out:
 	for page > 0 {
@@ -108,7 +112,8 @@ out:
 		result, err := b.client.User.GetRecentTracks(args)
 		if err != nil {
 			results <- models.ListensResult{Error: err}
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			return
 		}

@@ -127,11 +132,12 @@ out:
 			timestamp, err := strconv.ParseInt(scrobble.Date.Uts, 10, 64)
 			if err != nil {
 				results <- models.ListensResult{Error: err}
-				progress <- p.Abort()
+				p.Export.Abort()
+				progress <- p
 				break out
 			}
 			if timestamp > oldestTimestamp.Unix() {
-				p.Elapsed += 1
+				p.Export.Elapsed += 1
 				listen := models.Listen{
 					ListenedAt: time.Unix(timestamp, 0),
 					UserName: b.username,
@@ -165,16 +171,18 @@ out:
 			Total: result.Total,
 			OldestTimestamp: minTime,
 		}
-		p.Total = int64(result.TotalPages)
-		p.Elapsed = int64(result.TotalPages - page)
+		p.Export.Total = int64(result.TotalPages)
+		p.Export.Elapsed = int64(result.TotalPages - page)
+		p.Export.TotalItems += len(listens)
 		progress <- p
 	}

 	results <- models.ListensResult{OldestTimestamp: minTime}
-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 }

-func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	total := len(export.Items)
 	for i := 0; i < total; i += MaxListensPerSubmission {
 		listens := export.Items[i:min(i+MaxListensPerSubmission, total)]
@@ -244,20 +252,24 @@ func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResu

 		importResult.UpdateTimestamp(listens[count-1].ListenedAt)
 		importResult.ImportCount += accepted
-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
 }

-func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	// Choose a high offset, we attempt to search the loves backwards starting
 	// at the oldest one.
 	page := 1
 	perPage := MaxItemsPerGet

 	loves := make(models.LovesList, 0, 2*MaxItemsPerGet)
-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}
 	var totalCount int

 out:
@@ -268,12 +280,12 @@ out:
 			"page": page,
 		})
 		if err != nil {
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			results <- models.LovesResult{Error: err}
 			return
 		}

-		p.Total = int64(result.Total)
 		count := len(result.Tracks)
 		if count == 0 {
 			break out
@@ -282,7 +294,8 @@ out:
 		for _, track := range result.Tracks {
 			timestamp, err := strconv.ParseInt(track.Date.Uts, 10, 64)
 			if err != nil {
-				progress <- p.Abort()
+				p.Export.Abort()
+				progress <- p
 				results <- models.LovesResult{Error: err}
 				return
 			}
@@ -308,18 +321,21 @@ out:
 			}
 		}

-		p.Elapsed += int64(count)
+		p.Export.Total += int64(perPage)
+		p.Export.TotalItems = totalCount
+		p.Export.Elapsed += int64(count)
 		progress <- p

 		page += 1
 	}

 	sort.Sort(loves)
+	p.Export.Complete()
+	progress <- p
 	results <- models.LovesResult{Items: loves, Total: totalCount}
-	progress <- p.Complete()
 }

-func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, love := range export.Items {
 		err := b.client.Track.Love(lastfm.P{
 			"track": love.TrackName,
@@ -335,7 +351,7 @@ func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult m
 			importResult.Log(models.Error, msg)
 		}

-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 This file is part of Scotty.

@@ -72,21 +72,25 @@ func (b *ListenBrainzApiBackend) InitConfig(config *config.ServiceConfig) error
 func (b *ListenBrainzApiBackend) StartImport() error { return nil }
 func (b *ListenBrainzApiBackend) FinishImport() error { return nil }

-func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	startTime := time.Now()
 	minTime := oldestTimestamp
 	if minTime.Unix() < 1 {
 		minTime = time.Unix(1, 0)
 	}

-	totalDuration := startTime.Sub(minTime)
-
-	p := models.Progress{Total: int64(totalDuration.Seconds())}
+	totalDuration := startTime.Sub(oldestTimestamp)
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(totalDuration.Seconds()),
+		},
+	}

 	for {
 		result, err := b.client.GetListens(b.username, time.Now(), minTime)
 		if err != nil {
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			results <- models.ListensResult{Error: err}
 			return
 		}
@@ -96,7 +100,7 @@ func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, result
 		if minTime.Unix() < result.Payload.OldestListenTimestamp {
 			minTime = time.Unix(result.Payload.OldestListenTimestamp, 0)
 			totalDuration = startTime.Sub(minTime)
-			p.Total = int64(totalDuration.Seconds())
+			p.Export.Total = int64(totalDuration.Seconds())
 			continue
 		} else {
 			break
@@ -119,18 +123,20 @@ func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, result
 		}

 		sort.Sort(listens)
-		p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
+		p.Export.TotalItems += len(listens)
+		p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
 		progress <- p
 		results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}
 	}

 	results <- models.ListensResult{OldestTimestamp: minTime}
-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 }

-func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	total := len(export.Items)
-	p := models.Progress{}.FromImportResult(importResult)
+	p := models.TransferProgress{}.FromImportResult(importResult, false)
 	for i := 0; i < total; i += MaxListensPerRequest {
 		listens := export.Items[i:min(i+MaxListensPerRequest, total)]
 		count := len(listens)
@@ -146,7 +152,7 @@ func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, impo
 		for _, l := range listens {
 			if b.checkDuplicates {
 				isDupe, err := b.checkDuplicateListen(l)
-				p.Elapsed += 1
+				p.Import.Elapsed += 1
 				progress <- p
 				if err != nil {
 					return importResult, err
@@ -186,31 +192,36 @@ func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, impo
 			importResult.UpdateTimestamp(listens[count-1].ListenedAt)
 		}
 		importResult.ImportCount += count
-		progress <- p.FromImportResult(importResult)
+		progress <- p.FromImportResult(importResult, false)
 	}

 	return importResult, nil
 }

-func (b *ListenBrainzApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *ListenBrainzApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	exportChan := make(chan models.LovesResult)
-	p := models.Progress{}
+	p := models.TransferProgress{
+		Export: &models.Progress{},
+	}

 	go b.exportLoves(oldestTimestamp, exportChan)
 	for existingLoves := range exportChan {
 		if existingLoves.Error != nil {
-			progress <- p.Abort()
+			p.Export.Abort()
+			progress <- p
 			results <- models.LovesResult{Error: existingLoves.Error}
 			return
 		}

-		p.Total = int64(existingLoves.Total)
-		p.Elapsed += int64(existingLoves.Items.Len())
+		p.Export.TotalItems = existingLoves.Total
+		p.Export.Total = int64(existingLoves.Total)
+		p.Export.Elapsed += int64(len(existingLoves.Items))
 		progress <- p
 		results <- existingLoves
 	}

-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 }

 func (b *ListenBrainzApiBackend) exportLoves(oldestTimestamp time.Time, results chan models.LovesResult) {
@@ -260,7 +271,7 @@ out:
 	}
 }

-func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	if len(b.existingMBIDs) == 0 {
 		existingLovesChan := make(chan models.LovesResult)
 		go b.exportLoves(time.Unix(0, 0), existingLovesChan)
@@ -330,7 +341,7 @@ func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importRe
 			importResult.Log(models.Error, msg)
 		}

-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 This file is part of Scotty.

@@ -63,19 +63,24 @@ func (b *MalojaApiBackend) InitConfig(config *config.ServiceConfig) error {
 func (b *MalojaApiBackend) StartImport() error { return nil }
 func (b *MalojaApiBackend) FinishImport() error { return nil }

-func (b *MalojaApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *MalojaApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	page := 0
 	perPage := MaxItemsPerGet

 	// We need to gather the full list of listens in order to sort them
 	listens := make(models.ListensList, 0, 2*perPage)
-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}

 out:
 	for {
 		result, err := b.client.GetScrobbles(page, perPage)
 		if err != nil {
-			progress <- p.Complete()
+			p.Export.Abort()
+			progress <- p
 			results <- models.ListensResult{Error: err}
 			return
 		}
@@ -87,25 +92,27 @@ out:

 		for _, scrobble := range result.List {
 			if scrobble.ListenedAt > oldestTimestamp.Unix() {
-				p.Elapsed += 1
+				p.Export.Elapsed += 1
 				listens = append(listens, scrobble.AsListen())
 			} else {
 				break out
 			}
 		}

-		p.Total += int64(perPage)
+		p.Export.TotalItems = len(listens)
+		p.Export.Total += int64(perPage)
 		progress <- p
 		page += 1
 	}

 	sort.Sort(listens)
-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 	results <- models.ListensResult{Items: listens}
 }

-func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
-	p := models.Progress{}.FromImportResult(importResult)
+func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
+	p := models.TransferProgress{}.FromImportResult(importResult, false)
 	for _, listen := range export.Items {
 		scrobble := NewScrobble{
 			Title: listen.TrackName,
@@ -126,7 +133,7 @@ func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResu

 		importResult.UpdateTimestamp(listen.ListenedAt)
 		importResult.ImportCount += 1
-		progress <- p.FromImportResult(importResult)
+		progress <- p.FromImportResult(importResult, false)
 	}

 	return importResult, nil
@@ -131,10 +131,14 @@ func (b *ScrobblerLogBackend) FinishImport() error {
 	return b.file.Close()
 }

-func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	file, err := os.Open(b.filePath)
+	p := models.TransferProgress{
+		Export: &models.Progress{},
+	}
 	if err != nil {
-		progress <- models.Progress{}.Abort()
+		p.Export.Abort()
+		progress <- p
 		results <- models.ListensResult{Error: err}
 		return
 	}
@@ -143,7 +147,8 @@ func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results c

 	err = b.log.Parse(file, b.ignoreSkipped)
 	if err != nil {
-		progress <- models.Progress{}.Complete()
+		p.Export.Abort()
+		progress <- p
 		results <- models.ListensResult{Error: err}
 		return
 	}
@@ -157,11 +162,13 @@ func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results c
 		}
 	}
 	sort.Sort(listens)
-	progress <- models.Progress{Total: int64(len(listens))}.Complete()
+	p.Export.Total = int64(len(listens))
+	p.Export.Complete()
+	progress <- p
 	results <- models.ListensResult{Items: listens}
 }

-func (b *ScrobblerLogBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *ScrobblerLogBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	records := make([]scrobblerlog.Record, len(export.Items))
 	for i, listen := range export.Items {
 		records[i] = listenToRecord(listen)
@@ -173,7 +180,7 @@ func (b *ScrobblerLogBackend) ImportListens(export models.ListensResult, importR

 	importResult.UpdateTimestamp(lastTimestamp)
 	importResult.ImportCount += len(export.Items)
-	progress <- models.Progress{}.FromImportResult(importResult)
+	progress <- models.TransferProgress{}.FromImportResult(importResult, false)

 	return importResult, nil
 }
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -95,18 +95,22 @@ func (b *SpotifyApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
     return nil
 }
 
-func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
     startTime := time.Now()
     minTime := oldestTimestamp
 
     totalDuration := startTime.Sub(oldestTimestamp)
-    p := models.Progress{Total: int64(totalDuration.Seconds())}
+    p := models.TransferProgress{
+        Export: &models.Progress{
+            Total: int64(totalDuration.Seconds()),
+        },
+    }
 
     for {
         result, err := b.client.RecentlyPlayedAfter(minTime, MaxItemsPerGet)
         if err != nil {
-            progress <- p.Abort()
+            p.Export.Abort()
+            progress <- p
             results <- models.ListensResult{Error: err}
             return
         }
@@ -118,7 +122,8 @@ func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results cha
         // Set minTime to the newest returned listen
         after, err := strconv.ParseInt(result.Cursors.After, 10, 64)
         if err != nil {
-            progress <- p.Abort()
+            p.Export.Abort()
+            progress <- p
             results <- models.ListensResult{Error: err}
             return
         } else if after <= minTime.Unix() {
@@ -146,22 +151,28 @@ func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results cha
         }
 
         sort.Sort(listens)
-        p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
+        p.Export.TotalItems += len(listens)
+        p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
         progress <- p
         results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}
     }
 
     results <- models.ListensResult{OldestTimestamp: minTime}
-    progress <- p.Complete()
+    p.Export.Complete()
+    progress <- p
 }
 
-func (b *SpotifyApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *SpotifyApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
     // Choose a high offset, we attempt to search the loves backwards starting
     // at the oldest one.
     offset := math.MaxInt32
     perPage := MaxItemsPerGet
 
-    p := models.Progress{Total: int64(perPage)}
+    p := models.TransferProgress{
+        Export: &models.Progress{
+            Total: int64(perPage),
+        },
+    }
     totalCount := 0
     exportCount := 0
 
@@ -169,7 +180,8 @@ out:
     for {
         result, err := b.client.UserTracks(offset, perPage)
         if err != nil {
-            progress <- p.Abort()
+            p.Export.Abort()
+            progress <- p
             results <- models.LovesResult{Error: err}
             return
         }
@@ -177,7 +189,7 @@ out:
         // The offset was higher then the actual number of tracks. Adjust the offset
         // and continue.
         if offset >= result.Total {
-            p.Total = int64(result.Total)
+            p.Export.Total = int64(result.Total)
             totalCount = result.Total
             offset = max(result.Total-perPage, 0)
             continue
@@ -201,7 +213,7 @@ out:
         exportCount += len(loves)
         sort.Sort(loves)
         results <- models.LovesResult{Items: loves, Total: totalCount}
-        p.Elapsed += int64(count)
+        p.Export.Elapsed += int64(count)
         progress <- p
 
         if offset <= 0 {
@@ -216,7 +228,8 @@ out:
     }
 
     results <- models.LovesResult{Total: exportCount}
-    progress <- p.Complete()
+    p.Export.Complete()
+    progress <- p
 }
 
 func (l Listen) AsListen() models.Listen {
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -72,21 +72,27 @@ func (b *SpotifyHistoryBackend) InitConfig(config *config.ServiceConfig) error {
     return nil
 }
 
-func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
     files, err := filepath.Glob(path.Join(b.dirPath, historyFileGlob))
+    p := models.TransferProgress{
+        Export: &models.Progress{},
+    }
 
     if err != nil {
-        progress <- models.Progress{}.Abort()
+        p.Export.Abort()
+        progress <- p
         results <- models.ListensResult{Error: err}
         return
     }
 
     slices.Sort(files)
     fileCount := int64(len(files))
-    p := models.Progress{Total: fileCount}
+    p.Export.Total = fileCount
     for i, filePath := range files {
         history, err := readHistoryFile(filePath)
         if err != nil {
-            progress <- models.Progress{}.Abort()
+            p.Export.Abort()
+            progress <- p
             results <- models.ListensResult{Error: err}
             return
         }
@@ -97,11 +103,13 @@ func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results
         })
         sort.Sort(listens)
         results <- models.ListensResult{Items: listens}
-        p.Elapsed = int64(i)
+        p.Export.Elapsed = int64(i)
+        p.Export.TotalItems += len(listens)
         progress <- p
     }
 
-    progress <- p.Complete()
+    p.Export.Complete()
+    progress <- p
 }
 
 func readHistoryFile(filePath string) (StreamingHistory, error) {
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -63,25 +63,30 @@ func (b *SubsonicApiBackend) InitConfig(config *config.ServiceConfig) error {
     return nil
 }
 
-func (b *SubsonicApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *SubsonicApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
     err := b.client.Authenticate(b.password)
+    p := models.TransferProgress{
+        Export: &models.Progress{},
+    }
     if err != nil {
-        progress <- models.Progress{}.Abort()
+        p.Export.Abort()
+        progress <- p
         results <- models.LovesResult{Error: err}
         return
     }
 
     starred, err := b.client.GetStarred2(map[string]string{})
     if err != nil {
-        progress <- models.Progress{}.Abort()
+        p.Export.Abort()
+        progress <- p
        results <- models.LovesResult{Error: err}
         return
     }
 
     loves := b.filterSongs(starred.Song, oldestTimestamp)
-    progress <- models.Progress{
-        Total: int64(loves.Len()),
-    }.Complete()
+    p.Export.Total = int64(len(loves))
+    p.Export.Complete()
+    progress <- p
     results <- models.LovesResult{Items: loves}
 }
 
@@ -28,7 +28,18 @@ import (
     "go.uploadedlobster.com/scotty/internal/models"
 )
 
-func progressBar(exportProgress chan models.Progress, importProgress chan models.Progress) *mpb.Progress {
+type progressBarUpdater struct {
+    wg *sync.WaitGroup
+    progress *mpb.Progress
+    exportBar *mpb.Bar
+    importBar *mpb.Bar
+    updateChan chan models.TransferProgress
+    lastExportUpdate time.Time
+    totalItems int
+    importedItems int
+}
+
+func setupProgressBars(updateChan chan models.TransferProgress) progressBarUpdater {
     wg := &sync.WaitGroup{}
     p := mpb.New(
         mpb.WithWaitGroup(wg),
@@ -37,15 +48,71 @@ func progressBar(exportProgress chan models.Progress, importProgress chan models
         mpb.WithAutoRefresh(),
     )
 
-    exportBar := setupProgressBar(p, i18n.Tr("exporting"))
-    importBar := setupProgressBar(p, i18n.Tr("importing"))
-    go updateProgressBar(exportBar, wg, exportProgress)
-    go updateProgressBar(importBar, wg, importProgress)
+    u := progressBarUpdater{
+        wg: wg,
+        progress: p,
+        exportBar: initProgressBar(p, i18n.Tr("exporting")),
+        importBar: initProgressBar(p, i18n.Tr("importing")),
+        updateChan: updateChan,
+    }
 
-    return p
+    go u.update()
+    return u
 }
 
-func setupProgressBar(p *mpb.Progress, name string) *mpb.Bar {
+func (u *progressBarUpdater) wait() {
+    // FIXME: This should probably be closed elsewhere
+    close(u.updateChan)
+    u.progress.Wait()
+}
+
+func (u *progressBarUpdater) update() {
+    u.wg.Add(1)
+    defer u.wg.Done()
+    u.lastExportUpdate = time.Now()
+    for progress := range u.updateChan {
+        if progress.Export != nil {
+            u.updateExportProgress(progress.Export)
+        }
+
+        if progress.Import != nil {
+            if int64(u.totalItems) > progress.Import.Total {
+                progress.Import.Total = int64(u.totalItems)
+            }
+            u.updateImportProgress(progress.Import)
+        }
+    }
+}
+
+func (u *progressBarUpdater) updateExportProgress(progress *models.Progress) {
+    bar := u.exportBar
+    u.totalItems = progress.TotalItems
+
+    if progress.Aborted {
+        bar.Abort(false)
+        return
+    }
+
+    oldIterTime := u.lastExportUpdate
+    u.lastExportUpdate = time.Now()
+    elapsedTime := u.lastExportUpdate.Sub(oldIterTime)
+    bar.EwmaSetCurrent(progress.Elapsed, elapsedTime)
+    bar.SetTotal(progress.Total, progress.Completed)
+}
+
+func (u *progressBarUpdater) updateImportProgress(progress *models.Progress) {
+    bar := u.importBar
+
+    if progress.Aborted {
+        bar.Abort(false)
+        return
+    }
+
+    bar.SetCurrent(progress.Elapsed)
+    bar.SetTotal(progress.Total, progress.Completed)
+}
+
+func initProgressBar(p *mpb.Progress, name string) *mpb.Bar {
     green := color.New(color.FgGreen).SprintFunc()
     return p.New(0,
         mpb.BarStyle(),
@@ -69,19 +136,3 @@ func setupProgressBar(p *mpb.Progress, name string) *mpb.Bar {
         ),
     )
 }
-
-func updateProgressBar(bar *mpb.Bar, wg *sync.WaitGroup, progressChan chan models.Progress) {
-    wg.Add(1)
-    defer wg.Done()
-    lastIterTime := time.Now()
-    for progress := range progressChan {
-        if progress.Aborted {
-            bar.Abort(false)
-            return
-        }
-        oldIterTime := lastIterTime
-        lastIterTime = time.Now()
-        bar.EwmaSetCurrent(progress.Elapsed, lastIterTime.Sub(oldIterTime))
-        bar.SetTotal(progress.Total, progress.Completed)
-    }
-}
@@ -109,19 +109,18 @@ func (c *TransferCmd[E, I, R]) Transfer(exp backends.ExportProcessor[R], imp bac
     printTimestamp("From timestamp: %v (%v)", timestamp)
 
     // Prepare progress bars
-    exportProgress := make(chan models.Progress)
-    importProgress := make(chan models.Progress)
-    progress := progressBar(exportProgress, importProgress)
+    progressChan := make(chan models.TransferProgress)
+    progress := setupProgressBars(progressChan)
 
     // Export from source
     exportChan := make(chan R, 1000)
-    go exp.Process(timestamp, exportChan, exportProgress)
+    go exp.Process(timestamp, exportChan, progressChan)
 
     // Import into target
     resultChan := make(chan models.ImportResult)
-    go imp.Process(exportChan, resultChan, importProgress)
+    go imp.Process(exportChan, resultChan, progressChan)
     result := <-resultChan
-    progress.Wait()
+    progress.wait()
 
     // Update timestamp
     err = c.updateTimestamp(&result, timestamp)
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -55,7 +55,7 @@ type ListensExport interface {
     // Returns a list of all listens newer then oldestTimestamp.
     // The returned list of listens is supposed to be ordered by the
     // Listen.ListenedAt timestamp, with the oldest entry first.
-    ExportListens(oldestTimestamp time.Time, results chan ListensResult, progress chan Progress)
+    ExportListens(oldestTimestamp time.Time, results chan ListensResult, progress chan TransferProgress)
 }
 
 // Must be implemented by services supporting the import of listens.
@@ -63,7 +63,7 @@ type ListensImport interface {
     ImportBackend
 
     // Imports the given list of listens.
-    ImportListens(export ListensResult, importResult ImportResult, progress chan Progress) (ImportResult, error)
+    ImportListens(export ListensResult, importResult ImportResult, progress chan TransferProgress) (ImportResult, error)
 }
 
 // Must be implemented by services supporting the export of loves.
@@ -73,7 +73,7 @@ type LovesExport interface {
     // Returns a list of all loves newer then oldestTimestamp.
     // The returned list of listens is supposed to be ordered by the
    // Love.Created timestamp, with the oldest entry first.
-    ExportLoves(oldestTimestamp time.Time, results chan LovesResult, progress chan Progress)
+    ExportLoves(oldestTimestamp time.Time, results chan LovesResult, progress chan TransferProgress)
 }
 
 // Must be implemented by services supporting the import of loves.
@@ -81,5 +81,5 @@ type LovesImport interface {
     ImportBackend
 
     // Imports the given list of loves.
-    ImportLoves(export LovesResult, importResult ImportResult, progress chan Progress) (ImportResult, error)
+    ImportLoves(export LovesResult, importResult ImportResult, progress chan TransferProgress) (ImportResult, error)
 }
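For the import side of these interfaces, a hedged sketch of what an implementation looks like after this change (the backend type and its item handling are placeholders; only the signature and the progress reporting follow the code above):

package sketch

import "go.uploadedlobster.com/scotty/internal/models"

// exampleBackend is a placeholder type; a real backend also implements
// the ImportBackend methods required by models.ListensImport.
type exampleBackend struct{}

func (b *exampleBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
    // ... write export.Items to the target service here ...
    importResult.ImportCount += len(export.Items)

    // Import progress is derived from the running ImportResult; the second
    // argument marks whether the import is finished.
    progress <- models.TransferProgress{}.FromImportResult(importResult, false)
    return importResult, nil
}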
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -209,11 +209,25 @@ func (i *ImportResult) Log(t LogEntryType, msg string) {
     })
 }
 
+type TransferProgress struct {
+    Export *Progress
+    Import *Progress
+}
+
+func (p TransferProgress) FromImportResult(result ImportResult, completed bool) TransferProgress {
+    importProgress := Progress{
+        Completed: completed,
+    }.FromImportResult(result)
+    p.Import = &importProgress
+    return p
+}
+
 type Progress struct {
-    Total int64
-    Elapsed int64
-    Completed bool
-    Aborted bool
+    TotalItems int
+    Total int64
+    Elapsed int64
+    Completed bool
+    Aborted bool
 }
 
 func (p Progress) FromImportResult(result ImportResult) Progress {
@@ -222,13 +236,11 @@ func (p Progress) FromImportResult(result ImportResult) Progress {
     return p
 }
 
-func (p Progress) Complete() Progress {
+func (p *Progress) Complete() {
     p.Elapsed = p.Total
     p.Completed = true
-    return p
 }
 
-func (p Progress) Abort() Progress {
+func (p *Progress) Abort() {
     p.Aborted = true
-    return p
 }
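To tie the two halves together, a minimal consumer sketch (hypothetical, loosely mirroring the progressBarUpdater above): whichever half of TransferProgress is set gets applied, which is what lets export and import share one channel and lets the export's TotalItems grow the import total.

package sketch

import (
    "fmt"

    "go.uploadedlobster.com/scotty/internal/models"
)

// consumeProgress drains a unified progress channel. Export updates carry
// the number of exported items, which can raise the import total.
func consumeProgress(updates chan models.TransferProgress) {
    exportedItems := 0
    for tp := range updates {
        if tp.Export != nil {
            exportedItems = tp.Export.TotalItems
            fmt.Printf("export: %d/%d (%d items)\n", tp.Export.Elapsed, tp.Export.Total, exportedItems)
        }
        if tp.Import != nil {
            total := tp.Import.Total
            if int64(exportedItems) > total {
                total = int64(exportedItems)
            }
            fmt.Printf("import: %d/%d\n", tp.Import.Elapsed, total)
        }
    }
}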