Close export results channel in generic implementation
This removes the need for every backend implementation to close the results channel itself.
parent 9480c69cbb
commit 1c3364dad5

11 changed files with 5 additions and 32 deletions
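
The change centralizes channel cleanup in the generic export processor: Process now closes both the results and the progress channel with defer, and the individual backend Export* methods only send on them. Below is a minimal, self-contained sketch of that pattern; the stand-in types and names (ListensResult, Progress, ListensExporter, fakeBackend) are simplified placeholders for illustration, not the actual scotty models package.

package main

import (
	"fmt"
	"time"
)

// Simplified stand-ins for scotty's models.ListensResult and models.Progress.
type ListensResult struct{ Items []string }
type Progress struct{ Elapsed int64 }

// A backend only sends on the channels; it no longer closes them itself.
type ListensExporter interface {
	ExportListens(oldestTimestamp time.Time, results chan ListensResult, progress chan Progress)
}

type fakeBackend struct{}

func (fakeBackend) ExportListens(_ time.Time, results chan ListensResult, progress chan Progress) {
	results <- ListensResult{Items: []string{"track A", "track B"}}
	progress <- Progress{Elapsed: 1}
	// No close(results) or close(progress) here anymore.
}

// The generic processor owns the channel lifecycle and closes both channels
// exactly once, when the backend's export returns.
type ListensExportProcessor struct{ Backend ListensExporter }

func (p ListensExportProcessor) Process(oldestTimestamp time.Time, results chan ListensResult, progress chan Progress) {
	defer close(results)
	defer close(progress)
	p.Backend.ExportListens(oldestTimestamp, results, progress)
}

func main() {
	results := make(chan ListensResult)
	progress := make(chan Progress)
	proc := ListensExportProcessor{Backend: fakeBackend{}}
	go proc.Process(time.Now().Add(-24*time.Hour), results, progress)

	// Drain progress updates so the exporter never blocks on them.
	go func() {
		for range progress {
		}
	}()

	// Ranging over results terminates cleanly because Process closed the channel.
	for r := range results {
		fmt.Println("received", len(r.Items), "listens")
	}
}

Because the close calls now run via defer in Process, they happen regardless of how the backend's export returns, which is what lets every backend in the diff below drop its own defer close(results).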

@@ -88,8 +88,6 @@ func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan
 
 	totalDuration := startTime.Sub(oldestTimestamp)
 
-	defer close(results)
-
 	p := models.Progress{Total: int64(totalDuration.Seconds())}
 
 out:

@@ -155,8 +153,6 @@ func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan m
 	offset := math.MaxInt32
 	perPage := MaxItemsPerGet
 
-	defer close(results)
-
 	p := models.Progress{Total: int64(perPage)}
 	var totalCount int
 

@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software

@@ -35,8 +35,9 @@ func (p ListensExportProcessor) ExportBackend() models.Backend {
 }
 
 func (p ListensExportProcessor) Process(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+	defer close(results)
+	defer close(progress)
 	p.Backend.ExportListens(oldestTimestamp, results, progress)
-	close(progress)
 }
 
 type LovesExportProcessor struct {

@@ -48,6 +49,7 @@ func (p LovesExportProcessor) ExportBackend() models.Backend {
 }
 
 func (p LovesExportProcessor) Process(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+	defer close(results)
+	defer close(progress)
 	p.Backend.ExportLoves(oldestTimestamp, results, progress)
-	close(progress)
 }

@@ -64,8 +64,6 @@ func (b *FunkwhaleApiBackend) ExportListens(oldestTimestamp time.Time, results c
 	page := 1
 	perPage := MaxItemsPerGet
 
-	defer close(results)
-
 	// We need to gather the full list of listens in order to sort them
 	listens := make(models.ListensList, 0, 2*perPage)
 	p := models.Progress{Total: int64(perPage)}

@@ -113,8 +111,6 @@ func (b *FunkwhaleApiBackend) ExportLoves(oldestTimestamp time.Time, results cha
 	page := 1
 	perPage := MaxItemsPerGet
 
-	defer close(results)
-
 	// We need to gather the full list of listens in order to sort them
 	loves := make(models.LovesList, 0, 2*perPage)
 	p := models.Progress{Total: int64(perPage)}

@@ -94,8 +94,6 @@ func (b *JSPFBackend) FinishImport() error {
 }
 
 func (b *JSPFBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
-	defer close(results)
-
 	err := b.readJSPF()
 	if err != nil {
 		progress <- models.Progress{}.Complete()

@@ -128,8 +126,6 @@ func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult mo
 }
 
 func (b *JSPFBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
-	defer close(results)
-
 	err := b.readJSPF()
 	if err != nil {
 		progress <- models.Progress{}.Complete()

@@ -93,8 +93,6 @@ func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan
 	minTime := oldestTimestamp
 	perPage := MaxItemsPerGet
 
-	defer close(results)
-
 	// We need to gather the full list of listens in order to sort them
 	p := models.Progress{Total: int64(page)}
 

@@ -258,8 +256,6 @@ func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan m
 	page := 1
 	perPage := MaxItemsPerGet
 
-	defer close(results)
-
 	loves := make(models.LovesList, 0, 2*MaxItemsPerGet)
 	p := models.Progress{Total: int64(perPage)}
 	var totalCount int

@@ -81,8 +81,6 @@ func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, result
 
 	totalDuration := startTime.Sub(minTime)
 
-	defer close(results)
-
 	p := models.Progress{Total: int64(totalDuration.Seconds())}
 
 	for {

@@ -195,7 +193,6 @@ func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, impo
 }
 
 func (b *ListenBrainzApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
-	defer close(results)
 	exportChan := make(chan models.LovesResult)
 	p := models.Progress{}
 

@@ -67,8 +67,6 @@ func (b *MalojaApiBackend) ExportListens(oldestTimestamp time.Time, results chan
 	page := 0
 	perPage := MaxItemsPerGet
 
-	defer close(results)
-
 	// We need to gather the full list of listens in order to sort them
 	listens := make(models.ListensList, 0, 2*perPage)
 	p := models.Progress{Total: int64(perPage)}

@@ -132,7 +132,6 @@ func (b *ScrobblerLogBackend) FinishImport() error {
 }
 
 func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
-	defer close(results)
 	file, err := os.Open(b.filePath)
 	if err != nil {
 		progress <- models.Progress{}.Complete()

@@ -101,8 +101,6 @@ func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results cha
 
 	totalDuration := startTime.Sub(oldestTimestamp)
 
-	defer close(results)
-
 	p := models.Progress{Total: int64(totalDuration.Seconds())}
 
 	for {

@@ -163,8 +161,6 @@ func (b *SpotifyApiBackend) ExportLoves(oldestTimestamp time.Time, results chan
 	offset := math.MaxInt32
 	perPage := MaxItemsPerGet
 
-	defer close(results)
-
 	p := models.Progress{Total: int64(perPage)}
 	totalCount := 0
 	exportCount := 0

@@ -73,8 +73,6 @@ func (b *SpotifyHistoryBackend) InitConfig(config *config.ServiceConfig) error {
 }
 
 func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
-	defer close(results)
-
 	files, err := filepath.Glob(path.Join(b.dirPath, historyFileGlob))
 	if err != nil {
 		progress <- models.Progress{}.Complete()

@@ -64,7 +64,6 @@ func (b *SubsonicApiBackend) InitConfig(config *config.ServiceConfig) error {
 }
 
 func (b *SubsonicApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
-	defer close(results)
 	err := b.client.Authenticate(b.password)
 	if err != nil {
 		progress <- models.Progress{}.Complete()