Implemented deezer-history loves export
This commit is contained in:
parent
e85090fe4a
commit
78a05e9f54
2 changed files with 75 additions and 3 deletions
@@ -120,7 +120,7 @@ The following table lists the available backends and the currently supported features

 Backend        | Listens Export | Listens Import | Loves Export | Loves Import
 ---------------|----------------|----------------|--------------|-------------
 deezer         | ✓              | ⨯              | ✓            | -
-deezer-history | ✓              | ⨯              | -            | ⨯
+deezer-history | ✓              | ⨯              | ✓            | ⨯
 funkwhale      | ✓              | ⨯              | ✓            | -
 jspf           | ✓              | ✓              | ✓            | ✓
 lastfm         | ✓              | ✓              | ✓            | ✓
@@ -22,6 +22,7 @@ import (
     "fmt"
+    "sort"
     "strconv"
     "strings"
     "time"

     "github.com/xuri/excelize/v2"
@@ -33,7 +34,7 @@ import (

 const (
     sheetListeningHistory = "10_listeningHistory"
-    sheetfavoriteSongs    = "8_favoriteSong"
+    sheetFavoriteSongs    = "8_favoriteSong"
 )

 type DeezerHistoryBackend struct {
@@ -96,6 +97,46 @@ func (b *DeezerHistoryBackend) ExportListens(ctx context.Context, oldestTimestamp
     progress <- p
 }

+func (b *DeezerHistoryBackend) ExportLoves(ctx context.Context, oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
+    p := models.TransferProgress{
+        Export: &models.Progress{},
+    }
+
+    rows, err := ReadXLSXSheet(b.filePath, sheetFavoriteSongs)
+    if err != nil {
+        p.Export.Abort()
+        progress <- p
+        results <- models.LovesResult{Error: err}
+        return
+    }
+
+    count := len(rows) - 1 // Exclude the header row
+    p.Export.TotalItems = count
+    p.Export.Total = int64(count)
+
+    loves := make(models.LovesList, 0, count)
+    for i, row := range models.IterExportProgress(rows, &p, progress) {
+        // Skip the header row
+        if i == 0 {
+            continue
+        }
+
+        l, err := RowAsLove(row)
+        if err != nil {
+            p.Export.Abort()
+            progress <- p
+            results <- models.LovesResult{Error: err}
+            return
+        }
+        loves = append(loves, *l)
+    }
+
+    sort.Sort(loves)
+    results <- models.LovesResult{Items: loves}
+    p.Export.Complete()
+    progress <- p
+}
+
 func ReadXLSXSheet(path string, sheet string) ([][]string, error) {
     exc, err := excelize.OpenFile(path)
     if err != nil {
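For orientation, a minimal consumer sketch, not part of this commit: ExportLoves sends any number of progress updates, exactly one LovesResult, and (on success) one final progress update after the result. The helper name collectLoves, the channel buffer sizes, and the zero-time meaning of oldestTimestamp are assumptions.

// Hypothetical consumer of the channel contract shown in the diff above.
func collectLoves(b *DeezerHistoryBackend) (models.LovesList, error) {
    results := make(chan models.LovesResult, 1)
    // Small buffer so the final progress update, sent after the result,
    // cannot block the exporting goroutine once we stop receiving.
    progress := make(chan models.TransferProgress, 8)

    // time.Time{} as oldestTimestamp, assumed to mean no lower bound.
    go b.ExportLoves(context.Background(), time.Time{}, results, progress)

    for {
        select {
        case <-progress:
            // A real caller would render the update; discarded in this sketch.
        case res := <-results:
            return res.Items, res.Error
        }
    }
}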
@@ -103,7 +144,7 @@ func ReadXLSXSheet(path string, sheet string) ([][]string, error) {
     }

     // Get all the rows in the requested sheet.
-    return exc.GetRows(sheetListeningHistory)
+    return exc.GetRows(sheet)
 }

 func RowAsListen(row []string) (*models.Listen, error) {
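With this fix the helper honours its sheet argument, so listens and loves exports can share it. A hypothetical call, with the file name invented for illustration:

// Hypothetical snippet: read the favorites sheet from an exported data file.
rows, err := ReadXLSXSheet("deezer-export.xlsx", sheetFavoriteSongs)
if err != nil {
    return // handle the error in a real caller
}
fmt.Printf("favorite songs (excluding the header row): %d\n", len(rows)-1)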
@@ -123,6 +164,9 @@ func RowAsListen(row []string) (*models.Listen, error) {
             ArtistNames: []string{row[1]},
             ReleaseName: row[3],
             ISRC:        mbtypes.ISRC(row[2]),
+            AdditionalInfo: map[string]any{
+                "music_service": "deezer.com",
+            },
         },
     }
@@ -132,3 +176,31 @@ func RowAsListen(row []string) (*models.Listen, error) {

     return &listen, nil
 }
+
+func RowAsLove(row []string) (*models.Love, error) {
+    if len(row) < 5 {
+        err := fmt.Errorf("invalid row, expected 5 columns, got %d", len(row))
+        return nil, err
+    }
+
+    url := row[4]
+    if !strings.HasPrefix(url, "http://") && !strings.HasPrefix(url, "https://") {
+        url = "https://" + url
+    }
+
+    love := models.Love{
+        Track: models.Track{
+            TrackName:   row[0],
+            ArtistNames: []string{row[1]},
+            ReleaseName: row[2],
+            ISRC:        mbtypes.ISRC(row[3]),
+            AdditionalInfo: map[string]any{
+                "music_service": "deezer.com",
+                "origin_url":    url,
+                "deezer_id":     url,
+            },
+        },
+    }
+
+    return &love, nil
+}
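To illustrate the column layout RowAsLove expects from the 8_favoriteSong sheet (title, artist, album, ISRC, URL), a hypothetical conversion; every value below is invented:

// Hypothetical example row; Deezer's export omits the URL scheme.
row := []string{
    "Some Song",                // TrackName
    "Some Artist",              // ArtistNames[0]
    "Some Album",               // ReleaseName
    "USXXX0000001",             // ISRC (made-up value)
    "www.deezer.com/track/123", // URL without scheme
}
love, err := RowAsLove(row)
if err != nil {
    return // handle the error in a real caller
}
// The scheme is prepended, so this prints "https://www.deezer.com/track/123".
fmt.Println(love.Track.AdditionalInfo["origin_url"])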