Mirror of https://git.sr.ht/~phw/scotty (synced 2025-06-06 21:08:34 +02:00)

Compare commits: 4da5697435...34b6bb9aa3

No commits in common. "4da569743555301b18ffc993f5bcfffb4d625f98" and "34b6bb9aa33c918b138546146f16a841319d3c21" have entirely different histories.

38 changed files with 246 additions and 1376 deletions

CHANGES.md | 20

@@ -1,25 +1,5 @@
 # Scotty Changelog
 
-## 0.7.0 - WIP
-- listenbrainz-archive: new backend to load listens and loves from a
-  ListenBrainz export. The data can be read from the downloaded ZIP archive
-  or a directory where the contents of the archive have been extracted to.
-- listenbrainz: faster loading of missing loves metadata using the ListenBrainz
-  API instead of MusicBrainz. Falls back to the slower MusicBrainz query if
-  ListenBrainz does not provide the data.
-- listenbrainz: fixed an issue where the timestamp was not updated properly if
-  duplicate listens were detected during import.
-- spotify-history: it is now possible to specify the path directly to the
-  `my_spotify_data_extended.zip` ZIP file as downloaded from Spotify.
-- spotify-history: the parameter for the export archive path has been renamed to
-  `archive-path`. For backward compatibility the old `dir-path` parameter is
-  still read.
-- deezer: fixed an endless export loop if the user's listen history was empty.
-- dump: it is now possible to specify a file to write the text output to.
-- Fixed potential issues with MusicBrainz rate limiting.
-- Fixed duplicated import log output.
-
-
 ## 0.6.0 - 2025-05-23
 - Fully reworked progress report
 - Cancel both export and import on error
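
The first changelog entry above says the new listenbrainz-archive backend accepts either the downloaded ZIP file or a directory with the extracted archive contents. A minimal, standard-library-only sketch of that detection (the actual implementation is the archive package removed further down in this diff; the function name here is illustrative):

```go
package export

import (
	"archive/zip"
	"fmt"
	"io/fs"
	"os"
)

// openExport returns a read-only fs.FS for either a ZIP file or a directory
// containing the extracted archive contents.
func openExport(path string) (fs.FS, error) {
	fi, err := os.Stat(path)
	if err != nil {
		return nil, err
	}
	switch {
	case fi.Mode().IsRegular():
		r, err := zip.OpenReader(path)
		if err != nil {
			return nil, err
		}
		return r, nil // *zip.ReadCloser also implements fs.FS (Go >= 1.16)
	case fi.IsDir():
		return os.DirFS(path), nil
	default:
		return nil, fmt.Errorf("unsupported file mode: %s", fi.Mode())
	}
}
```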

README.md | 31

@@ -117,30 +117,23 @@ scotty beam listens deezer listenbrainz --timestamp "2023-12-06 14:26:24"
 ### Supported backends
 
 The following table lists the available backends and the currently supported features.
 
 Backend | Listens Export | Listens Import | Loves Export | Loves Import
----------------------|----------------|----------------|--------------|-------------
+----------------|----------------|----------------|--------------|-------------
 deezer | ✓ | ⨯ | ✓ | -
 funkwhale | ✓ | ⨯ | ✓ | -
 jspf | ✓ | ✓ | ✓ | ✓
 lastfm | ✓ | ✓ | ✓ | ✓
 listenbrainz | ✓ | ✓ | ✓ | ✓
-listenbrainz-archive | ✓ | - | ✓ | -
 maloja | ✓ | ✓ | ⨯ | ⨯
 scrobbler-log | ✓ | ✓ | ⨯ | ⨯
 spotify | ✓ | ⨯ | ✓ | -
 spotify-history | ✓ | ⨯ | ⨯ | ⨯
 subsonic | ⨯ | ⨯ | ✓ | -
 
 **✓** implemented **-** not yet implemented **⨯** unavailable / not planned
 
 See the comments in [config.example.toml](./config.example.toml) for a description of each backend's available configuration options.
 
-**NOTE:** Some services, e.g. the Spotify and Deezer API, do not provide access
-to the user's full listening history. Hence the API integrations are not suited
-for a full history export. They can, however, be used to continuously transfer
-recent listens to other services when running scotty frequently, e.g. as a
-cron job.
-
 ## Contribute
 
 The source code for Scotty is available on [SourceHut](https://sr.ht/~phw/scotty/). To report issues or feature requests please [create a ticket](https://todo.sr.ht/~phw/scotty).

config.example.toml

@@ -19,15 +19,6 @@ token = ""
 # not already exists in your ListenBrainz profile.
 check-duplicate-listens = false
 
-[service.listenbrainz-archive]
-# This backend supports listens from a ListenBrainz export archive
-# (https://listenbrainz.org/settings/export/).
-backend = "listenbrainz-archive"
-# The file path to the ListenBrainz export archive. The path can either point
-# to the ZIP file as downloaded from ListenBrainz or a directory where the
-# ZIP was extracted to.
-archive-path = "./listenbrainz_outsidecontext.zip"
-
 [service.maloja]
 # Maloja is a self hosted listening service (https://github.com/krateng/maloja)
 backend = "maloja"
@@ -106,14 +97,10 @@ client-secret = ""
 
 [service.spotify-history]
 # Read listens from a Spotify extended history export
-# NOTE: The Spotify API does not allow access to the full listen history,
-# but only to recent listens.
 backend = "spotify-history"
-# Path to the Spotify extended history archive. This can either point directly
-# to the "my_spotify_data_extended.zip" ZIP file provided by Spotify or a
-# directory where this file has been extracted to. The history files are
-# expected to follow the naming pattern "Streaming_History_Audio_*.json".
-archive-path = "./my_spotify_data_extended.zip"
+# Directory where the extended history JSON files are located. The files must
+# follow the naming scheme "Streaming_History_Audio_*.json".
+dir-path = "./my_spotify_data_extended/Spotify Extended Streaming History"
 # If true (default), ignore listens from a Spotify "private session".
 ignore-incognito = true
 # If true, ignore listens marked as skipped. Default is false.
@@ -124,9 +111,7 @@ ignore-skipped = false
 ignore-min-duration-seconds = 30
 
 [service.deezer]
-# Read listens and loves from a Deezer account.
-# NOTE: The Deezer API does not allow access to the full listen history,
-# but only to recent listens.
+# Read listens and loves from a Deezer account
 backend = "deezer"
 # You need to register an application on https://developers.deezer.com/myapps
 # and set the client ID and client secret below.
@@ -149,10 +134,4 @@ client-secret = ""
 [service.dump]
 # This backend allows writing listens and loves as console output. Useful for
 # debugging the export from other services.
 backend = "dump"
-# Path to a file where the listens and loves are written to. If not set,
-# the output is written to stdout.
-file-path = ""
-# If true (default), new listens will be appended to the existing file. Set to
-# false to overwrite the file on every run.
-append = true
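
The changelog notes that spotify-history now prefers `archive-path` but still reads the legacy `dir-path` key. A hedged sketch of how such a fallback could look, using the `config.GetString` accessor that appears in the hunks below; the helper name is illustrative and not taken from the repository:

```go
package spotifyhistory

import "go.uploadedlobster.com/scotty/internal/config"

// resolveArchivePath prefers the new "archive-path" key and falls back to the
// legacy "dir-path" key for backward compatibility. Illustrative helper only.
func resolveArchivePath(cfg *config.ServiceConfig) string {
	if path := cfg.GetString("archive-path"); path != "" {
		return path
	}
	return cfg.GetString("dir-path")
}
```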

go.mod | 3

@@ -15,7 +15,6 @@ require (
 	github.com/manifoldco/promptui v0.9.0
 	github.com/pelletier/go-toml/v2 v2.2.4
 	github.com/shkh/lastfm-go v0.0.0-20191215035245-89a801c244e0
-	github.com/simonfrey/jsonl v0.0.0-20240904112901-935399b9a740
 	github.com/spf13/cast v1.8.0
 	github.com/spf13/cobra v1.9.1
 	github.com/spf13/viper v1.20.1
@@ -23,7 +22,7 @@ require (
 	github.com/supersonic-app/go-subsonic v0.0.0-20241224013245-9b2841f3711d
 	github.com/vbauerster/mpb/v8 v8.10.1
 	go.uploadedlobster.com/mbtypes v0.4.0
-	go.uploadedlobster.com/musicbrainzws2 v0.15.1-0.20250524094913-01f007ad1064
+	go.uploadedlobster.com/musicbrainzws2 v0.15.0
 	golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
 	golang.org/x/oauth2 v0.30.0
 	golang.org/x/text v0.25.0

go.sum | 4

@@ -107,8 +107,6 @@ github.com/sagikazarmark/locafero v0.9.0 h1:GbgQGNtTrEmddYDSAH9QLRyfAHY12md+8YFT
 github.com/sagikazarmark/locafero v0.9.0/go.mod h1:UBUyz37V+EdMS3hDF3QWIiVr/2dPrx49OMO0Bn0hJqk=
 github.com/shkh/lastfm-go v0.0.0-20191215035245-89a801c244e0 h1:cgqwZtnR+IQfUYDLJ3Kiy4aE+O/wExTzEIg8xwC4Qfs=
 github.com/shkh/lastfm-go v0.0.0-20191215035245-89a801c244e0/go.mod h1:n3nudMl178cEvD44PaopxH9jhJaQzthSxUzLO5iKMy4=
-github.com/simonfrey/jsonl v0.0.0-20240904112901-935399b9a740 h1:CXJI+lliMiiEwzfgE8yt/38K0heYDgQ0L3f/3fxRnQU=
-github.com/simonfrey/jsonl v0.0.0-20240904112901-935399b9a740/go.mod h1:G4w16caPmc6at7u4fmkj/8OAoOnM9mkmJr2fvL0vhaw=
 github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
 github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
 github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA=
@@ -136,8 +134,6 @@ go.uploadedlobster.com/mbtypes v0.4.0 h1:D5asCgHsRWufj4Yn5u0IuH2J9z1UuYImYkYIp1Z
 go.uploadedlobster.com/mbtypes v0.4.0/go.mod h1:Bu1K1Hl77QTAE2Z7QKiW/JAp9KqYWQebkRRfG02dlZM=
 go.uploadedlobster.com/musicbrainzws2 v0.15.0 h1:njJeyf1dDwfz2toEHaZSuockVsn1fg+967/tVfLHhwQ=
 go.uploadedlobster.com/musicbrainzws2 v0.15.0/go.mod h1:T6sYE7ZHRH3mJWT3g9jdSUPKJLZubnBjKyjMPNdkgao=
-go.uploadedlobster.com/musicbrainzws2 v0.15.1-0.20250524094913-01f007ad1064 h1:bir8kas9u0A+T54sfzj3il7SUAV5KQtb5QzDtwvslxI=
-go.uploadedlobster.com/musicbrainzws2 v0.15.1-0.20250524094913-01f007ad1064/go.mod h1:T6sYE7ZHRH3mJWT3g9jdSUPKJLZubnBjKyjMPNdkgao=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=

@@ -1,179 +0,0 @@
-/*
-Copyright © 2025 Philipp Wolfer <phw@uploadedlobster.com>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-
-// Implements generic access to files inside an archive.
-//
-// An archive in this context can be any container that holds files.
-// In this implementation the archive can be a ZIP file or a directory.
-package archive
-
-import (
-	"archive/zip"
-	"fmt"
-	"io"
-	"io/fs"
-	"os"
-	"path/filepath"
-)
-
-// Generic archive interface.
-type Archive interface {
-	Close() error
-	Open(path string) (fs.File, error)
-	Glob(pattern string) ([]FileInfo, error)
-}
-
-// Open an archive in path.
-// The archive can be a ZIP file or a directory. The implementation
-// will detect the type of archive and return the appropriate
-// implementation of the Archive interface.
-func OpenArchive(path string) (Archive, error) {
-	fi, err := os.Stat(path)
-	if err != nil {
-		return nil, err
-	}
-	switch mode := fi.Mode(); {
-	case mode.IsRegular():
-		archive := &zipArchive{}
-		err := archive.OpenArchive(path)
-		if err != nil {
-			return nil, err
-		}
-		return archive, nil
-	case mode.IsDir():
-		archive := &dirArchive{}
-		err := archive.OpenArchive(path)
-		if err != nil {
-			return nil, err
-		}
-		return archive, nil
-	default:
-		return nil, fmt.Errorf("unsupported file mode: %s", mode)
-	}
-}
-
-// Interface for a file that can be opened when needed.
-type OpenableFile interface {
-	Open() (io.ReadCloser, error)
-}
-
-// Generic information about a file inside an archive.
-type FileInfo struct {
-	Name string
-	File OpenableFile
-}
-
-// An openable file in the filesystem.
-type filesystemFile struct {
-	path string
-}
-
-func (f *filesystemFile) Open() (io.ReadCloser, error) {
-	return os.Open(f.path)
-}
-
-// An implementation of the archiveBackend interface for zip files.
-type zipArchive struct {
-	zip *zip.ReadCloser
-}
-
-func (a *zipArchive) OpenArchive(path string) error {
-	zip, err := zip.OpenReader(path)
-	if err != nil {
-		return err
-	}
-	a.zip = zip
-	return nil
-}
-
-func (a *zipArchive) Close() error {
-	if a.zip == nil {
-		return nil
-	}
-	return a.zip.Close()
-}
-
-func (a *zipArchive) Glob(pattern string) ([]FileInfo, error) {
-	result := make([]FileInfo, 0)
-	for _, file := range a.zip.File {
-		if matched, err := filepath.Match(pattern, file.Name); matched {
-			if err != nil {
-				return nil, err
-			}
-			info := FileInfo{
-				Name: file.Name,
-				File: file,
-			}
-			result = append(result, info)
-		}
-	}
-
-	return result, nil
-}
-
-func (a *zipArchive) Open(path string) (fs.File, error) {
-	file, err := a.zip.Open(path)
-	if err != nil {
-		return nil, err
-	}
-	return file, nil
-}
-
-// An implementation of the archiveBackend interface for directories.
-type dirArchive struct {
-	path  string
-	dirFS fs.FS
-}
-
-func (a *dirArchive) OpenArchive(path string) error {
-	a.path = filepath.Clean(path)
-	a.dirFS = os.DirFS(path)
-	return nil
-}
-
-func (a *dirArchive) Close() error {
-	return nil
-}
-
-// Open opens the named file in the archive.
-// [fs.File.Close] must be called to release any associated resources.
-func (a *dirArchive) Open(path string) (fs.File, error) {
-	return a.dirFS.Open(path)
-}
-
-func (a *dirArchive) Glob(pattern string) ([]FileInfo, error) {
-	files, err := fs.Glob(a.dirFS, pattern)
-	if err != nil {
-		return nil, err
-	}
-	result := make([]FileInfo, 0)
-	for _, name := range files {
-		fullPath := filepath.Join(a.path, name)
-		info := FileInfo{
-			Name: name,
-			File: &filesystemFile{path: fullPath},
-		}
-		result = append(result, info)
-	}
-
-	return result, nil
-}
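
A hedged usage sketch of the Archive interface deleted above: open a ZIP file or directory, glob for history files, and read one entry. Only identifiers visible in the removed file are used; the import path and the glob pattern are assumptions, since the file's location inside the repository is not shown in this compare view.

```go
package main

import (
	"fmt"
	"io"

	"go.uploadedlobster.com/scotty/internal/archive" // assumed import path
)

func main() {
	// Works for both "./export.zip" and a directory with extracted contents.
	a, err := archive.OpenArchive("./my_spotify_data_extended.zip")
	if err != nil {
		panic(err)
	}
	defer a.Close()

	files, err := a.Glob("Streaming_History_Audio_*.json")
	if err != nil {
		panic(err)
	}
	for _, fi := range files {
		r, err := fi.File.Open()
		if err != nil {
			panic(err)
		}
		data, _ := io.ReadAll(r)
		fmt.Printf("%s: %d bytes\n", fi.Name, len(data))
		r.Close()
	}
}
```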

@@ -27,7 +27,6 @@ import (
 	"go.uploadedlobster.com/scotty/internal/backends/funkwhale"
 	"go.uploadedlobster.com/scotty/internal/backends/jspf"
 	"go.uploadedlobster.com/scotty/internal/backends/lastfm"
-	"go.uploadedlobster.com/scotty/internal/backends/lbarchive"
 	"go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
 	"go.uploadedlobster.com/scotty/internal/backends/maloja"
 	"go.uploadedlobster.com/scotty/internal/backends/scrobblerlog"
@@ -106,18 +105,17 @@ func GetBackends() BackendList {
 }
 
 var knownBackends = map[string]func() models.Backend{
 	"deezer": func() models.Backend { return &deezer.DeezerApiBackend{} },
 	"dump": func() models.Backend { return &dump.DumpBackend{} },
 	"funkwhale": func() models.Backend { return &funkwhale.FunkwhaleApiBackend{} },
 	"jspf": func() models.Backend { return &jspf.JSPFBackend{} },
 	"lastfm": func() models.Backend { return &lastfm.LastfmApiBackend{} },
 	"listenbrainz": func() models.Backend { return &listenbrainz.ListenBrainzApiBackend{} },
-	"listenbrainz-archive": func() models.Backend { return &lbarchive.ListenBrainzArchiveBackend{} },
 	"maloja": func() models.Backend { return &maloja.MalojaApiBackend{} },
 	"scrobbler-log": func() models.Backend { return &scrobblerlog.ScrobblerLogBackend{} },
 	"spotify": func() models.Backend { return &spotify.SpotifyApiBackend{} },
 	"spotify-history": func() models.Backend { return &spotifyhistory.SpotifyHistoryBackend{} },
 	"subsonic": func() models.Backend { return &subsonic.SubsonicApiBackend{} },
 }
 
 func backendWithConfig(config config.ServiceConfig) (models.Backend, error) {
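
The knownBackends map above ties the `backend` name from a service configuration to a factory function. A standalone, simplified illustration of that registry pattern with local stand-in types (not scotty's models package):

```go
package main

import "fmt"

// Backend is a local stand-in for the real models.Backend interface.
type Backend interface{ Name() string }

type dumpBackend struct{}

func (dumpBackend) Name() string { return "dump" }

// known maps a configured backend name to a factory creating a fresh instance.
var known = map[string]func() Backend{
	"dump": func() Backend { return dumpBackend{} },
}

func backendByName(name string) (Backend, error) {
	factory, ok := known[name]
	if !ok {
		return nil, fmt.Errorf("unknown backend %q", name)
	}
	return factory(), nil
}

func main() {
	b, err := backendByName("dump")
	if err != nil {
		panic(err)
	}
	fmt.Println(b.Name())
}
```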

@@ -28,7 +28,6 @@ import (
 	"go.uploadedlobster.com/scotty/internal/backends/funkwhale"
 	"go.uploadedlobster.com/scotty/internal/backends/jspf"
 	"go.uploadedlobster.com/scotty/internal/backends/lastfm"
-	"go.uploadedlobster.com/scotty/internal/backends/lbarchive"
 	"go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
 	"go.uploadedlobster.com/scotty/internal/backends/maloja"
 	"go.uploadedlobster.com/scotty/internal/backends/scrobblerlog"
@@ -104,11 +103,6 @@ func TestImplementsInterfaces(t *testing.T) {
 	expectInterface[models.LovesExport](t, &lastfm.LastfmApiBackend{})
 	expectInterface[models.LovesImport](t, &lastfm.LastfmApiBackend{})
 
-	expectInterface[models.ListensExport](t, &lbarchive.ListenBrainzArchiveBackend{})
-	// expectInterface[models.ListensImport](t, &lbarchive.ListenBrainzArchiveBackend{})
-	expectInterface[models.LovesExport](t, &lbarchive.ListenBrainzArchiveBackend{})
-	// expectInterface[models.LovesImport](t, &lbarchive.ListenBrainzArchiveBackend{})
-
 	expectInterface[models.ListensExport](t, &listenbrainz.ListenBrainzApiBackend{})
 	expectInterface[models.ListensImport](t, &listenbrainz.ListenBrainzApiBackend{})
 	expectInterface[models.LovesExport](t, &listenbrainz.ListenBrainzApiBackend{})

@@ -105,11 +105,6 @@ out:
 			return
 		}
 
-		// No result, break immediately
-		if result.Total == 0 {
-			break out
-		}
-
 		// The offset was higher than the actual number of tracks. Adjust the offset
 		// and continue.
 		if offset >= result.Total {

@@ -17,83 +17,25 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
 package dump
 
 import (
-	"bytes"
 	"context"
 	"fmt"
-	"io"
-	"os"
-	"strings"
 
 	"go.uploadedlobster.com/scotty/internal/config"
-	"go.uploadedlobster.com/scotty/internal/i18n"
 	"go.uploadedlobster.com/scotty/internal/models"
 )
 
-type DumpBackend struct {
-	buffer io.ReadWriter
-	print  bool // Whether to print the output to stdout
-}
+type DumpBackend struct{}
 
 func (b *DumpBackend) Name() string { return "dump" }
 
-func (b *DumpBackend) Options() []models.BackendOption {
-	return []models.BackendOption{{
-		Name:  "file-path",
-		Label: i18n.Tr("File path"),
-		Type:  models.String,
-	}, {
-		Name:    "append",
-		Label:   i18n.Tr("Append to file"),
-		Type:    models.Bool,
-		Default: "true",
-	}}
-}
+func (b *DumpBackend) Options() []models.BackendOption { return nil }
 
 func (b *DumpBackend) InitConfig(config *config.ServiceConfig) error {
-	filePath := config.GetString("file-path")
-	append := config.GetBool("append", true)
-	if strings.TrimSpace(filePath) != "" {
-		mode := os.O_WRONLY | os.O_CREATE
-		if !append {
-			mode |= os.O_TRUNC // Truncate the file if not appending
-		}
-		f, err := os.OpenFile(filePath, mode, 0644)
-		if err != nil {
-			return err
-		}
-		b.buffer = f
-		b.print = false // If a file path is specified, we don't print to stdout
-	} else {
-		// If no file path is specified, use a bytes.Buffer for in-memory dumping
-		b.buffer = new(bytes.Buffer)
-		b.print = true // Print to stdout
-	}
 	return nil
 }
 
 func (b *DumpBackend) StartImport() error { return nil }
-
-func (b *DumpBackend) FinishImport(result *models.ImportResult) error {
-	if b.print {
-		out := new(strings.Builder)
-		_, err := io.Copy(out, b.buffer)
-		if err != nil {
-			return err
-		}
-
-		if result != nil {
-			result.Log(models.Output, out.String())
-		}
-	}
-
-	// Close the io writer if it is closable
-	if closer, ok := b.buffer.(io.Closer); ok {
-		if err := closer.Close(); err != nil {
-			return fmt.Errorf("failed to close output file: %w", err)
-		}
-	}
-	return nil
-}
+func (b *DumpBackend) FinishImport() error { return nil }
 
 func (b *DumpBackend) ImportListens(ctx context.Context, export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, listen := range export.Items {
@@ -103,11 +45,9 @@ func (b *DumpBackend) ImportListens(ctx context.Context, export models.ListensRe
 
 		importResult.UpdateTimestamp(listen.ListenedAt)
 		importResult.ImportCount += 1
-		_, err := fmt.Fprintf(b.buffer, "🎶 %v: \"%v\" by %v (%v)\n",
+		msg := fmt.Sprintf("🎶 %v: \"%v\" by %v (%v)",
 			listen.ListenedAt, listen.TrackName, listen.ArtistName(), listen.RecordingMBID)
-		if err != nil {
-			return importResult, err
-		}
+		importResult.Log(models.Info, msg)
 		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}
 
@@ -122,11 +62,9 @@ func (b *DumpBackend) ImportLoves(ctx context.Context, export models.LovesResult
 
 		importResult.UpdateTimestamp(love.Created)
 		importResult.ImportCount += 1
-		_, err := fmt.Fprintf(b.buffer, "❤️ %v: \"%v\" by %v (%v)\n",
+		msg := fmt.Sprintf("❤️ %v: \"%v\" by %v (%v)",
 			love.Created, love.TrackName, love.ArtistName(), love.RecordingMBID)
-		if err != nil {
-			return importResult, err
-		}
+		importResult.Log(models.Info, msg)
 		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}
 
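
The removed dump-backend code above chose between a configured output file and an in-memory buffer that is printed at the end of the import. A standard-library-only sketch of that selection; names and the exact flag handling are illustrative (for instance, it adds os.O_APPEND explicitly, which the removed code did not):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
	"strings"
)

// openOutput returns the destination for dump output: the configured file if
// a path is set, otherwise an in-memory buffer to be printed at the end.
func openOutput(filePath string, appendToFile bool) (io.ReadWriter, bool, error) {
	if strings.TrimSpace(filePath) == "" {
		return new(bytes.Buffer), true, nil
	}
	mode := os.O_WRONLY | os.O_CREATE
	if appendToFile {
		mode |= os.O_APPEND
	} else {
		mode |= os.O_TRUNC
	}
	f, err := os.OpenFile(filePath, mode, 0644)
	return f, false, err
}

func main() {
	out, toStdout, err := openOutput("", true)
	if err != nil {
		panic(err)
	}
	fmt.Fprintln(out, "🎶 example listen")
	if toStdout {
		io.Copy(os.Stdout, out)
	}
	if closer, ok := out.(io.Closer); ok {
		closer.Close()
	}
}
```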

@@ -107,23 +107,22 @@ func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](
 
 	for exportResult := range results {
 		if err := ctx.Err(); err != nil {
-			processor.ImportBackend().FinishImport(&result)
+			processor.ImportBackend().FinishImport()
 			out <- handleError(result, err, progress)
 			return
 		}
 
-		importResult, err := processor.Import(
-			ctx, exportResult, result.Copy(), out, progress)
-		result.Update(&importResult)
+		importResult, err := processor.Import(ctx, exportResult, result, out, progress)
+		result.Update(importResult)
 		if err != nil {
-			processor.ImportBackend().FinishImport(&result)
+			processor.ImportBackend().FinishImport()
 			out <- handleError(result, err, progress)
 			return
 		}
 		progress <- p.FromImportResult(result, false)
 	}
 
-	if err := processor.ImportBackend().FinishImport(&result); err != nil {
+	if err := processor.ImportBackend().FinishImport(); err != nil {
 		out <- handleError(result, err, progress)
 		return
 	}

@@ -90,7 +90,7 @@ func (b *JSPFBackend) StartImport() error {
 	return b.readJSPF()
 }
 
-func (b *JSPFBackend) FinishImport(result *models.ImportResult) error {
+func (b *JSPFBackend) FinishImport() error {
 	return b.writeJSPF()
 }
 

@@ -70,10 +70,8 @@ func (b *LastfmApiBackend) InitConfig(config *config.ServiceConfig) error {
 	return nil
 }
 
 func (b *LastfmApiBackend) StartImport() error { return nil }
-func (b *LastfmApiBackend) FinishImport(result *models.ImportResult) error {
-	return nil
-}
+func (b *LastfmApiBackend) FinishImport() error { return nil }
 
 func (b *LastfmApiBackend) OAuth2Strategy(redirectURL *url.URL) auth.OAuth2Strategy {
 	return lastfmStrategy{

@@ -1,218 +0,0 @@
-/*
-Copyright © 2025 Philipp Wolfer <phw@uploadedlobster.com>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-
-package lbarchive
-
-import (
-	"context"
-	"time"
-
-	"go.uploadedlobster.com/musicbrainzws2"
-	lbapi "go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
-	"go.uploadedlobster.com/scotty/internal/config"
-	"go.uploadedlobster.com/scotty/internal/i18n"
-	"go.uploadedlobster.com/scotty/internal/listenbrainz"
-	"go.uploadedlobster.com/scotty/internal/models"
-	"go.uploadedlobster.com/scotty/internal/version"
-)
-
-const (
-	listensBatchSize = 2000
-	lovesBatchSize   = listenbrainz.MaxItemsPerGet
-)
-
-type ListenBrainzArchiveBackend struct {
-	filePath string
-	lbClient listenbrainz.Client
-	mbClient musicbrainzws2.Client
-}
-
-func (b *ListenBrainzArchiveBackend) Name() string { return "listenbrainz-archive" }
-
-func (b *ListenBrainzArchiveBackend) Options() []models.BackendOption {
-	return []models.BackendOption{{
-		Name:  "archive-path",
-		Label: i18n.Tr("Archive path"),
-		Type:  models.String,
-	}}
-}
-
-func (b *ListenBrainzArchiveBackend) InitConfig(config *config.ServiceConfig) error {
-	b.filePath = config.GetString("archive-path")
-	b.lbClient = listenbrainz.NewClient("", version.UserAgent())
-	b.mbClient = *musicbrainzws2.NewClient(musicbrainzws2.AppInfo{
-		Name:    version.AppName,
-		Version: version.AppVersion,
-		URL:     version.AppURL,
-	})
-	return nil
-}
-
-func (b *ListenBrainzArchiveBackend) ExportListens(
-	ctx context.Context, oldestTimestamp time.Time,
-	results chan models.ListensResult, progress chan models.TransferProgress) {
-	startTime := time.Now()
-	minTime := oldestTimestamp
-	if minTime.Unix() < 1 {
-		minTime = time.Unix(1, 0)
-	}
-
-	totalDuration := startTime.Sub(oldestTimestamp)
-	p := models.TransferProgress{
-		Export: &models.Progress{
-			Total: int64(totalDuration.Seconds()),
-		},
-	}
-
-	archive, err := listenbrainz.OpenExportArchive(b.filePath)
-	if err != nil {
-		p.Export.Abort()
-		progress <- p
-		results <- models.ListensResult{Error: err}
-		return
-	}
-	defer archive.Close()
-
-	userInfo, err := archive.UserInfo()
-	if err != nil {
-		p.Export.Abort()
-		progress <- p
-		results <- models.ListensResult{Error: err}
-		return
-	}
-
-	listens := make(models.ListensList, 0, listensBatchSize)
-	for rawListen, err := range archive.IterListens(oldestTimestamp) {
-		if err != nil {
-			p.Export.Abort()
-			progress <- p
-			results <- models.ListensResult{Error: err}
-			return
-		}
-
-		listen := lbapi.AsListen(rawListen)
-		if listen.UserName == "" {
-			listen.UserName = userInfo.Name
-		}
-		listens = append(listens, listen)
-
-		// Update the progress
-		p.Export.TotalItems += 1
-		remainingTime := startTime.Sub(listen.ListenedAt)
-		p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
-
-		// Allow the importer to start processing the listens by
-		// sending them in batches.
-		if len(listens) >= listensBatchSize {
-			results <- models.ListensResult{Items: listens}
-			progress <- p
-			listens = listens[:0]
-		}
-	}
-
-	results <- models.ListensResult{Items: listens}
-	p.Export.Complete()
-	progress <- p
-}
-
-func (b *ListenBrainzArchiveBackend) ExportLoves(
-	ctx context.Context, oldestTimestamp time.Time,
-	results chan models.LovesResult, progress chan models.TransferProgress) {
-	startTime := time.Now()
-	minTime := oldestTimestamp
-	if minTime.Unix() < 1 {
-		minTime = time.Unix(1, 0)
-	}
-
-	totalDuration := startTime.Sub(oldestTimestamp)
-	p := models.TransferProgress{
-		Export: &models.Progress{
-			Total: int64(totalDuration.Seconds()),
-		},
-	}
-
-	archive, err := listenbrainz.OpenExportArchive(b.filePath)
-	if err != nil {
-		p.Export.Abort()
-		progress <- p
-		results <- models.LovesResult{Error: err}
-		return
-	}
-	defer archive.Close()
-
-	userInfo, err := archive.UserInfo()
-	if err != nil {
-		p.Export.Abort()
-		progress <- p
-		results <- models.LovesResult{Error: err}
-		return
-	}
-
-	batch := make([]listenbrainz.Feedback, 0, lovesBatchSize)
-	for feedback, err := range archive.IterFeedback(oldestTimestamp) {
-		if err != nil {
-			p.Export.Abort()
-			progress <- p
-			results <- models.LovesResult{Error: err}
-			return
-		}
-
-		if feedback.UserName == "" {
-			feedback.UserName = userInfo.Name
-		}
-
-		batch = append(batch, feedback)
-
-		// Update the progress
-		p.Export.TotalItems += 1
-		remainingTime := startTime.Sub(time.Unix(feedback.Created, 0))
-		p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
-
-		// Allow the importer to start processing the listens by
-		// sending them in batches.
-		if len(batch) >= lovesBatchSize {
-			// The dump does not contain track metadata. Extend it with additional
-			// lookups
-			loves, err := lbapi.ExtendTrackMetadata(ctx, &b.lbClient, &b.mbClient, &batch)
-			if err != nil {
-				p.Export.Abort()
-				progress <- p
-				results <- models.LovesResult{Error: err}
-				return
-			}
-
-			results <- models.LovesResult{Items: loves}
-			progress <- p
-			batch = batch[:0]
-		}
-	}
-
-	loves, err := lbapi.ExtendTrackMetadata(ctx, &b.lbClient, &b.mbClient, &batch)
-	if err != nil {
-		p.Export.Abort()
-		progress <- p
-		results <- models.LovesResult{Error: err}
-		return
-	}
-	results <- models.LovesResult{Items: loves}
-	p.Export.Complete()
-	progress <- p
-}
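
The export loops above stream listens and loves to the importer in batches over a channel, reusing the same slice via `listens = listens[:0]`. A standalone sketch of that batching pattern with local stand-in types; it allocates a fresh slice per batch instead, since a receiver may still be reading the previously sent one.

```go
package main

import "fmt"

type listen struct{ track string }

// export sends items to the importer in batches of batchSize.
func export(items []listen, batchSize int, results chan<- []listen) {
	batch := make([]listen, 0, batchSize)
	for _, it := range items {
		batch = append(batch, it)
		if len(batch) >= batchSize {
			results <- batch
			batch = make([]listen, 0, batchSize) // fresh backing array per batch
		}
	}
	results <- batch // send the final partial batch
	close(results)
}

func main() {
	results := make(chan []listen)
	go export([]listen{{"a"}, {"b"}, {"c"}}, 2, results)
	for batch := range results {
		fmt.Println(len(batch), "listens in batch")
	}
}
```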

@@ -1,40 +0,0 @@
-/*
-Copyright © 2025 Philipp Wolfer <phw@uploadedlobster.com>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-
-package lbarchive_test
-
-import (
-	"testing"
-
-	"github.com/spf13/viper"
-	"github.com/stretchr/testify/assert"
-	"go.uploadedlobster.com/scotty/internal/backends/lbarchive"
-	"go.uploadedlobster.com/scotty/internal/config"
-)
-
-func TestInitConfig(t *testing.T) {
-	c := viper.New()
-	c.Set("file-path", "/foo/lbarchive.zip")
-	service := config.NewServiceConfig("test", c)
-	backend := lbarchive.ListenBrainzArchiveBackend{}
-	err := backend.InitConfig(&service)
-	assert.NoError(t, err)
-}

@@ -28,7 +28,7 @@ import (
 	"time"
 
 	"github.com/go-resty/resty/v2"
-	"go.uploadedlobster.com/mbtypes"
+	"go.uploadedlobster.com/scotty/internal/version"
 	"go.uploadedlobster.com/scotty/pkg/ratelimit"
 )
 
@@ -44,13 +44,13 @@ type Client struct {
 	MaxResults int
 }
 
-func NewClient(token string, userAgent string) Client {
+func NewClient(token string) Client {
 	client := resty.New()
 	client.SetBaseURL(listenBrainzBaseURL)
 	client.SetAuthScheme("Token")
 	client.SetAuthToken(token)
 	client.SetHeader("Accept", "application/json")
-	client.SetHeader("User-Agent", userAgent)
+	client.SetHeader("User-Agent", version.UserAgent())
 
 	// Handle rate limiting (see https://listenbrainz.readthedocs.io/en/latest/users/api/index.html#rate-limiting)
 	ratelimit.EnableHTTPHeaderRateLimit(client, "X-RateLimit-Reset-In")
@@ -159,24 +159,3 @@ func (c Client) Lookup(ctx context.Context, recordingName string, artistName str
 	}
 	return
 }
-
-func (c Client) MetadataRecordings(ctx context.Context, mbids []mbtypes.MBID) (result RecordingMetadataResult, err error) {
-	const path = "/metadata/recording/"
-	errorResult := ErrorResult{}
-	body := RecordingMetadataRequest{
-		RecordingMBIDs: mbids,
-		Includes:       "artist release",
-	}
-	response, err := c.HTTPClient.R().
-		SetContext(ctx).
-		SetBody(body).
-		SetResult(&result).
-		SetError(&errorResult).
-		Post(path)
-
-	if !response.IsSuccess() {
-		err = errors.New(errorResult.Error)
-		return
-	}
-	return
-}
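
The two sides of this diff construct the ListenBrainz client differently: on 4da5697435 the caller passes the User-Agent string, on 34b6bb9aa3 the client sets it internally via version.UserAgent(). A hedged sketch of the calling code under each signature, using only the import paths visible in the hunks:

```go
package main

import (
	"go.uploadedlobster.com/scotty/internal/listenbrainz" // location on the 4da5697435 side
	"go.uploadedlobster.com/scotty/internal/version"
)

func main() {
	// 4da5697435: the User-Agent is supplied by the caller.
	client := listenbrainz.NewClient("the-token", version.UserAgent())
	_ = client

	// 34b6bb9aa3: the package lives under internal/backends/listenbrainz and
	// NewClient takes only the token; the User-Agent is derived internally.
	// client := listenbrainz.NewClient("the-token")
}
```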

@@ -31,12 +31,12 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 	"go.uploadedlobster.com/mbtypes"
-	"go.uploadedlobster.com/scotty/internal/listenbrainz"
+	"go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
 )
 
 func TestNewClient(t *testing.T) {
 	token := "foobar123"
-	client := listenbrainz.NewClient(token, "test/1.0")
+	client := listenbrainz.NewClient(token)
 	assert.Equal(t, token, client.HTTPClient.Token)
 	assert.Equal(t, listenbrainz.DefaultItemsPerGet, client.MaxResults)
 }
@@ -44,7 +44,7 @@ func TestNewClient(t *testing.T) {
 func TestGetListens(t *testing.T) {
 	defer httpmock.DeactivateAndReset()
 
-	client := listenbrainz.NewClient("thetoken", "test/1.0")
+	client := listenbrainz.NewClient("thetoken")
 	client.MaxResults = 2
 	setupHTTPMock(t, client.HTTPClient.GetClient(),
 		"https://api.listenbrainz.org/1/user/outsidecontext/listens",
@@ -64,7 +64,7 @@ func TestGetListens(t *testing.T) {
 }
 
 func TestSubmitListens(t *testing.T) {
-	client := listenbrainz.NewClient("thetoken", "test/1.0")
+	client := listenbrainz.NewClient("thetoken")
 	httpmock.ActivateNonDefault(client.HTTPClient.GetClient())
 
 	responder, err := httpmock.NewJsonResponder(200, listenbrainz.StatusResult{
@@ -104,7 +104,7 @@ func TestSubmitListens(t *testing.T) {
 func TestGetFeedback(t *testing.T) {
 	defer httpmock.DeactivateAndReset()
 
-	client := listenbrainz.NewClient("thetoken", "test/1.0")
+	client := listenbrainz.NewClient("thetoken")
 	client.MaxResults = 2
 	setupHTTPMock(t, client.HTTPClient.GetClient(),
 		"https://api.listenbrainz.org/1/feedback/user/outsidecontext/get-feedback",
@@ -123,7 +123,7 @@ func TestGetFeedback(t *testing.T) {
 }
 
 func TestSendFeedback(t *testing.T) {
-	client := listenbrainz.NewClient("thetoken", "test/1.0")
+	client := listenbrainz.NewClient("thetoken")
 	httpmock.ActivateNonDefault(client.HTTPClient.GetClient())
 
 	responder, err := httpmock.NewJsonResponder(200, listenbrainz.StatusResult{
@@ -149,7 +149,7 @@ func TestSendFeedback(t *testing.T) {
 func TestLookup(t *testing.T) {
 	defer httpmock.DeactivateAndReset()
 
-	client := listenbrainz.NewClient("thetoken", "test/1.0")
+	client := listenbrainz.NewClient("thetoken")
 	setupHTTPMock(t, client.HTTPClient.GetClient(),
 		"https://api.listenbrainz.org/1/metadata/lookup",
 		"testdata/lookup.json")
@ -1,190 +0,0 @@
|
||||||
/*
|
|
||||||
Copyright © 2025 Philipp Wolfer <phw@uploadedlobster.com>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
||||||
THE SOFTWARE.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package listenbrainz
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"go.uploadedlobster.com/mbtypes"
|
|
||||||
"go.uploadedlobster.com/musicbrainzws2"
|
|
||||||
"go.uploadedlobster.com/scotty/internal/listenbrainz"
|
|
||||||
"go.uploadedlobster.com/scotty/internal/models"
|
|
||||||
)
|
|
||||||
|
|
||||||
func AsListen(lbListen listenbrainz.Listen) models.Listen {
|
|
||||||
listen := models.Listen{
|
|
||||||
ListenedAt: time.Unix(lbListen.ListenedAt, 0),
|
|
||||||
UserName: lbListen.UserName,
|
|
||||||
Track: AsTrack(lbListen.TrackMetadata),
|
|
||||||
}
|
|
||||||
return listen
|
|
||||||
}
|
|
||||||
|
|
||||||
func AsLove(f listenbrainz.Feedback) models.Love {
|
|
||||||
recordingMBID := f.RecordingMBID
|
|
||||||
track := f.TrackMetadata
|
|
||||||
if track == nil {
|
|
||||||
track = &listenbrainz.Track{}
|
|
||||||
}
|
|
||||||
love := models.Love{
|
|
||||||
UserName: f.UserName,
|
|
||||||
RecordingMBID: recordingMBID,
|
|
||||||
Created: time.Unix(f.Created, 0),
|
|
||||||
Track: AsTrack(*track),
|
|
||||||
}
|
|
||||||
|
|
||||||
if love.Track.RecordingMBID == "" {
|
|
||||||
love.Track.RecordingMBID = love.RecordingMBID
|
|
||||||
}
|
|
||||||
|
|
||||||
return love
|
|
||||||
}
|
|
||||||
|
|
||||||
func AsTrack(t listenbrainz.Track) models.Track {
|
|
||||||
track := models.Track{
|
|
||||||
TrackName: t.TrackName,
|
|
||||||
ReleaseName: t.ReleaseName,
|
|
||||||
ArtistNames: []string{t.ArtistName},
|
|
||||||
Duration: t.Duration(),
|
|
||||||
TrackNumber: t.TrackNumber(),
|
|
||||||
DiscNumber: t.DiscNumber(),
|
|
||||||
RecordingMBID: t.RecordingMBID(),
|
|
||||||
ReleaseMBID: t.ReleaseMBID(),
|
|
||||||
ReleaseGroupMBID: t.ReleaseGroupMBID(),
|
|
||||||
ISRC: t.ISRC(),
|
|
||||||
AdditionalInfo: t.AdditionalInfo,
|
|
||||||
}
|
|
||||||
|
|
||||||
if t.MBIDMapping != nil && len(track.ArtistMBIDs) == 0 {
|
|
||||||
for _, artistMBID := range t.MBIDMapping.ArtistMBIDs {
|
|
||||||
-            track.ArtistMBIDs = append(track.ArtistMBIDs, artistMBID)
-        }
-    }
-
-    return track
-}
-
-func LookupRecording(
-    ctx context.Context,
-    mb *musicbrainzws2.Client,
-    mbid mbtypes.MBID,
-) (*listenbrainz.Track, error) {
-    filter := musicbrainzws2.IncludesFilter{
-        Includes: []string{"artist-credits"},
-    }
-    recording, err := mb.LookupRecording(ctx, mbid, filter)
-    if err != nil {
-        return nil, err
-    }
-
-    artistMBIDs := make([]mbtypes.MBID, 0, len(recording.ArtistCredit))
-    for _, artist := range recording.ArtistCredit {
-        artistMBIDs = append(artistMBIDs, artist.Artist.ID)
-    }
-    track := listenbrainz.Track{
-        TrackName:  recording.Title,
-        ArtistName: recording.ArtistCredit.String(),
-        MBIDMapping: &listenbrainz.MBIDMapping{
-            // In case of redirects this MBID differs from the looked up MBID
-            RecordingMBID: recording.ID,
-            ArtistMBIDs:   artistMBIDs,
-        },
-    }
-    return &track, nil
-}
-
-func ExtendTrackMetadata(
-    ctx context.Context,
-    lb *listenbrainz.Client,
-    mb *musicbrainzws2.Client,
-    feedbacks *[]listenbrainz.Feedback,
-) ([]models.Love, error) {
-    mbids := make([]mbtypes.MBID, 0, len(*feedbacks))
-    for _, feedback := range *feedbacks {
-        if feedback.TrackMetadata == nil && feedback.RecordingMBID != "" {
-            mbids = append(mbids, feedback.RecordingMBID)
-        }
-    }
-    result, err := lb.MetadataRecordings(ctx, mbids)
-    if err != nil {
-        return nil, err
-    }
-
-    loves := make([]models.Love, 0, len(*feedbacks))
-    for _, feedback := range *feedbacks {
-        if feedback.TrackMetadata == nil && feedback.RecordingMBID != "" {
-            metadata, ok := result[feedback.RecordingMBID]
-            if ok {
-                feedback.TrackMetadata = trackFromMetadataLookup(
-                    feedback.RecordingMBID, metadata)
-            } else {
-                // MBID not in result. This is probably a MBID redirect, get
-                // data from MB instead (slower).
-                // If this also fails, just leave the metadata empty.
-                track, err := LookupRecording(ctx, mb, feedback.RecordingMBID)
-                if err == nil {
-                    feedback.TrackMetadata = track
-                }
-            }
-        }
-
-        loves = append(loves, AsLove(feedback))
-    }
-
-    return loves, nil
-}
-
-func trackFromMetadataLookup(
-    recordingMBID mbtypes.MBID,
-    metadata listenbrainz.RecordingMetadata,
-) *listenbrainz.Track {
-    artistMBIDs := make([]mbtypes.MBID, 0, len(metadata.Artist.Artists))
-    artists := make([]listenbrainz.Artist, 0, len(metadata.Artist.Artists))
-    for _, artist := range metadata.Artist.Artists {
-        artistMBIDs = append(artistMBIDs, artist.ArtistMBID)
-        artists = append(artists, listenbrainz.Artist{
-            ArtistCreditName: artist.Name,
-            ArtistMBID:       artist.ArtistMBID,
-            JoinPhrase:       artist.JoinPhrase,
-        })
-    }
-
-    return &listenbrainz.Track{
-        TrackName:   metadata.Recording.Name,
-        ArtistName:  metadata.Artist.Name,
-        ReleaseName: metadata.Release.Name,
-        AdditionalInfo: map[string]any{
-            "duration_ms":        metadata.Recording.Length,
-            "release_group_mbid": metadata.Release.ReleaseGroupMBID,
-        },
-        MBIDMapping: &listenbrainz.MBIDMapping{
-            RecordingMBID:  recordingMBID,
-            ReleaseMBID:    metadata.Release.MBID,
-            ArtistMBIDs:    artistMBIDs,
-            Artists:        artists,
-            CAAID:          metadata.Release.CAAID,
-            CAAReleaseMBID: metadata.Release.CAAReleaseMBID,
-        },
-    }
-}
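The removed `ExtendTrackMetadata` helper above collects the MBIDs that still lack metadata, resolves them in one batched ListenBrainz request, and only falls back to a per-recording MusicBrainz lookup for MBIDs missing from the result. A self-contained sketch of that pattern, with simplified stand-in types and lookup functions rather than the real scotty/ListenBrainz APIs:

```go
package main

import "fmt"

// feedback is a simplified stand-in for the ListenBrainz feedback items.
type feedback struct {
	MBID  string
	Title string // empty means metadata is still missing
}

// batchLookup stands in for the single batched metadata request.
func batchLookup(mbids []string) map[string]string {
	out := make(map[string]string, len(mbids))
	for _, id := range mbids {
		if id != "redirected-mbid" { // simulate one MBID missing from the result
			out[id] = "title for " + id
		}
	}
	return out
}

// slowLookup stands in for the per-recording MusicBrainz fallback.
func slowLookup(mbid string) string { return "resolved title for " + mbid }

func main() {
	items := []feedback{{MBID: "mbid-1"}, {MBID: "redirected-mbid"}, {MBID: "mbid-2", Title: "already known"}}

	// 1. Collect only the MBIDs that still need metadata.
	missing := make([]string, 0, len(items))
	for _, f := range items {
		if f.Title == "" && f.MBID != "" {
			missing = append(missing, f.MBID)
		}
	}

	// 2. Resolve them all with one batched request.
	found := batchLookup(missing)

	// 3. Fill in the results, falling back to the slower per-item lookup.
	for i := range items {
		if items[i].Title != "" || items[i].MBID == "" {
			continue
		}
		if title, ok := found[items[i].MBID]; ok {
			items[i].Title = title
		} else {
			items[i].Title = slowLookup(items[i].MBID)
		}
	}

	fmt.Println(items)
}
```

The point of this shape is that the common case costs a single request; the slow path only runs for the rare redirected MBIDs.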
@@ -26,16 +26,13 @@ import (
     "go.uploadedlobster.com/musicbrainzws2"
     "go.uploadedlobster.com/scotty/internal/config"
     "go.uploadedlobster.com/scotty/internal/i18n"
-    "go.uploadedlobster.com/scotty/internal/listenbrainz"
     "go.uploadedlobster.com/scotty/internal/models"
     "go.uploadedlobster.com/scotty/internal/similarity"
     "go.uploadedlobster.com/scotty/internal/version"
 )

-const lovesBatchSize = listenbrainz.MaxItemsPerGet
-
 type ListenBrainzApiBackend struct {
-    client          listenbrainz.Client
+    client          Client
     mbClient        musicbrainzws2.Client
     username        string
     checkDuplicates bool
@@ -61,22 +58,20 @@ func (b *ListenBrainzApiBackend) Options() []models.BackendOption {
 }

 func (b *ListenBrainzApiBackend) InitConfig(config *config.ServiceConfig) error {
-    b.client = listenbrainz.NewClient(config.GetString("token"), version.UserAgent())
+    b.client = NewClient(config.GetString("token"))
     b.mbClient = *musicbrainzws2.NewClient(musicbrainzws2.AppInfo{
         Name:    version.AppName,
         Version: version.AppVersion,
         URL:     version.AppURL,
     })
-    b.client.MaxResults = listenbrainz.MaxItemsPerGet
+    b.client.MaxResults = MaxItemsPerGet
     b.username = config.GetString("username")
     b.checkDuplicates = config.GetBool("check-duplicate-listens", false)
     return nil
 }

 func (b *ListenBrainzApiBackend) StartImport() error { return nil }
-func (b *ListenBrainzApiBackend) FinishImport(result *models.ImportResult) error {
-    return nil
-}
+func (b *ListenBrainzApiBackend) FinishImport() error { return nil }

 func (b *ListenBrainzApiBackend) ExportListens(ctx context.Context, oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
     startTime := time.Now()
@@ -121,7 +116,7 @@ func (b *ListenBrainzApiBackend) ExportListens(ctx context.Context, oldestTimest

         for _, listen := range result.Payload.Listens {
             if listen.ListenedAt > oldestTimestamp.Unix() {
-                listens = append(listens, AsListen(listen))
+                listens = append(listens, listen.AsListen())
             } else {
                 // result contains listens older then oldestTimestamp
                 break
@@ -143,16 +138,16 @@ func (b *ListenBrainzApiBackend) ExportListens(ctx context.Context, oldestTimest
 func (b *ListenBrainzApiBackend) ImportListens(ctx context.Context, export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
     total := len(export.Items)
     p := models.TransferProgress{}.FromImportResult(importResult, false)
-    for i := 0; i < total; i += listenbrainz.MaxListensPerRequest {
-        listens := export.Items[i:min(i+listenbrainz.MaxListensPerRequest, total)]
+    for i := 0; i < total; i += MaxListensPerRequest {
+        listens := export.Items[i:min(i+MaxListensPerRequest, total)]
         count := len(listens)
         if count == 0 {
             break
         }

-        submission := listenbrainz.ListenSubmission{
-            ListenType: listenbrainz.Import,
-            Payload:    make([]listenbrainz.Listen, 0, count),
+        submission := ListenSubmission{
+            ListenType: Import,
+            Payload:    make([]Listen, 0, count),
         }

         for _, l := range listens {
@@ -167,15 +162,14 @@ func (b *ListenBrainzApiBackend) ImportListens(ctx context.Context, export model
                 msg := i18n.Tr("Ignored duplicate listen %v: \"%v\" by %v (%v)",
                     l.ListenedAt, l.TrackName, l.ArtistName(), l.RecordingMBID)
                 importResult.Log(models.Info, msg)
-                importResult.UpdateTimestamp(l.ListenedAt)
                 continue
             }
         }

         l.FillAdditionalInfo()
-        listen := listenbrainz.Listen{
+        listen := Listen{
             ListenedAt: l.ListenedAt.Unix(),
-            TrackMetadata: listenbrainz.Track{
+            TrackMetadata: Track{
                 TrackName:   l.TrackName,
                 ReleaseName: l.ReleaseName,
                 ArtistName:  l.ArtistName(),
@@ -234,8 +228,7 @@ func (b *ListenBrainzApiBackend) ExportLoves(ctx context.Context, oldestTimestam
 func (b *ListenBrainzApiBackend) exportLoves(ctx context.Context, oldestTimestamp time.Time, results chan models.LovesResult) {
     offset := 0
     defer close(results)
-    allLoves := make(models.LovesList, 0, 2*listenbrainz.MaxItemsPerGet)
-    batch := make([]listenbrainz.Feedback, 0, lovesBatchSize)
+    loves := make(models.LovesList, 0, 2*MaxItemsPerGet)

 out:
     for {
@@ -251,45 +244,31 @@ out:
         }

         for _, feedback := range result.Feedback {
-            if time.Unix(feedback.Created, 0).After(oldestTimestamp) {
-                batch = append(batch, feedback)
+            // Missing track metadata indicates that the recording MBID is no
+            // longer available and might have been merged. Try fetching details
+            // from MusicBrainz.
+            if feedback.TrackMetadata == nil {
+                track, err := b.lookupRecording(ctx, feedback.RecordingMBID)
+                if err == nil {
+                    feedback.TrackMetadata = track
+                }
+            }
+
+            love := feedback.AsLove()
+            if love.Created.After(oldestTimestamp) {
+                loves = append(loves, love)
             } else {
                 break out
             }
-
-            if len(batch) >= lovesBatchSize {
-                // Missing track metadata indicates that the recording MBID is no
-                // longer available and might have been merged. Try fetching details
-                // from MusicBrainz.
-                lovesBatch, err := ExtendTrackMetadata(ctx, &b.client, &b.mbClient, &batch)
-                if err != nil {
-                    results <- models.LovesResult{Error: err}
-                    return
-                }
-
-                for _, l := range lovesBatch {
-                    allLoves = append(allLoves, l)
-                }
-            }
         }

-        offset += listenbrainz.MaxItemsPerGet
+        offset += MaxItemsPerGet
     }

-    lovesBatch, err := ExtendTrackMetadata(ctx, &b.client, &b.mbClient, &batch)
-    if err != nil {
-        results <- models.LovesResult{Error: err}
-        return
-    }
-
-    for _, l := range lovesBatch {
-        allLoves = append(allLoves, l)
-    }
-
-    sort.Sort(allLoves)
+    sort.Sort(loves)
     results <- models.LovesResult{
-        Total: len(allLoves),
-        Items: allLoves,
+        Total: len(loves),
+        Items: loves,
     }
 }

@@ -299,7 +278,7 @@ func (b *ListenBrainzApiBackend) ImportLoves(ctx context.Context, export models.
     go b.exportLoves(ctx, time.Unix(0, 0), existingLovesChan)

     // TODO: Store MBIDs directly
-    b.existingMBIDs = make(map[mbtypes.MBID]bool, listenbrainz.MaxItemsPerGet)
+    b.existingMBIDs = make(map[mbtypes.MBID]bool, MaxItemsPerGet)

     for existingLoves := range existingLovesChan {
         if existingLoves.Error != nil {
@@ -337,7 +316,7 @@ func (b *ListenBrainzApiBackend) ImportLoves(ctx context.Context, export models.
             if b.existingMBIDs[recordingMBID] {
                 ok = true
             } else {
-                resp, err := b.client.SendFeedback(ctx, listenbrainz.Feedback{
+                resp, err := b.client.SendFeedback(ctx, Feedback{
                     RecordingMBID: recordingMBID,
                     Score:         1,
                 })
@@ -387,7 +366,7 @@ func (b *ListenBrainzApiBackend) checkDuplicateListen(ctx context.Context, liste
     }

     for _, c := range candidates.Payload.Listens {
-        sim := similarity.CompareTracks(listen.Track, AsTrack(c.TrackMetadata))
+        sim := similarity.CompareTracks(listen.Track, c.TrackMetadata.AsTrack())
         if sim >= trackSimilarityThreshold {
             return true, nil
         }
@@ -395,3 +374,81 @@ func (b *ListenBrainzApiBackend) checkDuplicateListen(ctx context.Context, liste

     return false, nil
 }
+
+func (b *ListenBrainzApiBackend) lookupRecording(ctx context.Context, mbid mbtypes.MBID) (*Track, error) {
+    filter := musicbrainzws2.IncludesFilter{
+        Includes: []string{"artist-credits"},
+    }
+    recording, err := b.mbClient.LookupRecording(ctx, mbid, filter)
+    if err != nil {
+        return nil, err
+    }
+
+    artistMBIDs := make([]mbtypes.MBID, 0, len(recording.ArtistCredit))
+    for _, artist := range recording.ArtistCredit {
+        artistMBIDs = append(artistMBIDs, artist.Artist.ID)
+    }
+    track := Track{
+        TrackName:  recording.Title,
+        ArtistName: recording.ArtistCredit.String(),
+        MBIDMapping: &MBIDMapping{
+            // In case of redirects this MBID differs from the looked up MBID
+            RecordingMBID: recording.ID,
+            ArtistMBIDs:   artistMBIDs,
+        },
+    }
+    return &track, nil
+}
+
+func (lbListen Listen) AsListen() models.Listen {
+    listen := models.Listen{
+        ListenedAt: time.Unix(lbListen.ListenedAt, 0),
+        UserName:   lbListen.UserName,
+        Track:      lbListen.TrackMetadata.AsTrack(),
+    }
+    return listen
+}
+
+func (f Feedback) AsLove() models.Love {
+    recordingMBID := f.RecordingMBID
+    track := f.TrackMetadata
+    if track == nil {
+        track = &Track{}
+    }
+    love := models.Love{
+        UserName:      f.UserName,
+        RecordingMBID: recordingMBID,
+        Created:       time.Unix(f.Created, 0),
+        Track:         track.AsTrack(),
+    }
+
+    if love.Track.RecordingMBID == "" {
+        love.Track.RecordingMBID = love.RecordingMBID
+    }
+
+    return love
+}
+
+func (t Track) AsTrack() models.Track {
+    track := models.Track{
+        TrackName:        t.TrackName,
+        ReleaseName:      t.ReleaseName,
+        ArtistNames:      []string{t.ArtistName},
+        Duration:         t.Duration(),
+        TrackNumber:      t.TrackNumber(),
+        DiscNumber:       t.DiscNumber(),
+        RecordingMBID:    t.RecordingMBID(),
+        ReleaseMBID:      t.ReleaseMBID(),
+        ReleaseGroupMBID: t.ReleaseGroupMBID(),
+        ISRC:             t.ISRC(),
+        AdditionalInfo:   t.AdditionalInfo,
+    }
+
+    if t.MBIDMapping != nil && len(track.ArtistMBIDs) == 0 {
+        for _, artistMBID := range t.MBIDMapping.ArtistMBIDs {
+            track.ArtistMBIDs = append(track.ArtistMBIDs, artistMBID)
+        }
+    }
+
+    return track
+}
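`ImportListens` above submits listens in chunks of `MaxListensPerRequest` by slicing the export with `min`. A minimal stand-alone sketch of that chunking loop (plain Go, no scotty types; the batch size is an arbitrary example):

```go
package main

import "fmt"

func main() {
	const maxPerRequest = 3 // arbitrary example batch size
	items := []string{"a", "b", "c", "d", "e", "f", "g"}

	total := len(items)
	for i := 0; i < total; i += maxPerRequest {
		// Slice the next chunk; min (Go 1.21+) caps the upper bound.
		batch := items[i:min(i+maxPerRequest, total)]
		if len(batch) == 0 {
			break
		}
		// In the backend each chunk becomes one submission payload.
		fmt.Println("submit batch:", batch)
	}
}
```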
@@ -24,16 +24,15 @@ import (
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
     "go.uploadedlobster.com/mbtypes"
-    lbapi "go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
+    "go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
     "go.uploadedlobster.com/scotty/internal/config"
-    "go.uploadedlobster.com/scotty/internal/listenbrainz"
 )

 func TestInitConfig(t *testing.T) {
     c := viper.New()
     c.Set("token", "thetoken")
     service := config.NewServiceConfig("test", c)
-    backend := lbapi.ListenBrainzApiBackend{}
+    backend := listenbrainz.ListenBrainzApiBackend{}
     err := backend.InitConfig(&service)
     assert.NoError(t, err)
 }
@@ -58,7 +57,7 @@ func TestListenBrainzListenAsListen(t *testing.T) {
             },
         },
     }
-    listen := lbapi.AsListen(lbListen)
+    listen := lbListen.AsListen()
     assert.Equal(t, time.Unix(1699289873, 0), listen.ListenedAt)
     assert.Equal(t, lbListen.UserName, listen.UserName)
     assert.Equal(t, time.Duration(413787*time.Millisecond), listen.Duration)
@@ -94,7 +93,7 @@ func TestListenBrainzFeedbackAsLove(t *testing.T) {
             },
         },
     }
-    love := lbapi.AsLove(feedback)
+    love := feedback.AsLove()
     assert := assert.New(t)
     assert.Equal(time.Unix(1699859066, 0).Unix(), love.Created.Unix())
     assert.Equal(feedback.UserName, love.UserName)
@@ -115,7 +114,7 @@ func TestListenBrainzPartialFeedbackAsLove(t *testing.T) {
         RecordingMBID: recordingMBID,
         Score:         1,
     }
-    love := lbapi.AsLove(feedback)
+    love := feedback.AsLove()
     assert := assert.New(t)
     assert.Equal(time.Unix(1699859066, 0).Unix(), love.Created.Unix())
     assert.Equal(recordingMBID, love.RecordingMBID)
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -55,36 +55,33 @@ type ListenSubmission struct {
 }

 type Listen struct {
-    InsertedAt    float64 `json:"inserted_at,omitempty"`
+    InsertedAt    int64  `json:"inserted_at,omitempty"`
     ListenedAt    int64  `json:"listened_at"`
     RecordingMSID string `json:"recording_msid,omitempty"`
     UserName      string `json:"user_name,omitempty"`
     TrackMetadata Track  `json:"track_metadata"`
 }

 type Track struct {
     TrackName      string         `json:"track_name,omitempty"`
     ArtistName     string         `json:"artist_name,omitempty"`
     ReleaseName    string         `json:"release_name,omitempty"`
-    RecordingMSID  string         `json:"recording_msid,omitempty"`
     AdditionalInfo map[string]any `json:"additional_info,omitempty"`
     MBIDMapping    *MBIDMapping   `json:"mbid_mapping,omitempty"`
 }

 type MBIDMapping struct {
-    ArtistMBIDs    []mbtypes.MBID `json:"artist_mbids,omitempty"`
-    Artists        []Artist       `json:"artists,omitempty"`
-    RecordingMBID  mbtypes.MBID   `json:"recording_mbid,omitempty"`
-    RecordingName  string         `json:"recording_name,omitempty"`
-    ReleaseMBID    mbtypes.MBID   `json:"release_mbid,omitempty"`
-    CAAID          int            `json:"caa_id,omitempty"`
-    CAAReleaseMBID mbtypes.MBID   `json:"caa_release_mbid,omitempty"`
+    RecordingName string         `json:"recording_name,omitempty"`
+    RecordingMBID mbtypes.MBID   `json:"recording_mbid,omitempty"`
+    ReleaseMBID   mbtypes.MBID   `json:"release_mbid,omitempty"`
+    ArtistMBIDs   []mbtypes.MBID `json:"artist_mbids,omitempty"`
+    Artists       []Artist       `json:"artists,omitempty"`
 }

 type Artist struct {
     ArtistCreditName string       `json:"artist_credit_name,omitempty"`
-    ArtistMBID       mbtypes.MBID `json:"artist_mbid,omitempty"`
+    ArtistMBID       string       `json:"artist_mbid,omitempty"`
     JoinPhrase       string       `json:"join_phrase,omitempty"`
 }

 type GetFeedbackResult struct {
@@ -112,44 +109,6 @@ type LookupResult struct {
     ArtistMBIDs []mbtypes.MBID `json:"artist_mbids"`
 }

-type RecordingMetadataRequest struct {
-    RecordingMBIDs []mbtypes.MBID `json:"recording_mbids"`
-    Includes       string         `json:"inc,omitempty"`
-}
-
-// Result for a recording metadata lookup
-type RecordingMetadataResult map[mbtypes.MBID]RecordingMetadata
-
-type RecordingMetadata struct {
-    Artist struct {
-        Name           string `json:"name"`
-        ArtistCreditID int    `json:"artist_credit_id"`
-        Artists        []struct {
-            Name       string       `json:"name"`
-            Area       string       `json:"area"`
-            ArtistMBID mbtypes.MBID `json:"artist_mbid"`
-            JoinPhrase string       `json:"join_phrase"`
-            BeginYear  int          `json:"begin_year"`
-            Type       string       `json:"type"`
-            // todo rels
-        } `json:"artists"`
-    } `json:"artist"`
-    Recording struct {
-        Name   string `json:"name"`
-        Length int    `json:"length"`
-        // TODO rels
-    } `json:"recording"`
-    Release struct {
-        Name             string       `json:"name"`
-        AlbumArtistName  string       `json:"album_artist_name"`
-        Year             int          `json:"year"`
-        MBID             mbtypes.MBID `json:"mbid"`
-        ReleaseGroupMBID mbtypes.MBID `json:"release_group_mbid"`
-        CAAID            int          `json:"caa_id"`
-        CAAReleaseMBID   mbtypes.MBID `json:"caa_release_mbid"`
-    } `json:"release"`
-}
-
 type StatusResult struct {
     Status string `json:"status"`
 }
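The JSON tags on `Listen` and `Track` above define the ListenBrainz payload layout. A trimmed-down sketch showing what they produce when marshalled (the structs here are simplified stand-ins, not the full types):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed stand-ins for the Track and Listen structs shown above.
type track struct {
	TrackName      string         `json:"track_name,omitempty"`
	ArtistName     string         `json:"artist_name,omitempty"`
	ReleaseName    string         `json:"release_name,omitempty"`
	AdditionalInfo map[string]any `json:"additional_info,omitempty"`
}

type listen struct {
	ListenedAt    int64 `json:"listened_at"`
	TrackMetadata track `json:"track_metadata"`
}

func main() {
	l := listen{
		ListenedAt: 1699289873,
		TrackMetadata: track{
			TrackName:  "Example track",
			ArtistName: "Example artist",
			AdditionalInfo: map[string]any{
				"duration_ms": 413787,
			},
		},
	}

	out, err := json.MarshalIndent(l, "", "  ")
	if err != nil {
		fmt.Println("marshal failed:", err)
		return
	}
	// Empty optional fields (release_name here) are dropped thanks to omitempty.
	fmt.Println(string(out))
}
```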
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -29,7 +29,7 @@ import (
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
     "go.uploadedlobster.com/mbtypes"
-    "go.uploadedlobster.com/scotty/internal/listenbrainz"
+    "go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
 )

 func TestTrackDurationMillisecondsInt(t *testing.T) {
@@ -61,10 +61,8 @@ func (b *MalojaApiBackend) InitConfig(config *config.ServiceConfig) error {
     return nil
 }

 func (b *MalojaApiBackend) StartImport() error { return nil }
-func (b *MalojaApiBackend) FinishImport(result *models.ImportResult) error {
-    return nil
-}
+func (b *MalojaApiBackend) FinishImport() error { return nil }

 func (b *MalojaApiBackend) ExportListens(ctx context.Context, oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
     page := 0
@@ -126,7 +126,7 @@ func (b *ScrobblerLogBackend) StartImport() {
     return nil
 }

-func (b *ScrobblerLogBackend) FinishImport(result *models.ImportResult) error {
+func (b *ScrobblerLogBackend) FinishImport() error {
     return b.file.Close()
 }

@@ -1,82 +0,0 @@
-/*
-Copyright © 2025 Philipp Wolfer <phw@uploadedlobster.com>
-
-This file is part of Scotty.
-
-Scotty is free software: you can redistribute it and/or modify it under the
-terms of the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License, or (at your option) any later version.
-
-Scotty is distributed in the hope that it will be useful, but WITHOUT ANY
-WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License along with
-Scotty. If not, see <https://www.gnu.org/licenses/>.
-*/
-
-package spotifyhistory
-
-import (
-    "errors"
-    "sort"
-
-    "go.uploadedlobster.com/scotty/internal/archive"
-)
-
-var historyFileGlobs = []string{
-    "Spotify Extended Streaming History/Streaming_History_Audio_*.json",
-    "Streaming_History_Audio_*.json",
-}
-
-// Access a Spotify history archive.
-// This can be either the ZIP file as provided by Spotify
-// or a directory where this was extracted to.
-type HistoryArchive struct {
-    backend archive.Archive
-}
-
-// Open a Spotify history archive from file path.
-func OpenHistoryArchive(path string) (*HistoryArchive, error) {
-    backend, err := archive.OpenArchive(path)
-    if err != nil {
-        return nil, err
-    }
-
-    return &HistoryArchive{backend: backend}, nil
-}
-
-func (h *HistoryArchive) GetHistoryFiles() ([]archive.FileInfo, error) {
-    for _, glob := range historyFileGlobs {
-        files, err := h.backend.Glob(glob)
-        if err != nil {
-            return nil, err
-        }
-
-        if len(files) > 0 {
-            sort.Slice(files, func(i, j int) bool {
-                return files[i].Name < files[j].Name
-            })
-            return files, nil
-        }
-    }
-
-    // Found no files, fail
-    return nil, errors.New("found no history files in archive")
-}
-
-func readHistoryFile(f archive.OpenableFile) (StreamingHistory, error) {
-    file, err := f.Open()
-    if err != nil {
-        return nil, err
-    }
-
-    defer file.Close()
-    history := StreamingHistory{}
-    err = history.Read(file)
-    if err != nil {
-        return nil, err
-    }
-
-    return history, nil
-}
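`GetHistoryFiles` above tries each glob in order and returns the first non-empty, name-sorted match. A stand-alone sketch of the same strategy using `path/filepath` on an extracted directory instead of the internal `archive` package (the directory name is only an example):

```go
package main

import (
	"fmt"
	"path/filepath"
	"sort"
)

func main() {
	// Same glob preference order as historyFileGlobs above.
	globs := []string{
		"Spotify Extended Streaming History/Streaming_History_Audio_*.json",
		"Streaming_History_Audio_*.json",
	}
	root := "./my_spotify_data_extended" // assumed extraction directory

	for _, glob := range globs {
		files, err := filepath.Glob(filepath.Join(root, glob))
		if err != nil {
			fmt.Println("bad pattern:", err)
			return
		}
		if len(files) > 0 {
			sort.Strings(files) // process the files in name order
			fmt.Println("history files:", files)
			return
		}
	}
	fmt.Println("found no history files")
}
```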
@@ -19,6 +19,9 @@ package spotifyhistory

 import (
     "context"
+    "os"
+    "path/filepath"
+    "slices"
     "sort"
     "time"

@@ -27,8 +30,10 @@ import (
     "go.uploadedlobster.com/scotty/internal/models"
 )

+const historyFileGlob = "Streaming_History_Audio_*.json"
+
 type SpotifyHistoryBackend struct {
-    archivePath       string
+    dirPath           string
     ignoreIncognito   bool
     ignoreSkipped     bool
     skippedMinSeconds int
@@ -38,11 +43,9 @@ func (b *SpotifyHistoryBackend) Name() string { return "spotify-history" }

 func (b *SpotifyHistoryBackend) Options() []models.BackendOption {
     return []models.BackendOption{{
-        Name:        "archive-path",
-        Label:       i18n.Tr("Archive path"),
+        Name:  "dir-path",
+        Label: i18n.Tr("Directory path"),
         Type:  models.String,
-        Default:     "./my_spotify_data_extended.zip",
-        MigrateFrom: "dir-path",
     }, {
         Name:  "ignore-incognito",
         Label: i18n.Tr("Ignore listens in incognito mode"),
@@ -62,11 +65,7 @@ func (b *SpotifyHistoryBackend) Options() []models.BackendOption {
 }

 func (b *SpotifyHistoryBackend) InitConfig(config *config.ServiceConfig) error {
-    b.archivePath = config.GetString("archive-path")
-    // Backward compatibility
-    if b.archivePath == "" {
-        b.archivePath = config.GetString("dir-path")
-    }
+    b.dirPath = config.GetString("dir-path")
     b.ignoreIncognito = config.GetBool("ignore-incognito", true)
     b.ignoreSkipped = config.GetBool("ignore-skipped", false)
     b.skippedMinSeconds = config.GetInt("ignore-min-duration-seconds", 30)
@@ -74,19 +73,11 @@ func (b *SpotifyHistoryBackend) InitConfig(config *config.ServiceConfig) error {
 }

 func (b *SpotifyHistoryBackend) ExportListens(ctx context.Context, oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
+    files, err := filepath.Glob(filepath.Join(b.dirPath, historyFileGlob))
     p := models.TransferProgress{
         Export: &models.Progress{},
     }

-    archive, err := OpenHistoryArchive(b.archivePath)
-    if err != nil {
-        p.Export.Abort()
-        progress <- p
-        results <- models.ListensResult{Error: err}
-        return
-    }
-
-    files, err := archive.GetHistoryFiles()
     if err != nil {
         p.Export.Abort()
         progress <- p
@@ -94,9 +85,10 @@ func (b *SpotifyHistoryBackend) ExportListens(ctx context.Context, oldestTimesta
         return
     }

+    slices.Sort(files)
     fileCount := int64(len(files))
     p.Export.Total = fileCount
-    for i, f := range files {
+    for i, filePath := range files {
         if err := ctx.Err(); err != nil {
             results <- models.ListensResult{Error: err}
             p.Export.Abort()
@@ -104,7 +96,7 @@ func (b *SpotifyHistoryBackend) ExportListens(ctx context.Context, oldestTimesta
             return
         }

-        history, err := readHistoryFile(f.File)
+        history, err := readHistoryFile(filePath)
         if err != nil {
             results <- models.ListensResult{Error: err}
             p.Export.Abort()
@@ -126,3 +118,19 @@ func (b *SpotifyHistoryBackend) ExportListens(ctx context.Context, oldestTimesta
     p.Export.Complete()
     progress <- p
 }
+
+func readHistoryFile(filePath string) (StreamingHistory, error) {
+    file, err := os.Open(filePath)
+    if err != nil {
+        return nil, err
+    }
+
+    defer file.Close()
+    history := StreamingHistory{}
+    err = history.Read(file)
+    if err != nil {
+        return nil, err
+    }
+
+    return history, nil
+}
@@ -83,12 +83,6 @@ func PromptExtraOptions(config config.ServiceConfig) (config.ServiceConfig, erro
         current, exists := config.ConfigValues[opt.Name]
         if exists {
             opt.Default = fmt.Sprintf("%v", current)
-        } else if opt.MigrateFrom != "" {
-            // If there is an old value to migrate from, try that
-            fallback, exists := config.ConfigValues[opt.MigrateFrom]
-            if exists {
-                opt.Default = fmt.Sprintf("%v", fallback)
-            }
         }

         val, err := Prompt(opt)
@@ -157,11 +157,7 @@ func (c *TransferCmd[E, I, R]) Transfer(exp backends.ExportProcessor[R], imp bac
         fmt.Println()
         fmt.Println(i18n.Tr("Import log:"))
         for _, entry := range result.ImportLog {
-            if entry.Type != models.Output {
-                fmt.Println(i18n.Tr("%v: %v", entry.Type, entry.Message))
-            } else {
-                fmt.Println(entry.Message)
-            }
+            fmt.Println(i18n.Tr("%v: %v", entry.Type, entry.Message))
         }
     }

@@ -1,251 +0,0 @@
-/*
-Copyright © 2025 Philipp Wolfer <phw@uploadedlobster.com>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-package listenbrainz
-
-import (
-    "encoding/json"
-    "errors"
-    "io"
-    "iter"
-    "regexp"
-    "sort"
-    "strconv"
-    "time"
-
-    "github.com/simonfrey/jsonl"
-    "go.uploadedlobster.com/scotty/internal/archive"
-)
-
-// Represents a ListenBrainz export archive.
-//
-// The export contains the user's listen history, favorite tracks and
-// user information.
-type ExportArchive struct {
-    backend archive.Archive
-}
-
-// Open a ListenBrainz archive from file path.
-func OpenExportArchive(path string) (*ExportArchive, error) {
-    backend, err := archive.OpenArchive(path)
-    if err != nil {
-        return nil, err
-    }
-
-    return &ExportArchive{backend: backend}, nil
-}
-
-// Close the archive and release any resources.
-func (a *ExportArchive) Close() error {
-    if a.backend == nil {
-        return nil
-    }
-    return a.backend.Close()
-}
-
-// Read the user information from the archive.
-func (a *ExportArchive) UserInfo() (UserInfo, error) {
-    f, err := a.backend.Open("user.json")
-    if err != nil {
-        return UserInfo{}, err
-    }
-    defer f.Close()
-
-    userInfo := UserInfo{}
-    bytes, err := io.ReadAll(f)
-    if err != nil {
-        return userInfo, err
-    }
-
-    json.Unmarshal(bytes, &userInfo)
-    return userInfo, nil
-}
-
-func (a *ExportArchive) ListListenExports() ([]ListenExportFileInfo, error) {
-    re := regexp.MustCompile(`^listens/(\d{4})/(\d{1,2})\.jsonl$`)
-    result := make([]ListenExportFileInfo, 0)
-
-    files, err := a.backend.Glob("listens/*/*.jsonl")
-    if err != nil {
-        return nil, err
-    }
-
-    for _, file := range files {
-        match := re.FindStringSubmatch(file.Name)
-        if match == nil {
-            continue
-        }
-
-        year := match[1]
-        month := match[2]
-        times, err := getMonthTimeRange(year, month)
-        if err != nil {
-            return nil, err
-        }
-        info := ListenExportFileInfo{
-            Name:      file.Name,
-            TimeRange: *times,
-            f:         file.File,
-        }
-        result = append(result, info)
-    }
-
-    return result, nil
-}
-
-// Yields all listens from the archive that are newer than the given timestamp.
-// The listens are yielded in ascending order of their listened_at timestamp.
-func (a *ExportArchive) IterListens(minTimestamp time.Time) iter.Seq2[Listen, error] {
-    return func(yield func(Listen, error) bool) {
-        files, err := a.ListListenExports()
-        if err != nil {
-            yield(Listen{}, err)
-            return
-        }
-
-        sort.Slice(files, func(i, j int) bool {
-            return files[i].TimeRange.Start.Before(files[j].TimeRange.Start)
-        })
-
-        for _, file := range files {
-            if file.TimeRange.End.Before(minTimestamp) {
-                continue
-            }
-
-            f := JSONLFile[Listen]{file: file.f}
-            for l, err := range f.IterItems() {
-                if err != nil {
-                    yield(Listen{}, err)
-                    return
-                }
-
-                if !time.Unix(l.ListenedAt, 0).After(minTimestamp) {
-                    continue
-                }
-                if !yield(l, nil) {
-                    break
-                }
-            }
-        }
-    }
-}
-
-// Yields all feedbacks from the archive that are newer than the given timestamp.
-// The feedbacks are yielded in ascending order of their Created timestamp.
-func (a *ExportArchive) IterFeedback(minTimestamp time.Time) iter.Seq2[Feedback, error] {
-    return func(yield func(Feedback, error) bool) {
-        files, err := a.backend.Glob("feedback.jsonl")
-        if err != nil {
-            yield(Feedback{}, err)
-            return
-        } else if len(files) == 0 {
-            yield(Feedback{}, errors.New("no feedback.jsonl file found in archive"))
-            return
-        }
-
-        j := JSONLFile[Feedback]{file: files[0].File}
-        for l, err := range j.IterItems() {
-            if err != nil {
-                yield(Feedback{}, err)
-                return
-            }
-
-            if !time.Unix(l.Created, 0).After(minTimestamp) {
-                continue
-            }
-            if !yield(l, nil) {
-                break
-            }
-        }
-    }
-}
-
-type UserInfo struct {
-    ID   string `json:"user_id"`
-    Name string `json:"username"`
-}
-
-type timeRange struct {
-    Start time.Time
-    End   time.Time
-}
-
-type ListenExportFileInfo struct {
-    Name      string
-    TimeRange timeRange
-    f         archive.OpenableFile
-}
-
-type JSONLFile[T any] struct {
-    file archive.OpenableFile
-}
-
-func (f *JSONLFile[T]) openReader() (*jsonl.Reader, error) {
-    fio, err := f.file.Open()
-    if err != nil {
-        return nil, err
-    }
-    reader := jsonl.NewReader(fio)
-    return &reader, nil
-}
-
-func (f *JSONLFile[T]) IterItems() iter.Seq2[T, error] {
-    return func(yield func(T, error) bool) {
-        reader, err := f.openReader()
-        if err != nil {
-            var listen T
-            yield(listen, err)
-            return
-        }
-        defer reader.Close()
-
-        for {
-            var listen T
-            err := reader.ReadSingleLine(&listen)
-            if err != nil {
-                break
-            }
-            if !yield(listen, nil) {
-                break
-            }
-        }
-    }
-}
-
-func getMonthTimeRange(year string, month string) (*timeRange, error) {
-    yearInt, err := strconv.Atoi(year)
-    if err != nil {
-        return nil, err
-    }
-    monthInt, err := strconv.Atoi(month)
-    if err != nil {
-        return nil, err
-    }
-
-    r := &timeRange{}
-    r.Start = time.Date(yearInt, time.Month(monthInt), 1, 0, 0, 0, 0, time.UTC)
-
-    // Get the end of the month
-    nextMonth := monthInt + 1
-    r.End = time.Date(
-        yearInt, time.Month(nextMonth), 1, 0, 0, 0, 0, time.UTC).Add(-time.Second)
-    return r, nil
-}
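`IterListens` and `IterFeedback` above are Go 1.23 range-over-func iterators that skip entries at or before a cutoff timestamp. A self-contained sketch of that iterator shape with a simplified listen type and in-memory data:

```go
package main

import (
	"fmt"
	"iter"
	"time"
)

type listen struct{ ListenedAt int64 }

// iterListens yields only the listens newer than minTimestamp.
func iterListens(all []listen, minTimestamp time.Time) iter.Seq2[listen, error] {
	return func(yield func(listen, error) bool) {
		for _, l := range all {
			if !time.Unix(l.ListenedAt, 0).After(minTimestamp) {
				continue // at or before the cutoff, skip it
			}
			if !yield(l, nil) {
				return // consumer stopped early
			}
		}
	}
}

func main() {
	cutoff := time.Unix(1700000000, 0)
	data := []listen{{1699999999}, {1700000001}, {1700050000}}

	for l, err := range iterListens(data, cutoff) {
		if err != nil {
			fmt.Println("error:", err)
			return
		}
		fmt.Println("listened at", l.ListenedAt)
	}
}
```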
@@ -46,7 +46,7 @@ type ImportBackend interface {

     // The implementation can perform all steps here to finalize the
     // export/import and free used resources.
-    FinishImport(result *ImportResult) error
+    FinishImport() error
 }

 // Must be implemented by services supporting the export of listens.
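The interface change above is the core difference between the two revisions: in the newer one `FinishImport` receives the `ImportResult`, so a backend can still log into it while releasing resources. A hedged sketch of what implementing that newer signature could look like, using trimmed stand-in types rather than the real scotty models:

```go
package main

import (
	"fmt"
	"os"
)

// Trimmed stand-ins for the scotty models.
type importResult struct{ log []string }

func (r *importResult) Log(msg string) { r.log = append(r.log, msg) }

type importBackend interface {
	StartImport() error
	FinishImport(result *importResult) error
}

// fileBackend keeps a file open during the import and closes it at the end,
// logging into the result while it still can.
type fileBackend struct{ file *os.File }

func (b *fileBackend) StartImport() error { return nil }

func (b *fileBackend) FinishImport(result *importResult) error {
	result.Log("closing output file")
	return b.file.Close()
}

var _ importBackend = (*fileBackend)(nil)

func main() {
	f, err := os.CreateTemp("", "scotty-example-*")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer os.Remove(f.Name())

	b := &fileBackend{file: f}
	res := &importResult{}
	if err := b.FinishImport(res); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(res.log)
}
```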
@@ -169,7 +169,6 @@ type LovesResult ExportResult[LovesList]
 type LogEntryType string

 const (
-    Output  LogEntryType = ""
     Info    LogEntryType = "Info"
     Warning LogEntryType = "Warning"
     Error   LogEntryType = "Error"
@@ -197,21 +196,11 @@ func (i *ImportResult) UpdateTimestamp(newTime time.Time) {
     }
 }

-func (i *ImportResult) Update(from *ImportResult) {
-    if i != from {
-        i.TotalCount = from.TotalCount
-        i.ImportCount = from.ImportCount
-        i.UpdateTimestamp(from.LastTimestamp)
-        i.ImportLog = append(i.ImportLog, from.ImportLog...)
-    }
-}
-
-func (i *ImportResult) Copy() ImportResult {
-    return ImportResult{
-        TotalCount:    i.TotalCount,
-        ImportCount:   i.ImportCount,
-        LastTimestamp: i.LastTimestamp,
-    }
+func (i *ImportResult) Update(from ImportResult) {
+    i.TotalCount = from.TotalCount
+    i.ImportCount = from.ImportCount
+    i.UpdateTimestamp(from.LastTimestamp)
+    i.ImportLog = append(i.ImportLog, from.ImportLog...)
 }

 func (i *ImportResult) Log(t LogEntryType, msg string) {
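`ImportResult.Update` above merges counters, timestamp and log entries from another result; the newer revision additionally guards against merging a result into itself, which would duplicate its own log. A small stand-alone sketch of that behaviour with a trimmed type:

```go
package main

import "fmt"

// Trimmed stand-in for models.ImportResult.
type importResult struct {
	TotalCount  int
	ImportCount int
	ImportLog   []string
}

func (i *importResult) Update(from *importResult) {
	if i != from { // skip self-merges so the log is not appended to itself
		i.TotalCount = from.TotalCount
		i.ImportCount = from.ImportCount
		i.ImportLog = append(i.ImportLog, from.ImportLog...)
	}
}

func main() {
	a := &importResult{TotalCount: 10, ImportCount: 5, ImportLog: []string{"first"}}
	b := &importResult{TotalCount: 12, ImportCount: 7, ImportLog: []string{"second"}}

	a.Update(b) // merges counters and appends "second"
	a.Update(a) // no-op thanks to the guard
	fmt.Println(a.TotalCount, a.ImportCount, a.ImportLog)
}
```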
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -138,31 +138,13 @@ func TestImportResultUpdate(t *testing.T) {
         LastTimestamp: time.Now().Add(1 * time.Hour),
         ImportLog:     []models.LogEntry{logEntry2},
     }
-    result.Update(&newResult)
+    result.Update(newResult)
     assert.Equal(t, 120, result.TotalCount)
     assert.Equal(t, 50, result.ImportCount)
     assert.Equal(t, newResult.LastTimestamp, result.LastTimestamp)
     assert.Equal(t, []models.LogEntry{logEntry1, logEntry2}, result.ImportLog)
 }

-func TestImportResultCopy(t *testing.T) {
-    logEntry := models.LogEntry{
-        Type:    models.Warning,
-        Message: "foo",
-    }
-    result := models.ImportResult{
-        TotalCount:    100,
-        ImportCount:   20,
-        LastTimestamp: time.Now(),
-        ImportLog:     []models.LogEntry{logEntry},
-    }
-    copy := result.Copy()
-    assert.Equal(t, result.TotalCount, copy.TotalCount)
-    assert.Equal(t, result.ImportCount, copy.ImportCount)
-    assert.Equal(t, result.LastTimestamp, copy.LastTimestamp)
-    assert.Empty(t, copy.ImportLog)
-}
-
 func TestImportResultLog(t *testing.T) {
     result := models.ImportResult{}
     result.Log(models.Warning, "foo")
@@ -25,10 +25,9 @@ const (
 )

 type BackendOption struct {
     Name        string
     Label       string
     Type        OptionType
     Default     string
     Validate    func(string) error
-    MigrateFrom string
 }

@@ -1,5 +1,5 @@
 /*
-Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>

 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@@ -25,9 +25,9 @@ import (
 )

 const (
     RetryCount = 5
-    DefaultRateLimitWait = 5 * time.Second
-    MaxWaitTime          = 60 * time.Second
+    DefaultRateLimitWaitSeconds = 5
+    MaxWaitTimeSeconds          = 60
 )

 // Implements rate HTTP header based limiting for resty.
@@ -47,15 +47,16 @@ func EnableHTTPHeaderRateLimit(client *resty.Client, resetInHeader string) {
             return code == http.StatusTooManyRequests || code >= http.StatusInternalServerError
         },
     )
-    client.SetRetryMaxWaitTime(MaxWaitTime)
+    client.SetRetryMaxWaitTime(time.Duration(MaxWaitTimeSeconds * time.Second))
     client.SetRetryAfter(func(client *resty.Client, resp *resty.Response) (time.Duration, error) {
-        retryAfter := DefaultRateLimitWait
+        var err error
+        var retryAfter int = DefaultRateLimitWaitSeconds
         if resp.StatusCode() == http.StatusTooManyRequests {
-            retryAfterHeader, err := strconv.Atoi(resp.Header().Get(resetInHeader))
-            if err == nil {
-                retryAfter = time.Duration(retryAfterHeader) * time.Second
+            retryAfter, err = strconv.Atoi(resp.Header().Get(resetInHeader))
+            if err != nil {
+                retryAfter = DefaultRateLimitWaitSeconds
             }
         }
-        return retryAfter, nil
+        return time.Duration(retryAfter * int(time.Second)), err
     })
 }
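The retry logic above falls back to a default wait unless a 429 response carries a parseable reset header. A stand-alone sketch of just that delay computation (the header name is only an example value; the resty wiring itself is omitted):

```go
package main

import (
	"fmt"
	"net/http"
	"strconv"
	"time"
)

const defaultRateLimitWait = 5 * time.Second

// retryAfter mirrors the delay computation above: honour the reset header on
// HTTP 429 if it parses as seconds, otherwise use the default wait.
func retryAfter(statusCode int, resetInHeader string, header http.Header) time.Duration {
	wait := defaultRateLimitWait
	if statusCode == http.StatusTooManyRequests {
		if secs, err := strconv.Atoi(header.Get(resetInHeader)); err == nil {
			wait = time.Duration(secs) * time.Second
		}
	}
	return wait
}

func main() {
	h := http.Header{}
	h.Set("X-RateLimit-Reset-In", "12") // example header name and value

	fmt.Println(retryAfter(http.StatusTooManyRequests, "X-RateLimit-Reset-In", h))     // 12s from the header
	fmt.Println(retryAfter(http.StatusInternalServerError, "X-RateLimit-Reset-In", h)) // 5s default
}
```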