Compare commits

..

No commits in common. "main" and "v0.5.2" have entirely different histories.
main ... v0.5.2

32 changed files with 676 additions and 996 deletions

View file

@ -1,18 +1,5 @@
# Scotty Changelog
## 0.6.0 - WIP
- Fix program hanging endlessly if import fails (#11)
- If import fails, still store the last successfully imported timestamp
- Show progress bars as aborted on export / import error
- JSPF: implemented export as loves and listens
- JSPF: write track duration
- JSPF: read username and recording MSID
- JSPF: add MusicBrainz playlist extension in append mode, if it does not exist
in the existing JSPF file
- scrobblerlog: fix timezone not being set from config (#6)
- scrobblerlog: fix listen export not considering latest timestamp
## 0.5.2 - 2025-05-01
- ListenBrainz: fixed loves export not considering latest timestamp
@ -29,9 +16,9 @@
- ListenBrainz: log missing recording MBID on love import
- Subsonic: support OpenSubsonic fields for recording MBID and genres (#5)
- Subsonic: fixed progress for loves export
- scrobblerlog: add "time-zone" config option (#6)
- scrobblerlog: add "time-zone" config option (#6).
- scrobblerlog: fixed progress for listen export
- scrobblerlog: renamed setting `include-skipped` to `ignore-skipped`
- scrobblerlog: renamed setting `include-skipped` to `ignore-skipped`.
Note: 386 builds for Linux are not available with this release due to an
incompatibility with latest version of gorm.

View file

@ -121,7 +121,7 @@ Backend | Listens Export | Listens Import | Loves Export | Loves Import
----------------|----------------|----------------|--------------|-------------
deezer | ✓ | | ✓ | -
funkwhale | ✓ | | ✓ | -
jspf | ✓ | ✓ | ✓ | ✓
jspf | - | ✓ | - | ✓
lastfm | ✓ | ✓ | ✓ | ✓
listenbrainz | ✓ | ✓ | ✓ | ✓
maloja | ✓ | ✓ | |
@ -145,7 +145,7 @@ You can help translate this project into your language with [Weblate](https://tr
## License
Scotty © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Scotty © 2023-2024 Philipp Wolfer <phw@uploadedlobster.com>
Scotty is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

24
go.mod
View file

@ -15,19 +15,19 @@ require (
github.com/manifoldco/promptui v0.9.0
github.com/pelletier/go-toml/v2 v2.2.4
github.com/shkh/lastfm-go v0.0.0-20191215035245-89a801c244e0
github.com/spf13/cast v1.8.0
github.com/spf13/cast v1.7.1
github.com/spf13/cobra v1.9.1
github.com/spf13/viper v1.20.1
github.com/stretchr/testify v1.10.0
github.com/supersonic-app/go-subsonic v0.0.0-20241224013245-9b2841f3711d
github.com/vbauerster/mpb/v8 v8.10.0
github.com/vbauerster/mpb/v8 v8.9.3
go.uploadedlobster.com/mbtypes v0.4.0
go.uploadedlobster.com/musicbrainzws2 v0.14.0
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
golang.org/x/oauth2 v0.30.0
golang.org/x/text v0.25.0
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0
golang.org/x/oauth2 v0.29.0
golang.org/x/text v0.24.0
gorm.io/datatypes v1.2.5
gorm.io/gorm v1.26.1
gorm.io/gorm v1.26.0
)
require (
@ -58,15 +58,15 @@ require (
github.com/spf13/pflag v1.0.6 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/image v0.27.0 // indirect
golang.org/x/image v0.26.0 // indirect
golang.org/x/mod v0.24.0 // indirect
golang.org/x/net v0.40.0 // indirect
golang.org/x/sync v0.14.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/tools v0.33.0 // indirect
golang.org/x/net v0.39.0 // indirect
golang.org/x/sync v0.13.0 // indirect
golang.org/x/sys v0.32.0 // indirect
golang.org/x/tools v0.32.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gorm.io/driver/mysql v1.5.7 // indirect
modernc.org/libc v1.65.2 // indirect
modernc.org/libc v1.64.0 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.10.0 // indirect
modernc.org/sqlite v1.37.0 // indirect

60
go.sum
View file

@ -111,8 +111,8 @@ github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9yS
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA=
github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo=
github.com/spf13/cast v1.8.0 h1:gEN9K4b8Xws4EX0+a0reLmhq8moKn7ntRlQYgjPeCDk=
github.com/spf13/cast v1.8.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
@ -125,8 +125,8 @@ github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/supersonic-app/go-subsonic v0.0.0-20241224013245-9b2841f3711d h1:70+Nn7yh+cfeKqqXVTdpneFqXuvrBLyP7U6GVUsjTU4=
github.com/supersonic-app/go-subsonic v0.0.0-20241224013245-9b2841f3711d/go.mod h1:D+OWPXeD9owcdcoXATv5YPBGWxxVvn5k98rt5B4wMc4=
github.com/vbauerster/mpb/v8 v8.10.0 h1:5ZYEWM4ovaZGAibjzW4PlQNb5k+JpzMqVwgNyk+K0M8=
github.com/vbauerster/mpb/v8 v8.10.0/go.mod h1:DYPFebxSahB+f7tuEUGauLQ7w8ij3wMr4clsVuJCV4I=
github.com/vbauerster/mpb/v8 v8.9.3 h1:PnMeF+sMvYv9u23l6DO6Q3+Mdj408mjLRXIzmUmU2Z8=
github.com/vbauerster/mpb/v8 v8.9.3/go.mod h1:hxS8Hz4C6ijnppDSIX6LjG8FYJSoPo9iIOcE53Zik0c=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
@ -136,13 +136,13 @@ go.uploadedlobster.com/musicbrainzws2 v0.14.0 h1:YaEtxNwLSNT1gzFipQ4XlaThNfXjBpz
go.uploadedlobster.com/musicbrainzws2 v0.14.0/go.mod h1:T6sYE7ZHRH3mJWT3g9jdSUPKJLZubnBjKyjMPNdkgao=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI=
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM=
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8=
golang.org/x/image v0.13.0/go.mod h1:6mmbMOeV28HuMTgA6OSRkdXKYw/t5W9Uwn2Yv1r3Yxk=
golang.org/x/image v0.27.0 h1:C8gA4oWU/tKkdCfYT6T2u4faJu3MeNS5O8UPWlPF61w=
golang.org/x/image v0.27.0/go.mod h1:xbdrClrAUway1MUTEZDq9mz/UpRwYAkFFNUslZtcB+g=
golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU=
@ -151,15 +151,15 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98=
golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ=
golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -169,8 +169,8 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@ -179,16 +179,16 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc=
golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI=
golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU=
golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
@ -206,18 +206,18 @@ gorm.io/driver/sqlite v1.4.3/go.mod h1:0Aq3iPO+v9ZKbcdiz8gLWRw5VOPcBOPUQJFLq5e2e
gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g=
gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g=
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
gorm.io/gorm v1.26.1 h1:ghB2gUI9FkS46luZtn6DLZ0f6ooBJ5IbVej2ENFDjRw=
gorm.io/gorm v1.26.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
modernc.org/cc/v4 v4.26.1 h1:+X5NtzVBn0KgsBCBe+xkDC7twLb/jNVj9FPgiwSQO3s=
modernc.org/cc/v4 v4.26.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.27.1 h1:emhLB4uoOmkZUnTDFcMI3AbkmU/Evjuerit9Taqe6Ss=
modernc.org/ccgo/v4 v4.27.1/go.mod h1:543Q0qQhJWekKVS5P6yL5fO6liNhla9Lbm2/B3rEKDE=
gorm.io/gorm v1.26.0 h1:9lqQVPG5aNNS6AyHdRiwScAVnXHg/L/Srzx55G5fOgs=
gorm.io/gorm v1.26.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
modernc.org/cc/v4 v4.26.0 h1:QMYvbVduUGH0rrO+5mqF/PSPPRZNpRtg2CLELy7vUpA=
modernc.org/cc/v4 v4.26.0/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.26.0 h1:gVzXaDzGeBYJ2uXTOpR8FR7OlksDOe9jxnjhIKCsiTc=
modernc.org/ccgo/v4 v4.26.0/go.mod h1:Sem8f7TFUtVXkG2fiaChQtyyfkqhJBg/zjEJBkmuAVY=
modernc.org/fileutil v1.3.1 h1:8vq5fe7jdtEvoCf3Zf9Nm0Q05sH6kGx0Op2CPx1wTC8=
modernc.org/fileutil v1.3.1/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
modernc.org/libc v1.65.2 h1:drWL1QO9fKXr3kXDN8y+4lKyBr8bA3mtUBQpftq3IJw=
modernc.org/libc v1.65.2/go.mod h1:VI3V2S5mNka4deJErQ0jsMXe7jgxojE2fOB/mWoHlbc=
modernc.org/libc v1.64.0 h1:U0k8BD2d3cD3e9I8RLcZgJBHAcsJzbXx5mKGSb5pyJA=
modernc.org/libc v1.64.0/go.mod h1:7m9VzGq7APssBTydds2zBcxGREwvIGpuUBaKTXdm2Qs=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.10.0 h1:fzumd51yQ1DxcOxSO+S6X7+QTuVU+n8/Aj7swYjFfC4=

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -18,6 +18,7 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
package backends_test
import (
"reflect"
"testing"
"github.com/spf13/viper"
@ -32,7 +33,6 @@ import (
"go.uploadedlobster.com/scotty/internal/backends/maloja"
"go.uploadedlobster.com/scotty/internal/backends/scrobblerlog"
"go.uploadedlobster.com/scotty/internal/backends/spotify"
"go.uploadedlobster.com/scotty/internal/backends/spotifyhistory"
"go.uploadedlobster.com/scotty/internal/backends/subsonic"
"go.uploadedlobster.com/scotty/internal/config"
"go.uploadedlobster.com/scotty/internal/i18n"
@ -93,9 +93,9 @@ func TestImplementsInterfaces(t *testing.T) {
expectInterface[models.LovesExport](t, &funkwhale.FunkwhaleApiBackend{})
// expectInterface[models.LovesImport](t, &funkwhale.FunkwhaleApiBackend{})
expectInterface[models.ListensExport](t, &jspf.JSPFBackend{})
// expectInterface[models.ListensExport](t, &jspf.JSPFBackend{})
expectInterface[models.ListensImport](t, &jspf.JSPFBackend{})
expectInterface[models.LovesExport](t, &jspf.JSPFBackend{})
// expectInterface[models.LovesExport](t, &jspf.JSPFBackend{})
expectInterface[models.LovesImport](t, &jspf.JSPFBackend{})
// expectInterface[models.ListensExport](t, &lastfm.LastfmApiBackend{})
@ -115,8 +115,6 @@ func TestImplementsInterfaces(t *testing.T) {
expectInterface[models.LovesExport](t, &spotify.SpotifyApiBackend{})
// expectInterface[models.LovesImport](t, &spotify.SpotifyApiBackend{})
expectInterface[models.ListensExport](t, &spotifyhistory.SpotifyHistoryBackend{})
expectInterface[models.ListensExport](t, &scrobblerlog.ScrobblerLogBackend{})
expectInterface[models.ListensImport](t, &scrobblerlog.ScrobblerLogBackend{})
@ -127,6 +125,6 @@ func TestImplementsInterfaces(t *testing.T) {
func expectInterface[T interface{}](t *testing.T, backend models.Backend) {
ok, name := backends.ImplementsInterface[T](&backend)
if !ok {
t.Errorf("%v expected to implement %v", backend.Name(), name)
t.Errorf("%v expected to implement %v", reflect.TypeOf(backend).Name(), name)
}
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Scotty is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
@ -77,7 +77,7 @@ func (b *DeezerApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
return nil
}
func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
// Choose a high offset, we attempt to search the loves backwards starting
// at the oldest one.
offset := math.MaxInt32
@ -88,18 +88,15 @@ func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan
totalDuration := startTime.Sub(oldestTimestamp)
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(totalDuration.Seconds()),
},
}
defer close(results)
p := models.Progress{Total: int64(totalDuration.Seconds())}
out:
for {
result, err := b.client.UserHistory(offset, perPage)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.ListensResult{Error: err}
return
}
@ -107,6 +104,7 @@ out:
// The offset was higher than the actual number of tracks. Adjust the offset
// and continue.
if offset >= result.Total {
p.Total = int64(result.Total)
offset = max(result.Total-perPage, 0)
continue
}
@ -132,8 +130,7 @@ out:
}
remainingTime := startTime.Sub(minTime)
p.Export.TotalItems += len(listens)
p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
progress <- p
results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}
@ -149,29 +146,25 @@ out:
}
results <- models.ListensResult{OldestTimestamp: minTime}
p.Export.Complete()
progress <- p
progress <- p.Complete()
}
func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
// Choose a high offset, we attempt to search the loves backwards starting
// at the oldest one.
offset := math.MaxInt32
perPage := MaxItemsPerGet
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(perPage),
},
}
defer close(results)
p := models.Progress{Total: int64(perPage)}
var totalCount int
out:
for {
result, err := b.client.UserTracks(offset, perPage)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.LovesResult{Error: err}
return
}
@ -179,8 +172,8 @@ out:
// The offset was higher than the actual number of tracks. Adjust the offset
// and continue.
if offset >= result.Total {
p.Total = int64(result.Total)
totalCount = result.Total
p.Export.Total = int64(totalCount)
offset = max(result.Total-perPage, 0)
continue
}
@ -197,14 +190,13 @@ out:
loves = append(loves, love)
} else {
totalCount -= 1
break
}
}
sort.Sort(loves)
results <- models.LovesResult{Items: loves, Total: totalCount}
p.Export.TotalItems = totalCount
p.Export.Total = int64(totalCount)
p.Export.Elapsed += int64(count)
p.Elapsed += int64(count)
progress <- p
if offset <= 0 {
@ -218,8 +210,7 @@ out:
}
}
p.Export.Complete()
progress <- p
progress <- p.Complete()
}
func (t Listen) AsListen() models.Listen {

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -36,27 +36,27 @@ func (b *DumpBackend) InitConfig(config *config.ServiceConfig) error {
func (b *DumpBackend) StartImport() error { return nil }
func (b *DumpBackend) FinishImport() error { return nil }
func (b *DumpBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *DumpBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
for _, listen := range export.Items {
importResult.UpdateTimestamp(listen.ListenedAt)
importResult.ImportCount += 1
msg := fmt.Sprintf("🎶 %v: \"%v\" by %v (%v)",
listen.ListenedAt, listen.TrackName, listen.ArtistName(), listen.RecordingMBID)
importResult.Log(models.Info, msg)
progress <- models.TransferProgress{}.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
}
return importResult, nil
}
func (b *DumpBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *DumpBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
for _, love := range export.Items {
importResult.UpdateTimestamp(love.Created)
importResult.ImportCount += 1
msg := fmt.Sprintf("❤️ %v: \"%v\" by %v (%v)",
love.Created, love.TrackName, love.ArtistName(), love.RecordingMBID)
importResult.Log(models.Info, msg)
progress <- models.TransferProgress{}.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
}
return importResult, nil

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Scotty is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
@ -16,7 +16,6 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
package backends
import (
"sync"
"time"
"go.uploadedlobster.com/scotty/internal/models"
@ -24,7 +23,7 @@ import (
type ExportProcessor[T models.ListensResult | models.LovesResult] interface {
ExportBackend() models.Backend
Process(wg *sync.WaitGroup, oldestTimestamp time.Time, results chan T, progress chan models.TransferProgress)
Process(oldestTimestamp time.Time, results chan T, progress chan models.Progress)
}
type ListensExportProcessor struct {
@ -35,11 +34,9 @@ func (p ListensExportProcessor) ExportBackend() models.Backend {
return p.Backend
}
func (p ListensExportProcessor) Process(wg *sync.WaitGroup, oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
wg.Add(1)
defer wg.Done()
defer close(results)
func (p ListensExportProcessor) Process(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
p.Backend.ExportListens(oldestTimestamp, results, progress)
close(progress)
}
type LovesExportProcessor struct {
@ -50,9 +47,7 @@ func (p LovesExportProcessor) ExportBackend() models.Backend {
return p.Backend
}
func (p LovesExportProcessor) Process(wg *sync.WaitGroup, oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
wg.Add(1)
defer wg.Done()
defer close(results)
func (p LovesExportProcessor) Process(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
p.Backend.ExportLoves(oldestTimestamp, results, progress)
close(progress)
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -60,26 +60,21 @@ func (b *FunkwhaleApiBackend) InitConfig(config *config.ServiceConfig) error {
return nil
}
func (b *FunkwhaleApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
func (b *FunkwhaleApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
page := 1
perPage := MaxItemsPerGet
defer close(results)
// We need to gather the full list of listens in order to sort them
listens := make(models.ListensList, 0, 2*perPage)
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(perPage),
},
}
p := models.Progress{Total: int64(perPage)}
out:
for {
result, err := b.client.GetHistoryListenings(b.username, page, perPage)
if err != nil {
p.Export.Abort()
progress <- p
results <- models.ListensResult{Error: err}
return
}
count := len(result.Results)
@ -90,7 +85,7 @@ out:
for _, fwListen := range result.Results {
listen := fwListen.AsListen()
if listen.ListenedAt.After(oldestTimestamp) {
p.Export.Elapsed += 1
p.Elapsed += 1
listens = append(listens, listen)
} else {
break out
@ -99,42 +94,36 @@ out:
if result.Next == "" {
// No further results
p.Export.Total = p.Export.Elapsed
p.Export.Total -= int64(perPage - count)
p.Total = p.Elapsed
p.Total -= int64(perPage - count)
break out
}
p.Export.TotalItems = len(listens)
p.Export.Total += int64(perPage)
p.Total += int64(perPage)
progress <- p
page += 1
}
sort.Sort(listens)
p.Export.TotalItems = len(listens)
p.Export.Complete()
progress <- p
progress <- p.Complete()
results <- models.ListensResult{Items: listens}
}
func (b *FunkwhaleApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
func (b *FunkwhaleApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
page := 1
perPage := MaxItemsPerGet
defer close(results)
// We need to gather the full list of listens in order to sort them
loves := make(models.LovesList, 0, 2*perPage)
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(perPage),
},
}
p := models.Progress{Total: int64(perPage)}
out:
for {
result, err := b.client.GetFavoriteTracks(page, perPage)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.LovesResult{Error: err}
return
}
@ -147,7 +136,7 @@ out:
for _, favorite := range result.Results {
love := favorite.AsLove()
if love.Created.After(oldestTimestamp) {
p.Export.Elapsed += 1
p.Elapsed += 1
loves = append(loves, love)
} else {
break out
@ -159,16 +148,13 @@ out:
break out
}
p.Export.TotalItems = len(loves)
p.Export.Total += int64(perPage)
p.Total += int64(perPage)
progress <- p
page += 1
}
sort.Sort(loves)
p.Export.TotalItems = len(loves)
p.Export.Complete()
progress <- p
progress <- p.Complete()
results <- models.LovesResult{Items: loves}
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -18,15 +18,13 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
package backends
import (
"sync"
"go.uploadedlobster.com/scotty/internal/models"
)
type ImportProcessor[T models.ListensResult | models.LovesResult] interface {
ImportBackend() models.ImportBackend
Process(wg *sync.WaitGroup, results chan T, out chan models.ImportResult, progress chan models.TransferProgress)
Import(export T, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error)
Process(results chan T, out chan models.ImportResult, progress chan models.Progress)
Import(export T, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error)
}
type ListensImportProcessor struct {
@ -37,13 +35,13 @@ func (p ListensImportProcessor) ImportBackend() models.ImportBackend {
return p.Backend
}
func (p ListensImportProcessor) Process(wg *sync.WaitGroup, results chan models.ListensResult, out chan models.ImportResult, progress chan models.TransferProgress) {
process(wg, p, results, out, progress)
func (p ListensImportProcessor) Process(results chan models.ListensResult, out chan models.ImportResult, progress chan models.Progress) {
process(p, results, out, progress)
}
func (p ListensImportProcessor) Import(export models.ListensResult, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (p ListensImportProcessor) Import(export models.ListensResult, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
if export.Error != nil {
return result, export.Error
return handleError(result, export.Error, progress), export.Error
}
if export.Total > 0 {
@ -53,7 +51,7 @@ func (p ListensImportProcessor) Import(export models.ListensResult, result model
}
importResult, err := p.Backend.ImportListens(export, result, progress)
if err != nil {
return importResult, err
return handleError(result, err, progress), err
}
return importResult, nil
}
@ -66,13 +64,13 @@ func (p LovesImportProcessor) ImportBackend() models.ImportBackend {
return p.Backend
}
func (p LovesImportProcessor) Process(wg *sync.WaitGroup, results chan models.LovesResult, out chan models.ImportResult, progress chan models.TransferProgress) {
process(wg, p, results, out, progress)
func (p LovesImportProcessor) Process(results chan models.LovesResult, out chan models.ImportResult, progress chan models.Progress) {
process(p, results, out, progress)
}
func (p LovesImportProcessor) Import(export models.LovesResult, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (p LovesImportProcessor) Import(export models.LovesResult, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
if export.Error != nil {
return result, export.Error
return handleError(result, export.Error, progress), export.Error
}
if export.Total > 0 {
@ -82,47 +80,44 @@ func (p LovesImportProcessor) Import(export models.LovesResult, result models.Im
}
importResult, err := p.Backend.ImportLoves(export, result, progress)
if err != nil {
return importResult, err
return handleError(importResult, err, progress), err
}
return importResult, nil
}
func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](wg *sync.WaitGroup, processor P, results chan R, out chan models.ImportResult, progress chan models.TransferProgress) {
wg.Add(1)
defer wg.Done()
func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](processor P, results chan R, out chan models.ImportResult, progress chan models.Progress) {
defer close(out)
defer close(progress)
result := models.ImportResult{}
p := models.TransferProgress{}
if err := processor.ImportBackend().StartImport(); err != nil {
err := processor.ImportBackend().StartImport()
if err != nil {
out <- handleError(result, err, progress)
return
}
for exportResult := range results {
importResult, err := processor.Import(exportResult, result, out, progress)
result.Update(importResult)
if err != nil {
processor.ImportBackend().FinishImport()
out <- handleError(result, err, progress)
return
}
progress <- p.FromImportResult(result, false)
result.Update(importResult)
progress <- models.Progress{}.FromImportResult(result)
}
if err := processor.ImportBackend().FinishImport(); err != nil {
err = processor.ImportBackend().FinishImport()
if err != nil {
out <- handleError(result, err, progress)
return
}
progress <- p.FromImportResult(result, true)
progress <- models.Progress{}.FromImportResult(result).Complete()
out <- result
}
func handleError(result models.ImportResult, err error, progress chan models.TransferProgress) models.ImportResult {
func handleError(result models.ImportResult, err error, progress chan models.Progress) models.ImportResult {
result.Error = err
p := models.TransferProgress{}.FromImportResult(result, false)
p.Import.Abort()
progress <- p
progress <- models.Progress{}.FromImportResult(result).Complete()
return result
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023-2024 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -18,25 +18,15 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
package jspf
import (
"errors"
"os"
"sort"
"strings"
"time"
"go.uploadedlobster.com/mbtypes"
"go.uploadedlobster.com/scotty/internal/config"
"go.uploadedlobster.com/scotty/internal/i18n"
"go.uploadedlobster.com/scotty/internal/models"
"go.uploadedlobster.com/scotty/pkg/jspf"
)
const (
artistMBIDPrefix = "https://musicbrainz.org/artist/"
recordingMBIDPrefix = "https://musicbrainz.org/recording/"
releaseMBIDPrefix = "https://musicbrainz.org/release/"
)
type JSPFBackend struct {
filePath string
playlist jspf.Playlist
@ -77,11 +67,14 @@ func (b *JSPFBackend) InitConfig(config *config.ServiceConfig) error {
Title: config.GetString("title"),
Creator: config.GetString("username"),
Identifier: config.GetString("identifier"),
Date: time.Now(),
Tracks: make([]jspf.Track, 0),
Extension: map[string]any{
jspf.MusicBrainzPlaylistExtensionID: jspf.MusicBrainzPlaylistExtension{
LastModifiedAt: time.Now(),
Public: true,
},
},
}
b.addMusicBrainzPlaylistExtension()
return nil
}
@ -93,34 +86,7 @@ func (b *JSPFBackend) FinishImport() error {
return b.writeJSPF()
}
func (b *JSPFBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
err := b.readJSPF()
p := models.TransferProgress{
Export: &models.Progress{},
}
if err != nil {
p.Export.Abort()
progress <- p
results <- models.ListensResult{Error: err}
return
}
listens := make(models.ListensList, 0, len(b.playlist.Tracks))
for _, track := range b.playlist.Tracks {
listen, err := trackAsListen(track)
if err == nil && listen != nil && listen.ListenedAt.After(oldestTimestamp) {
listens = append(listens, *listen)
}
}
sort.Sort(listens)
p.Export.Total = int64(len(listens))
p.Export.Complete()
progress <- p
results <- models.ListensResult{Items: listens}
}
func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
for _, listen := range export.Items {
track := listenAsTrack(listen)
b.playlist.Tracks = append(b.playlist.Tracks, track)
@ -128,38 +94,11 @@ func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult mo
importResult.UpdateTimestamp(listen.ListenedAt)
}
progress <- models.TransferProgress{}.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
return importResult, nil
}
func (b *JSPFBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
err := b.readJSPF()
p := models.TransferProgress{
Export: &models.Progress{},
}
if err != nil {
p.Export.Abort()
progress <- p
results <- models.LovesResult{Error: err}
return
}
loves := make(models.LovesList, 0, len(b.playlist.Tracks))
for _, track := range b.playlist.Tracks {
love, err := trackAsLove(track)
if err == nil && love != nil && love.Created.After(oldestTimestamp) {
loves = append(loves, *love)
}
}
sort.Sort(loves)
p.Export.Total = int64(len(loves))
p.Export.Complete()
progress <- p
results <- models.LovesResult{Items: loves}
}
func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
for _, love := range export.Items {
track := loveAsTrack(love)
b.playlist.Tracks = append(b.playlist.Tracks, track)
@ -167,42 +106,28 @@ func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models
importResult.UpdateTimestamp(love.Created)
}
progress <- models.TransferProgress{}.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
return importResult, nil
}
func listenAsTrack(l models.Listen) jspf.Track {
l.FillAdditionalInfo()
track := trackAsJSPFTrack(l.Track)
track := trackAsTrack(l.Track)
extension := makeMusicBrainzExtension(l.Track)
extension.AddedAt = l.ListenedAt
extension.AddedBy = l.UserName
track.Extension[jspf.MusicBrainzTrackExtensionID] = extension
if l.RecordingMBID != "" {
track.Identifier = append(track.Identifier, recordingMBIDPrefix+string(l.RecordingMBID))
track.Identifier = append(track.Identifier, "https://musicbrainz.org/recording/"+string(l.RecordingMBID))
}
return track
}
func trackAsListen(t jspf.Track) (*models.Listen, error) {
track, ext, err := jspfTrackAsTrack(t)
if err != nil {
return nil, err
}
listen := models.Listen{
ListenedAt: ext.AddedAt,
UserName: ext.AddedBy,
Track: *track,
}
return &listen, err
}
func loveAsTrack(l models.Love) jspf.Track {
l.FillAdditionalInfo()
track := trackAsJSPFTrack(l.Track)
track := trackAsTrack(l.Track)
extension := makeMusicBrainzExtension(l.Track)
extension.AddedAt = l.Created
extension.AddedBy = l.UserName
@ -213,69 +138,24 @@ func loveAsTrack(l models.Love) jspf.Track {
recordingMBID = l.RecordingMBID
}
if recordingMBID != "" {
track.Identifier = append(track.Identifier, recordingMBIDPrefix+string(recordingMBID))
track.Identifier = append(track.Identifier, "https://musicbrainz.org/recording/"+string(recordingMBID))
}
return track
}
func trackAsLove(t jspf.Track) (*models.Love, error) {
track, ext, err := jspfTrackAsTrack(t)
if err != nil {
return nil, err
}
love := models.Love{
Created: ext.AddedAt,
UserName: ext.AddedBy,
RecordingMBID: track.RecordingMBID,
Track: *track,
}
recordingMSID, ok := track.AdditionalInfo["recording_msid"].(string)
if ok {
love.RecordingMSID = mbtypes.MBID(recordingMSID)
}
return &love, err
}
func trackAsJSPFTrack(t models.Track) jspf.Track {
func trackAsTrack(t models.Track) jspf.Track {
track := jspf.Track{
Title: t.TrackName,
Album: t.ReleaseName,
Creator: t.ArtistName(),
TrackNum: t.TrackNumber,
Duration: t.Duration.Milliseconds(),
Extension: jspf.ExtensionMap{},
Extension: map[string]any{},
}
return track
}
func jspfTrackAsTrack(t jspf.Track) (*models.Track, *jspf.MusicBrainzTrackExtension, error) {
track := models.Track{
ArtistNames: []string{t.Creator},
ReleaseName: t.Album,
TrackName: t.Title,
TrackNumber: t.TrackNum,
Duration: time.Duration(t.Duration) * time.Millisecond,
}
for _, id := range t.Identifier {
if strings.HasPrefix(id, recordingMBIDPrefix) {
track.RecordingMBID = mbtypes.MBID(id[len(recordingMBIDPrefix):])
}
}
ext, err := readMusicBrainzExtension(t, &track)
if err != nil {
return nil, nil, err
}
return &track, ext, nil
}
func makeMusicBrainzExtension(t models.Track) jspf.MusicBrainzTrackExtension {
extension := jspf.MusicBrainzTrackExtension{
AdditionalMetadata: t.AdditionalInfo,
@ -283,11 +163,11 @@ func makeMusicBrainzExtension(t models.Track) jspf.MusicBrainzTrackExtension {
}
for i, mbid := range t.ArtistMBIDs {
extension.ArtistIdentifiers[i] = artistMBIDPrefix + string(mbid)
extension.ArtistIdentifiers[i] = "https://musicbrainz.org/artist/" + string(mbid)
}
if t.ReleaseMBID != "" {
extension.ReleaseIdentifier = releaseMBIDPrefix + string(t.ReleaseMBID)
extension.ReleaseIdentifier = "https://musicbrainz.org/release/" + string(t.ReleaseMBID)
}
// The tracknumber tag would be redundant
@ -296,25 +176,6 @@ func makeMusicBrainzExtension(t models.Track) jspf.MusicBrainzTrackExtension {
return extension
}
func readMusicBrainzExtension(jspfTrack jspf.Track, outputTrack *models.Track) (*jspf.MusicBrainzTrackExtension, error) {
ext := jspf.MusicBrainzTrackExtension{}
err := jspfTrack.Extension.Get(jspf.MusicBrainzTrackExtensionID, &ext)
if err != nil {
return nil, errors.New("missing MusicBrainz track extension")
}
outputTrack.AdditionalInfo = ext.AdditionalMetadata
outputTrack.ReleaseMBID = mbtypes.MBID(ext.ReleaseIdentifier)
outputTrack.ArtistMBIDs = make([]mbtypes.MBID, len(ext.ArtistIdentifiers))
for i, mbid := range ext.ArtistIdentifiers {
if strings.HasPrefix(mbid, artistMBIDPrefix) {
outputTrack.ArtistMBIDs[i] = mbtypes.MBID(mbid[len(artistMBIDPrefix):])
}
}
return &ext, nil
}
func (b *JSPFBackend) readJSPF() error {
if b.append {
file, err := os.Open(b.filePath)
@ -338,7 +199,6 @@ func (b *JSPFBackend) readJSPF() error {
return err
}
b.playlist = playlist.Playlist
b.addMusicBrainzPlaylistExtension()
}
}
@ -358,13 +218,3 @@ func (b *JSPFBackend) writeJSPF() error {
defer file.Close()
return playlist.Write(file)
}
func (b *JSPFBackend) addMusicBrainzPlaylistExtension() {
if b.playlist.Extension == nil {
b.playlist.Extension = make(jspf.ExtensionMap, 1)
}
extension := jspf.MusicBrainzPlaylistExtension{Public: true}
b.playlist.Extension.Get(jspf.MusicBrainzPlaylistExtensionID, &extension)
extension.LastModifiedAt = time.Now()
b.playlist.Extension[jspf.MusicBrainzPlaylistExtensionID] = extension
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Scotty is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
@ -88,17 +88,15 @@ func (b *LastfmApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
return nil
}
func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
page := MaxPage
minTime := oldestTimestamp
perPage := MaxItemsPerGet
defer close(results)
// We need to gather the full list of listens in order to sort them
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(page),
},
}
p := models.Progress{Total: int64(page)}
out:
for page > 0 {
@ -112,8 +110,7 @@ out:
result, err := b.client.User.GetRecentTracks(args)
if err != nil {
results <- models.ListensResult{Error: err}
p.Export.Abort()
progress <- p
progress <- p.Complete()
return
}
@ -132,12 +129,11 @@ out:
timestamp, err := strconv.ParseInt(scrobble.Date.Uts, 10, 64)
if err != nil {
results <- models.ListensResult{Error: err}
p.Export.Abort()
progress <- p
progress <- p.Complete()
break out
}
if timestamp > oldestTimestamp.Unix() {
p.Export.Elapsed += 1
p.Elapsed += 1
listen := models.Listen{
ListenedAt: time.Unix(timestamp, 0),
UserName: b.username,
@ -171,18 +167,16 @@ out:
Total: result.Total,
OldestTimestamp: minTime,
}
p.Export.Total = int64(result.TotalPages)
p.Export.Elapsed = int64(result.TotalPages - page)
p.Export.TotalItems += len(listens)
p.Total = int64(result.TotalPages)
p.Elapsed = int64(result.TotalPages - page)
progress <- p
}
results <- models.ListensResult{OldestTimestamp: minTime}
p.Export.Complete()
progress <- p
progress <- p.Complete()
}
func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
total := len(export.Items)
for i := 0; i < total; i += MaxListensPerSubmission {
listens := export.Items[i:min(i+MaxListensPerSubmission, total)]
@ -252,24 +246,22 @@ func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResu
importResult.UpdateTimestamp(listens[count-1].ListenedAt)
importResult.ImportCount += accepted
progress <- models.TransferProgress{}.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
}
return importResult, nil
}
func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
// Choose a high offset, we attempt to search the loves backwards starting
// at the oldest one.
page := 1
perPage := MaxItemsPerGet
defer close(results)
loves := make(models.LovesList, 0, 2*MaxItemsPerGet)
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(perPage),
},
}
p := models.Progress{Total: int64(perPage)}
var totalCount int
out:
@ -280,12 +272,12 @@ out:
"page": page,
})
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.LovesResult{Error: err}
return
}
p.Total = int64(result.Total)
count := len(result.Tracks)
if count == 0 {
break out
@ -294,8 +286,7 @@ out:
for _, track := range result.Tracks {
timestamp, err := strconv.ParseInt(track.Date.Uts, 10, 64)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.LovesResult{Error: err}
return
}
@ -321,21 +312,18 @@ out:
}
}
p.Export.Total += int64(perPage)
p.Export.TotalItems = totalCount
p.Export.Elapsed += int64(count)
p.Elapsed += int64(count)
progress <- p
page += 1
}
sort.Sort(loves)
p.Export.Complete()
progress <- p
results <- models.LovesResult{Items: loves, Total: totalCount}
progress <- p.Complete()
}
func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
for _, love := range export.Items {
err := b.client.Track.Love(lastfm.P{
"track": love.TrackName,
@ -351,7 +339,7 @@ func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult m
importResult.Log(models.Error, msg)
}
progress <- models.TransferProgress{}.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
}
return importResult, nil

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -72,25 +72,23 @@ func (b *ListenBrainzApiBackend) InitConfig(config *config.ServiceConfig) error
func (b *ListenBrainzApiBackend) StartImport() error { return nil }
func (b *ListenBrainzApiBackend) FinishImport() error { return nil }
func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
startTime := time.Now()
minTime := oldestTimestamp
if minTime.Unix() < 1 {
minTime = time.Unix(1, 0)
}
totalDuration := startTime.Sub(oldestTimestamp)
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(totalDuration.Seconds()),
},
}
totalDuration := startTime.Sub(minTime)
defer close(results)
p := models.Progress{Total: int64(totalDuration.Seconds())}
for {
result, err := b.client.GetListens(b.username, time.Now(), minTime)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.ListensResult{Error: err}
return
}
@ -100,7 +98,7 @@ func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, result
if minTime.Unix() < result.Payload.OldestListenTimestamp {
minTime = time.Unix(result.Payload.OldestListenTimestamp, 0)
totalDuration = startTime.Sub(minTime)
p.Export.Total = int64(totalDuration.Seconds())
p.Total = int64(totalDuration.Seconds())
continue
} else {
break
@ -123,20 +121,18 @@ func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, result
}
sort.Sort(listens)
p.Export.TotalItems += len(listens)
p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
progress <- p
results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}
}
results <- models.ListensResult{OldestTimestamp: minTime}
p.Export.Complete()
progress <- p
progress <- p.Complete()
}
func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
total := len(export.Items)
p := models.TransferProgress{}.FromImportResult(importResult, false)
p := models.Progress{}.FromImportResult(importResult)
for i := 0; i < total; i += MaxListensPerRequest {
listens := export.Items[i:min(i+MaxListensPerRequest, total)]
count := len(listens)
@ -152,7 +148,7 @@ func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, impo
for _, l := range listens {
if b.checkDuplicates {
isDupe, err := b.checkDuplicateListen(l)
p.Import.Elapsed += 1
p.Elapsed += 1
progress <- p
if err != nil {
return importResult, err
@ -192,36 +188,31 @@ func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, impo
importResult.UpdateTimestamp(listens[count-1].ListenedAt)
}
importResult.ImportCount += count
progress <- p.FromImportResult(importResult, false)
progress <- p.FromImportResult(importResult)
}
return importResult, nil
}
func (b *ListenBrainzApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
func (b *ListenBrainzApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
defer close(results)
exportChan := make(chan models.LovesResult)
p := models.TransferProgress{
Export: &models.Progress{},
}
p := models.Progress{}
go b.exportLoves(oldestTimestamp, exportChan)
for existingLoves := range exportChan {
if existingLoves.Error != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.LovesResult{Error: existingLoves.Error}
return
}
p.Export.TotalItems = existingLoves.Total
p.Export.Total = int64(existingLoves.Total)
p.Export.Elapsed += int64(len(existingLoves.Items))
p.Total = int64(existingLoves.Total)
p.Elapsed += int64(existingLoves.Items.Len())
progress <- p
results <- existingLoves
}
p.Export.Complete()
progress <- p
progress <- p.Complete()
}
func (b *ListenBrainzApiBackend) exportLoves(oldestTimestamp time.Time, results chan models.LovesResult) {
@ -271,7 +262,7 @@ out:
}
}
func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
if len(b.existingMBIDs) == 0 {
existingLovesChan := make(chan models.LovesResult)
go b.exportLoves(time.Unix(0, 0), existingLovesChan)
@ -341,7 +332,7 @@ func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importRe
importResult.Log(models.Error, msg)
}
progress <- models.TransferProgress{}.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
}
return importResult, nil

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -63,24 +63,21 @@ func (b *MalojaApiBackend) InitConfig(config *config.ServiceConfig) error {
func (b *MalojaApiBackend) StartImport() error { return nil }
func (b *MalojaApiBackend) FinishImport() error { return nil }
func (b *MalojaApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
func (b *MalojaApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
page := 0
perPage := MaxItemsPerGet
defer close(results)
// We need to gather the full list of listens in order to sort them
listens := make(models.ListensList, 0, 2*perPage)
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(perPage),
},
}
p := models.Progress{Total: int64(perPage)}
out:
for {
result, err := b.client.GetScrobbles(page, perPage)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.ListensResult{Error: err}
return
}
@ -92,27 +89,24 @@ out:
for _, scrobble := range result.List {
if scrobble.ListenedAt > oldestTimestamp.Unix() {
p.Export.Elapsed += 1
p.Elapsed += 1
listens = append(listens, scrobble.AsListen())
} else {
break out
}
}
p.Export.TotalItems = len(listens)
p.Export.Total += int64(perPage)
p.Total += int64(perPage)
progress <- p
page += 1
}
sort.Sort(listens)
p.Export.Complete()
progress <- p
progress <- p.Complete()
results <- models.ListensResult{Items: listens}
}
func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
p := models.TransferProgress{}.FromImportResult(importResult, false)
func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
for _, listen := range export.Items {
scrobble := NewScrobble{
Title: listen.TrackName,
@ -133,7 +127,7 @@ func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResu
importResult.UpdateTimestamp(listen.ListenedAt)
importResult.ImportCount += 1
progress <- p.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
}
return importResult, nil

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -67,19 +67,18 @@ func (b *ScrobblerLogBackend) InitConfig(config *config.ServiceConfig) error {
b.filePath = config.GetString("file-path")
b.ignoreSkipped = config.GetBool("ignore-skipped", true)
b.append = config.GetBool("append", true)
b.log = scrobblerlog.ScrobblerLog{
TZ: scrobblerlog.TimezoneUTC,
Client: "Rockbox unknown $Revision$",
}
if timezone := config.GetString("time-zone"); timezone != "" {
timezone := config.GetString("time-zone")
if timezone != "" {
location, err := time.LoadLocation(timezone)
if err != nil {
return fmt.Errorf("Invalid time-zone %q: %w", timezone, err)
}
b.log.FallbackTimezone = location
}
b.log = scrobblerlog.ScrobblerLog{
TZ: scrobblerlog.TimezoneUTC,
Client: "Rockbox unknown $Revision$",
}
return nil
}
@ -131,14 +130,11 @@ func (b *ScrobblerLogBackend) FinishImport() error {
return b.file.Close()
}
func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
defer close(results)
file, err := os.Open(b.filePath)
p := models.TransferProgress{
Export: &models.Progress{},
}
if err != nil {
p.Export.Abort()
progress <- p
progress <- models.Progress{}.Complete()
results <- models.ListensResult{Error: err}
return
}
@ -147,8 +143,7 @@ func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results c
err = b.log.Parse(file, b.ignoreSkipped)
if err != nil {
p.Export.Abort()
progress <- p
progress <- models.Progress{}.Complete()
results <- models.ListensResult{Error: err}
return
}
@ -156,19 +151,14 @@ func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results c
listens := make(models.ListensList, 0, len(b.log.Records))
client := strings.Split(b.log.Client, " ")[0]
for _, record := range b.log.Records {
listen := recordToListen(record, client)
if listen.ListenedAt.After(oldestTimestamp) {
listens = append(listens, recordToListen(record, client))
}
listens = append(listens, recordToListen(record, client))
}
sort.Sort(listens)
p.Export.Total = int64(len(listens))
p.Export.Complete()
progress <- p
sort.Sort(listens.NewerThan(oldestTimestamp))
progress <- models.Progress{Total: int64(len(listens))}.Complete()
results <- models.ListensResult{Items: listens}
}
func (b *ScrobblerLogBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
func (b *ScrobblerLogBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
records := make([]scrobblerlog.Record, len(export.Items))
for i, listen := range export.Items {
records[i] = listenToRecord(listen)
@ -180,7 +170,7 @@ func (b *ScrobblerLogBackend) ImportListens(export models.ListensResult, importR
importResult.UpdateTimestamp(lastTimestamp)
importResult.ImportCount += len(export.Items)
progress <- models.TransferProgress{}.FromImportResult(importResult, false)
progress <- models.Progress{}.FromImportResult(importResult)
return importResult, nil
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -95,22 +95,20 @@ func (b *SpotifyApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
return nil
}
func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
startTime := time.Now()
minTime := oldestTimestamp
totalDuration := startTime.Sub(oldestTimestamp)
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(totalDuration.Seconds()),
},
}
defer close(results)
p := models.Progress{Total: int64(totalDuration.Seconds())}
for {
result, err := b.client.RecentlyPlayedAfter(minTime, MaxItemsPerGet)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.ListensResult{Error: err}
return
}
@ -122,8 +120,7 @@ func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results cha
// Set minTime to the newest returned listen
after, err := strconv.ParseInt(result.Cursors.After, 10, 64)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.ListensResult{Error: err}
return
} else if after <= minTime.Unix() {
@ -151,28 +148,24 @@ func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results cha
}
sort.Sort(listens)
p.Export.TotalItems += len(listens)
p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
progress <- p
results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}
}
results <- models.ListensResult{OldestTimestamp: minTime}
p.Export.Complete()
progress <- p
progress <- p.Complete()
}
func (b *SpotifyApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
func (b *SpotifyApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
// Choose a high offset, we attempt to search the loves backwards starting
// at the oldest one.
offset := math.MaxInt32
perPage := MaxItemsPerGet
p := models.TransferProgress{
Export: &models.Progress{
Total: int64(perPage),
},
}
defer close(results)
p := models.Progress{Total: int64(perPage)}
totalCount := 0
exportCount := 0
@ -180,8 +173,7 @@ out:
for {
result, err := b.client.UserTracks(offset, perPage)
if err != nil {
p.Export.Abort()
progress <- p
progress <- p.Complete()
results <- models.LovesResult{Error: err}
return
}
@ -189,7 +181,7 @@ out:
// The offset was higher then the actual number of tracks. Adjust the offset
// and continue.
if offset >= result.Total {
p.Export.Total = int64(result.Total)
p.Total = int64(result.Total)
totalCount = result.Total
offset = max(result.Total-perPage, 0)
continue
@ -213,7 +205,7 @@ out:
exportCount += len(loves)
sort.Sort(loves)
results <- models.LovesResult{Items: loves, Total: totalCount}
p.Export.Elapsed += int64(count)
p.Elapsed += int64(count)
progress <- p
if offset <= 0 {
@ -228,8 +220,7 @@ out:
}
results <- models.LovesResult{Total: exportCount}
p.Export.Complete()
progress <- p
progress <- p.Complete()
}
func (l Listen) AsListen() models.Listen {

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -72,27 +72,23 @@ func (b *SpotifyHistoryBackend) InitConfig(config *config.ServiceConfig) error {
return nil
}
func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
files, err := filepath.Glob(path.Join(b.dirPath, historyFileGlob))
p := models.TransferProgress{
Export: &models.Progress{},
}
func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
defer close(results)
files, err := filepath.Glob(path.Join(b.dirPath, historyFileGlob))
if err != nil {
p.Export.Abort()
progress <- p
progress <- models.Progress{}.Complete()
results <- models.ListensResult{Error: err}
return
}
slices.Sort(files)
fileCount := int64(len(files))
p.Export.Total = fileCount
p := models.Progress{Total: fileCount}
for i, filePath := range files {
history, err := readHistoryFile(filePath)
if err != nil {
p.Export.Abort()
progress <- p
progress <- models.Progress{}.Complete()
results <- models.ListensResult{Error: err}
return
}
@ -103,13 +99,11 @@ func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results
})
sort.Sort(listens)
results <- models.ListensResult{Items: listens}
p.Export.Elapsed = int64(i)
p.Export.TotalItems += len(listens)
p.Elapsed = int64(i)
progress <- p
}
p.Export.Complete()
progress <- p
progress <- p.Complete()
}
func readHistoryFile(filePath string) (StreamingHistory, error) {

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -63,30 +63,26 @@ func (b *SubsonicApiBackend) InitConfig(config *config.ServiceConfig) error {
return nil
}
func (b *SubsonicApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
func (b *SubsonicApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
defer close(results)
err := b.client.Authenticate(b.password)
p := models.TransferProgress{
Export: &models.Progress{},
}
if err != nil {
p.Export.Abort()
progress <- p
progress <- models.Progress{}.Complete()
results <- models.LovesResult{Error: err}
return
}
starred, err := b.client.GetStarred2(map[string]string{})
if err != nil {
p.Export.Abort()
progress <- p
progress <- models.Progress{}.Complete()
results <- models.LovesResult{Error: err}
return
}
loves := b.filterSongs(starred.Song, oldestTimestamp)
p.Export.Total = int64(len(loves))
p.Export.Complete()
progress <- p
progress <- models.Progress{
Total: int64(loves.Len()),
}.Complete()
results <- models.LovesResult{Items: loves}
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -28,19 +28,7 @@ import (
"go.uploadedlobster.com/scotty/internal/models"
)
type progressBarUpdater struct {
wg *sync.WaitGroup
progress *mpb.Progress
exportBar *mpb.Bar
importBar *mpb.Bar
updateChan chan models.TransferProgress
lastExportUpdate time.Time
totalItems int
importedItems int
}
func setupProgressBars(updateChan chan models.TransferProgress) progressBarUpdater {
wg := &sync.WaitGroup{}
func progressBar(wg *sync.WaitGroup, exportProgress chan models.Progress, importProgress chan models.Progress) *mpb.Progress {
p := mpb.New(
mpb.WithWaitGroup(wg),
mpb.WithOutput(color.Output),
@ -48,81 +36,16 @@ func setupProgressBars(updateChan chan models.TransferProgress) progressBarUpdat
mpb.WithAutoRefresh(),
)
u := progressBarUpdater{
wg: wg,
progress: p,
exportBar: initExportProgressBar(p, i18n.Tr("exporting")),
importBar: initImportProgressBar(p, i18n.Tr("importing")),
updateChan: updateChan,
}
exportBar := setupProgressBar(p, i18n.Tr("exporting"))
importBar := setupProgressBar(p, i18n.Tr("importing"))
go updateProgressBar(exportBar, wg, exportProgress)
go updateProgressBar(importBar, wg, importProgress)
go u.update()
return u
return p
}
func (u *progressBarUpdater) close() {
close(u.updateChan)
u.progress.Wait()
}
func (u *progressBarUpdater) update() {
u.wg.Add(1)
defer u.wg.Done()
u.lastExportUpdate = time.Now()
for progress := range u.updateChan {
if progress.Export != nil {
u.updateExportProgress(progress.Export)
}
if progress.Import != nil {
if int64(u.totalItems) > progress.Import.Total {
progress.Import.Total = int64(u.totalItems)
}
u.updateImportProgress(progress.Import)
}
}
}
func (u *progressBarUpdater) updateExportProgress(progress *models.Progress) {
bar := u.exportBar
u.totalItems = progress.TotalItems
if progress.Aborted {
bar.Abort(false)
return
}
oldIterTime := u.lastExportUpdate
u.lastExportUpdate = time.Now()
elapsedTime := u.lastExportUpdate.Sub(oldIterTime)
bar.EwmaSetCurrent(progress.Elapsed, elapsedTime)
bar.SetTotal(progress.Total, progress.Completed)
}
func (u *progressBarUpdater) updateImportProgress(progress *models.Progress) {
bar := u.importBar
if progress.Aborted {
bar.Abort(false)
return
}
bar.SetCurrent(progress.Elapsed)
bar.SetTotal(progress.Total, progress.Completed)
}
func initExportProgressBar(p *mpb.Progress, name string) *mpb.Bar {
return initProgressBar(p, name,
decor.EwmaETA(decor.ET_STYLE_GO, 0, decor.WC{C: decor.DSyncWidth}))
}
func initImportProgressBar(p *mpb.Progress, name string) *mpb.Bar {
return initProgressBar(p, name, decor.Counters(0, "%d / %d"))
}
func initProgressBar(p *mpb.Progress, name string, progressDecorator decor.Decorator) *mpb.Bar {
func setupProgressBar(p *mpb.Progress, name string) *mpb.Bar {
green := color.New(color.FgGreen).SprintFunc()
red := color.New(color.FgHiRed, color.Bold).SprintFunc()
return p.New(0,
mpb.BarStyle(),
mpb.PrependDecorators(
@ -135,13 +58,23 @@ func initProgressBar(p *mpb.Progress, name string, progressDecorator decor.Decor
),
mpb.AppendDecorators(
decor.OnComplete(
decor.OnAbort(
progressDecorator,
red(i18n.Tr("aborted")),
),
decor.EwmaETA(decor.ET_STYLE_GO, 0, decor.WC{C: decor.DSyncWidth}),
i18n.Tr("done"),
),
// decor.OnComplete(decor.Percentage(decor.WC{W: 5, C: decor.DSyncWidthR}), "done"),
decor.Name(" "),
),
)
}
func updateProgressBar(bar *mpb.Bar, wg *sync.WaitGroup, progressChan chan models.Progress) {
wg.Add(1)
defer wg.Done()
lastIterTime := time.Now()
for progress := range progressChan {
oldIterTime := lastIterTime
lastIterTime = time.Now()
bar.EwmaSetCurrent(progress.Elapsed, lastIterTime.Sub(oldIterTime))
bar.SetTotal(progress.Total, progress.Completed)
}
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Scotty is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
@ -110,34 +110,36 @@ func (c *TransferCmd[E, I, R]) Transfer(exp backends.ExportProcessor[R], imp bac
printTimestamp("From timestamp: %v (%v)", timestamp)
// Prepare progress bars
progressChan := make(chan models.TransferProgress)
progress := setupProgressBars(progressChan)
wg := &sync.WaitGroup{}
exportProgress := make(chan models.Progress)
importProgress := make(chan models.Progress)
var wg sync.WaitGroup
progress := progressBar(&wg, exportProgress, importProgress)
// Export from source
exportChan := make(chan R, 1000)
go exp.Process(wg, timestamp, exportChan, progressChan)
go exp.Process(timestamp, exportChan, exportProgress)
// Import into target
resultChan := make(chan models.ImportResult)
go imp.Process(wg, exportChan, resultChan, progressChan)
go imp.Process(exportChan, resultChan, importProgress)
result := <-resultChan
wg.Wait()
progress.close()
// Update timestamp
err = c.updateTimestamp(&result, timestamp)
if err != nil {
return err
if timestamp.After(result.LastTimestamp) {
result.LastTimestamp = timestamp
}
fmt.Println(i18n.Tr("Imported %v of %v %s into %v.",
result.ImportCount, result.TotalCount, c.entity, c.targetName))
wg.Wait()
progress.Wait()
if result.Error != nil {
printTimestamp("Import failed, last reported timestamp was %v (%s)", result.LastTimestamp)
return result.Error
}
fmt.Println(i18n.Tr("Imported %v of %v %s into %v.",
result.ImportCount, result.TotalCount, c.entity, c.targetName))
// Update timestamp
err = c.updateTimestamp(result, timestamp)
if err != nil {
return err
}
// Print errors
if len(result.ImportLog) > 0 {
@ -177,7 +179,7 @@ func (c *TransferCmd[E, I, R]) timestamp() (time.Time, error) {
return time.Time{}, errors.New(i18n.Tr("invalid timestamp string \"%v\"", flagValue))
}
func (c *TransferCmd[E, I, R]) updateTimestamp(result *models.ImportResult, oldTimestamp time.Time) error {
func (c *TransferCmd[E, I, R]) updateTimestamp(result models.ImportResult, oldTimestamp time.Time) error {
if oldTimestamp.After(result.LastTimestamp) {
result.LastTimestamp = oldTimestamp
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
This file is part of Scotty.
@ -55,7 +55,7 @@ type ListensExport interface {
// Returns a list of all listens newer then oldestTimestamp.
// The returned list of listens is supposed to be ordered by the
// Listen.ListenedAt timestamp, with the oldest entry first.
ExportListens(oldestTimestamp time.Time, results chan ListensResult, progress chan TransferProgress)
ExportListens(oldestTimestamp time.Time, results chan ListensResult, progress chan Progress)
}
// Must be implemented by services supporting the import of listens.
@ -63,7 +63,7 @@ type ListensImport interface {
ImportBackend
// Imports the given list of listens.
ImportListens(export ListensResult, importResult ImportResult, progress chan TransferProgress) (ImportResult, error)
ImportListens(export ListensResult, importResult ImportResult, progress chan Progress) (ImportResult, error)
}
// Must be implemented by services supporting the export of loves.
@ -73,7 +73,7 @@ type LovesExport interface {
// Returns a list of all loves newer then oldestTimestamp.
// The returned list of listens is supposed to be ordered by the
// Love.Created timestamp, with the oldest entry first.
ExportLoves(oldestTimestamp time.Time, results chan LovesResult, progress chan TransferProgress)
ExportLoves(oldestTimestamp time.Time, results chan LovesResult, progress chan Progress)
}
// Must be implemented by services supporting the import of loves.
@ -81,5 +81,5 @@ type LovesImport interface {
ImportBackend
// Imports the given list of loves.
ImportLoves(export LovesResult, importResult ImportResult, progress chan TransferProgress) (ImportResult, error)
ImportLoves(export LovesResult, importResult ImportResult, progress chan Progress) (ImportResult, error)
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -209,25 +209,10 @@ func (i *ImportResult) Log(t LogEntryType, msg string) {
})
}
type TransferProgress struct {
Export *Progress
Import *Progress
}
func (p TransferProgress) FromImportResult(result ImportResult, completed bool) TransferProgress {
importProgress := Progress{
Completed: completed,
}.FromImportResult(result)
p.Import = &importProgress
return p
}
type Progress struct {
TotalItems int
Total int64
Elapsed int64
Completed bool
Aborted bool
Total int64
Elapsed int64
Completed bool
}
func (p Progress) FromImportResult(result ImportResult) Progress {
@ -236,11 +221,8 @@ func (p Progress) FromImportResult(result ImportResult) Progress {
return p
}
func (p *Progress) Complete() {
func (p Progress) Complete() Progress {
p.Elapsed = p.Total
p.Completed = true
}
func (p *Progress) Abort() {
p.Aborted = true
return p
}

View file

@ -42,12 +42,12 @@ var messageKeyToIndex = map[string]int{
"\tbackend: %v": 11,
"\texport: %s": 0,
"\timport: %s\n": 1,
"%v: %v": 49,
"%v: %v": 48,
"Aborted": 8,
"Access token": 19,
"Access token received, you can use %v now.\n": 34,
"Append to file": 21,
"Backend": 43,
"Backend": 42,
"Check for duplicate listens on import (slower)": 24,
"Client ID": 15,
"Client secret": 16,
@ -57,46 +57,45 @@ var messageKeyToIndex = map[string]int{
"Error: OAuth state mismatch": 33,
"Failed reading config: %v": 2,
"File path": 20,
"From timestamp: %v (%v)": 45,
"From timestamp: %v (%v)": 44,
"Ignore listens in incognito mode": 30,
"Ignore skipped listens": 27,
"Ignored duplicate listen %v: \"%v\" by %v (%v)": 25,
"Import failed, last reported timestamp was %v (%s)": 47,
"Import log:": 48,
"Import failed, last reported timestamp was %v (%s)": 45,
"Import log:": 47,
"Imported %v of %v %s into %v.": 46,
"Latest timestamp: %v (%v)": 51,
"Latest timestamp: %v (%v)": 50,
"Minimum playback duration for skipped tracks (seconds)": 31,
"No": 40,
"No": 39,
"Playlist title": 22,
"Saved service %v using backend %v": 5,
"Server URL": 17,
"Service": 42,
"Service": 41,
"Service \"%v\" deleted\n": 9,
"Service name": 3,
"Specify a time zone for the listen timestamps": 28,
"The backend %v requires authentication. Authenticate now?": 6,
"Token received, you can close this window now.": 12,
"Transferring %s from %s to %s…": 44,
"Transferring %s from %s to %s…": 43,
"Unique playlist identifier": 23,
"Updated service %v using backend %v\n": 10,
"User name": 18,
"Visit the URL for authorization: %v": 32,
"Yes": 39,
"Yes": 38,
"a service with this name already exists": 4,
"aborted": 37,
"backend %s does not implement %s": 13,
"done": 38,
"exporting": 35,
"importing": 36,
"invalid timestamp string \"%v\"": 50,
"key must only consist of A-Za-z0-9_-": 53,
"no configuration file defined, cannot write config": 52,
"no existing service configurations": 41,
"no service configuration \"%v\"": 54,
"unknown backend \"%s\"": 14,
"backend %s does not implement %s": 13,
"done": 37,
"exporting": 35,
"importing": 36,
"invalid timestamp string \"%v\"": 49,
"key must only consist of A-Za-z0-9_-": 52,
"no configuration file defined, cannot write config": 51,
"no existing service configurations": 40,
"no service configuration \"%v\"": 53,
"unknown backend \"%s\"": 14,
}
var deIndex = []uint32{ // 56 elements
var deIndex = []uint32{ // 55 elements
// Entry 0 - 1F
0x00000000, 0x00000013, 0x00000027, 0x00000052,
0x0000005e, 0x0000008d, 0x000000bd, 0x00000104,
@ -108,14 +107,14 @@ var deIndex = []uint32{ // 56 elements
0x0000037e, 0x000003a4, 0x000003b4, 0x000003da,
// Entry 20 - 3F
0x00000418, 0x00000443, 0x0000046d, 0x000004ad,
0x000004b8, 0x000004c3, 0x000004cf, 0x000004d6,
0x000004d9, 0x000004de, 0x00000507, 0x0000050f,
0x00000517, 0x00000540, 0x0000055e, 0x00000589,
0x000005c6, 0x000005d1, 0x000005de, 0x00000602,
0x00000625, 0x00000676, 0x000006ad, 0x000006d4,
} // Size: 248 bytes
0x000004b8, 0x000004c3, 0x000004ca, 0x000004cd,
0x000004d2, 0x000004fb, 0x00000503, 0x0000050b,
0x00000534, 0x00000552, 0x0000058f, 0x000005ba,
0x000005c5, 0x000005d2, 0x000005f6, 0x00000619,
0x0000066a, 0x000006a1, 0x000006c8,
} // Size: 244 bytes
const deData string = "" + // Size: 1748 bytes
const deData string = "" + // Size: 1736 bytes
"\x04\x01\x09\x00\x0e\x02Export: %[1]s\x04\x01\x09\x01\x0a\x0e\x02Import:" +
" %[1]s\x02Fehler beim Lesen der Konfiguration: %[1]v\x02Servicename\x02e" +
"in Service mit diesem Namen existiert bereits\x02Service %[1]v mit dem B" +
@ -135,17 +134,17 @@ const deData string = "" + // Size: 1748 bytes
"inimale Wiedergabedauer für übersprungene Titel (Sekunden)\x02Zur Anmeld" +
"ung folgende URL aufrufen: %[1]v\x02Fehler: OAuth-State stimmt nicht übe" +
"rein\x04\x00\x01\x0a;\x02Zugriffstoken erhalten, %[1]v kann jetzt verwen" +
"det werden.\x02exportiere\x02importiere\x02abgebrochen\x02fertig\x02Ja" +
"\x02Nein\x02keine bestehenden Servicekonfigurationen\x02Service\x02Backe" +
"nd\x02Übertrage %[1]s von %[2]s nach %[3]s…\x02Ab Zeitstempel: %[1]v (%[" +
"2]v)\x02%[1]v von %[2]v %[3]s in %[4]v importiert.\x02Import fehlgeschla" +
"gen, letzter Zeitstempel war %[1]v (%[2]s)\x02Importlog:\x02%[1]v: %[2]v" +
"\x02ungültiger Zeitstempel „%[1]v“\x02Letzter Zeitstempel: %[1]v (%[2]v)" +
"\x02keine Konfigurationsdatei definiert, Konfiguration kann nicht geschr" +
"ieben werden\x02Schlüssel darf nur die Zeichen A-Za-z0-9_- beinhalten" +
"\x02keine Servicekonfiguration „%[1]v“"
"det werden.\x02exportiere\x02importiere\x02fertig\x02Ja\x02Nein\x02keine" +
" bestehenden Servicekonfigurationen\x02Service\x02Backend\x02Übertrage %" +
"[1]s von %[2]s nach %[3]s…\x02Ab Zeitstempel: %[1]v (%[2]v)\x02Import fe" +
"hlgeschlagen, letzter Zeitstempel war %[1]v (%[2]s)\x02%[1]v von %[2]v %" +
"[3]s in %[4]v importiert.\x02Importlog:\x02%[1]v: %[2]v\x02ungültiger Ze" +
"itstempel „%[1]v“\x02Letzter Zeitstempel: %[1]v (%[2]v)\x02keine Konfigu" +
"rationsdatei definiert, Konfiguration kann nicht geschrieben werden\x02S" +
"chlüssel darf nur die Zeichen A-Za-z0-9_- beinhalten\x02keine Servicekon" +
"figuration „%[1]v“"
var enIndex = []uint32{ // 56 elements
var enIndex = []uint32{ // 55 elements
// Entry 0 - 1F
0x00000000, 0x00000013, 0x00000027, 0x00000044,
0x00000051, 0x00000079, 0x000000a1, 0x000000de,
@ -157,14 +156,14 @@ var enIndex = []uint32{ // 56 elements
0x00000307, 0x00000335, 0x00000344, 0x00000365,
// Entry 20 - 3F
0x0000039c, 0x000003c3, 0x000003df, 0x00000412,
0x0000041c, 0x00000426, 0x0000042e, 0x00000433,
0x00000437, 0x0000043a, 0x0000045d, 0x00000465,
0x0000046d, 0x00000497, 0x000004b5, 0x000004df,
0x00000518, 0x00000524, 0x00000531, 0x00000552,
0x00000572, 0x000005a5, 0x000005ca, 0x000005eb,
} // Size: 248 bytes
0x0000041c, 0x00000426, 0x0000042b, 0x0000042f,
0x00000432, 0x00000455, 0x0000045d, 0x00000465,
0x0000048f, 0x000004ad, 0x000004e6, 0x00000510,
0x0000051c, 0x00000529, 0x0000054a, 0x0000056a,
0x0000059d, 0x000005c2, 0x000005e3,
} // Size: 244 bytes
const enData string = "" + // Size: 1515 bytes
const enData string = "" + // Size: 1507 bytes
"\x04\x01\x09\x00\x0e\x02export: %[1]s\x04\x01\x09\x01\x0a\x0e\x02import:" +
" %[1]s\x02Failed reading config: %[1]v\x02Service name\x02a service with" +
" this name already exists\x02Saved service %[1]v using backend %[2]v\x02" +
@ -182,14 +181,13 @@ const enData string = "" + // Size: 1515 bytes
"mps\x02Directory path\x02Ignore listens in incognito mode\x02Minimum pla" +
"yback duration for skipped tracks (seconds)\x02Visit the URL for authori" +
"zation: %[1]v\x02Error: OAuth state mismatch\x04\x00\x01\x0a.\x02Access " +
"token received, you can use %[1]v now.\x02exporting\x02importing\x02abor" +
"ted\x02done\x02Yes\x02No\x02no existing service configurations\x02Servic" +
"e\x02Backend\x02Transferring %[1]s from %[2]s to %[3]s…\x02From timestam" +
"p: %[1]v (%[2]v)\x02Imported %[1]v of %[2]v %[3]s into %[4]v.\x02Import " +
"failed, last reported timestamp was %[1]v (%[2]s)\x02Import log:\x02%[1]" +
"v: %[2]v\x02invalid timestamp string \x22%[1]v\x22\x02Latest timestamp: " +
"%[1]v (%[2]v)\x02no configuration file defined, cannot write config\x02k" +
"ey must only consist of A-Za-z0-9_-\x02no service configuration \x22%[1]" +
"v\x22"
"token received, you can use %[1]v now.\x02exporting\x02importing\x02done" +
"\x02Yes\x02No\x02no existing service configurations\x02Service\x02Backen" +
"d\x02Transferring %[1]s from %[2]s to %[3]s…\x02From timestamp: %[1]v (%" +
"[2]v)\x02Import failed, last reported timestamp was %[1]v (%[2]s)\x02Imp" +
"orted %[1]v of %[2]v %[3]s into %[4]v.\x02Import log:\x02%[1]v: %[2]v" +
"\x02invalid timestamp string \x22%[1]v\x22\x02Latest timestamp: %[1]v (%" +
"[2]v)\x02no configuration file defined, cannot write config\x02key must " +
"only consist of A-Za-z0-9_-\x02no service configuration \x22%[1]v\x22"
// Total table size 3759 bytes (3KiB); checksum: 7B4CF967
// Total table size 3731 bytes (3KiB); checksum: F7951710

View file

@ -368,23 +368,21 @@
"id": "exporting",
"message": "exporting",
"translatorComment": "Copied from source.",
"fuzzy": true,
"translation": "exportiere"
},
{
"id": "importing",
"message": "importing",
"translatorComment": "Copied from source.",
"fuzzy": true,
"translation": "importiere"
},
{
"id": "aborted",
"message": "aborted",
"translation": "abgebrochen"
},
{
"id": "done",
"message": "done",
"translatorComment": "Copied from source.",
"fuzzy": true,
"translation": "fertig"
},
{
@ -464,6 +462,27 @@
}
]
},
{
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"translation": "Import fehlgeschlagen, letzter Zeitstempel war {Arg_1} ({Arg_2})",
"placeholders": [
{
"id": "Arg_1",
"string": "%[1]v",
"type": "",
"underlyingType": "interface{}",
"argNum": 1
},
{
"id": "Arg_2",
"string": "%[2]s",
"type": "",
"underlyingType": "string",
"argNum": 2
}
]
},
{
"id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
"message": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
@ -503,27 +522,6 @@
}
]
},
{
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"translation": "Import fehlgeschlagen, letzter Zeitstempel war {Arg_1} ({Arg_2})",
"placeholders": [
{
"id": "Arg_1",
"string": "%[1]v",
"type": "",
"underlyingType": "interface{}",
"argNum": 1
},
{
"id": "Arg_2",
"string": "%[2]s",
"type": "",
"underlyingType": "string",
"argNum": 2
}
]
},
{
"id": "Import log:",
"message": "Import log:",

View file

@ -368,24 +368,22 @@
"id": "exporting",
"message": "exporting",
"translation": "exportiere",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "importing",
"message": "importing",
"translation": "importiere",
"translatorComment": "Copied from source."
},
{
"id": "aborted",
"message": "aborted",
"translation": "abgebrochen"
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "done",
"message": "done",
"translation": "fertig",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Yes",
@ -464,6 +462,27 @@
}
]
},
{
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"translation": "Import fehlgeschlagen, letzter Zeitstempel war {Arg_1} ({Arg_2})",
"placeholders": [
{
"id": "Arg_1",
"string": "%[1]v",
"type": "",
"underlyingType": "interface{}",
"argNum": 1
},
{
"id": "Arg_2",
"string": "%[2]s",
"type": "",
"underlyingType": "string",
"argNum": 2
}
]
},
{
"id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
"message": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
@ -503,27 +522,6 @@
}
]
},
{
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"translation": "Import fehlgeschlagen, letzter Zeitstempel war {Arg_1} ({Arg_2})",
"placeholders": [
{
"id": "Arg_1",
"string": "%[1]v",
"type": "",
"underlyingType": "interface{}",
"argNum": 1
},
{
"id": "Arg_2",
"string": "%[2]s",
"type": "",
"underlyingType": "string",
"argNum": 2
}
]
},
{
"id": "Import log:",
"message": "Import log:",

View file

@ -15,7 +15,8 @@
"argNum": 1,
"expr": "strings.Join(info.ExportCapabilities, \", \")"
}
]
],
"fuzzy": true
},
{
"id": "import: {ImportCapabilities__}",
@ -31,7 +32,8 @@
"argNum": 1,
"expr": "strings.Join(info.ImportCapabilities, \", \")"
}
]
],
"fuzzy": true
},
{
"id": "Failed reading config: {Err}",
@ -47,19 +49,22 @@
"argNum": 1,
"expr": "err"
}
]
],
"fuzzy": true
},
{
"id": "Service name",
"message": "Service name",
"translation": "Service name",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "a service with this name already exists",
"message": "a service with this name already exists",
"translation": "a service with this name already exists",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Saved service {Name} using backend {Backend}",
@ -83,7 +88,8 @@
"argNum": 2,
"expr": "service.Backend"
}
]
],
"fuzzy": true
},
{
"id": "The backend {Backend} requires authentication. Authenticate now?",
@ -99,7 +105,8 @@
"argNum": 1,
"expr": "service.Backend"
}
]
],
"fuzzy": true
},
{
"id": "Delete the service configuration \"{Service}\"?",
@ -115,13 +122,15 @@
"argNum": 1,
"expr": "service"
}
]
],
"fuzzy": true
},
{
"id": "Aborted",
"message": "Aborted",
"translation": "Aborted",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Service \"{Name}\" deleted",
@ -137,7 +146,8 @@
"argNum": 1,
"expr": "service.Name"
}
]
],
"fuzzy": true
},
{
"id": "Updated service {Name} using backend {Backend}",
@ -161,7 +171,8 @@
"argNum": 2,
"expr": "service.Backend"
}
]
],
"fuzzy": true
},
{
"id": "backend: {Backend}",
@ -177,13 +188,15 @@
"argNum": 1,
"expr": "s.Backend"
}
]
],
"fuzzy": true
},
{
"id": "Token received, you can close this window now.",
"message": "Token received, you can close this window now.",
"translation": "Token received, you can close this window now.",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "backend {Backend} does not implement {InterfaceName}",
@ -207,7 +220,8 @@
"argNum": 2,
"expr": "interfaceName"
}
]
],
"fuzzy": true
},
{
"id": "unknown backend \"{BackendName}\"",
@ -223,67 +237,78 @@
"argNum": 1,
"expr": "backendName"
}
]
],
"fuzzy": true
},
{
"id": "Client ID",
"message": "Client ID",
"translation": "Client ID",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Client secret",
"message": "Client secret",
"translation": "Client secret",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Server URL",
"message": "Server URL",
"translation": "Server URL",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "User name",
"message": "User name",
"translation": "User name",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Access token",
"message": "Access token",
"translation": "Access token",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "File path",
"message": "File path",
"translation": "File path",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Append to file",
"message": "Append to file",
"translation": "Append to file",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Playlist title",
"message": "Playlist title",
"translation": "Playlist title",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Unique playlist identifier",
"message": "Unique playlist identifier",
"translation": "Unique playlist identifier",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Check for duplicate listens on import (slower)",
"message": "Check for duplicate listens on import (slower)",
"translation": "Check for duplicate listens on import (slower)",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
@ -323,43 +348,50 @@
"argNum": 4,
"expr": "l.RecordingMBID"
}
]
],
"fuzzy": true
},
{
"id": "Disable auto correction of submitted listens",
"message": "Disable auto correction of submitted listens",
"translation": "Disable auto correction of submitted listens",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Ignore skipped listens",
"message": "Ignore skipped listens",
"translation": "Ignore skipped listens",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Specify a time zone for the listen timestamps",
"message": "Specify a time zone for the listen timestamps",
"translation": "Specify a time zone for the listen timestamps",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Directory path",
"message": "Directory path",
"translation": "Directory path",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Ignore listens in incognito mode",
"message": "Ignore listens in incognito mode",
"translation": "Ignore listens in incognito mode",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Minimum playback duration for skipped tracks (seconds)",
"message": "Minimum playback duration for skipped tracks (seconds)",
"translation": "Minimum playback duration for skipped tracks (seconds)",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Visit the URL for authorization: {URL}",
@ -375,13 +407,15 @@
"argNum": 1,
"expr": "authURL.URL"
}
]
],
"fuzzy": true
},
{
"id": "Error: OAuth state mismatch",
"message": "Error: OAuth state mismatch",
"translation": "Error: OAuth state mismatch",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Access token received, you can use {Name} now.",
@ -397,55 +431,64 @@
"argNum": 1,
"expr": "service.Name"
}
]
],
"fuzzy": true
},
{
"id": "exporting",
"message": "exporting",
"translation": "exporting",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "importing",
"message": "importing",
"translation": "importing",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "done",
"message": "done",
"translation": "done",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Yes",
"message": "Yes",
"translation": "Yes",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "No",
"message": "No",
"translation": "No",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "no existing service configurations",
"message": "no existing service configurations",
"translation": "no existing service configurations",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Service",
"message": "Service",
"translation": "Service",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Backend",
"message": "Backend",
"translation": "Backend",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Transferring {Entity} from {SourceName} to {TargetName}…",
@ -477,7 +520,8 @@
"argNum": 3,
"expr": "c.targetName"
}
]
],
"fuzzy": true
},
{
"id": "From timestamp: {Arg_1} ({Arg_2})",
@ -499,7 +543,8 @@
"underlyingType": "interface{}",
"argNum": 2
}
]
],
"fuzzy": true
},
{
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
@ -521,7 +566,8 @@
"underlyingType": "string",
"argNum": 2
}
]
],
"fuzzy": true
},
{
"id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
@ -561,13 +607,15 @@
"argNum": 4,
"expr": "c.targetName"
}
]
],
"fuzzy": true
},
{
"id": "Import log:",
"message": "Import log:",
"translation": "Import log:",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "{Type}: {Message}",
@ -591,7 +639,8 @@
"argNum": 2,
"expr": "entry.Message"
}
]
],
"fuzzy": true
},
{
"id": "invalid timestamp string \"{FlagValue}\"",
@ -607,7 +656,8 @@
"argNum": 1,
"expr": "flagValue"
}
]
],
"fuzzy": true
},
{
"id": "Latest timestamp: {Arg_1} ({Arg_2})",
@ -629,19 +679,22 @@
"underlyingType": "interface{}",
"argNum": 2
}
]
],
"fuzzy": true
},
{
"id": "no configuration file defined, cannot write config",
"message": "no configuration file defined, cannot write config",
"translation": "no configuration file defined, cannot write config",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "key must only consist of A-Za-z0-9_-",
"message": "key must only consist of A-Za-z0-9_-",
"translation": "key must only consist of A-Za-z0-9_-",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "no service configuration \"{Name}\"",
@ -657,7 +710,8 @@
"argNum": 1,
"expr": "name"
}
]
],
"fuzzy": true
}
]
}

View file

@ -15,7 +15,8 @@
"argNum": 1,
"expr": "strings.Join(info.ExportCapabilities, \", \")"
}
]
],
"fuzzy": true
},
{
"id": "import: {ImportCapabilities__}",
@ -31,7 +32,8 @@
"argNum": 1,
"expr": "strings.Join(info.ImportCapabilities, \", \")"
}
]
],
"fuzzy": true
},
{
"id": "Failed reading config: {Err}",
@ -47,19 +49,22 @@
"argNum": 1,
"expr": "err"
}
]
],
"fuzzy": true
},
{
"id": "Service name",
"message": "Service name",
"translation": "Service name",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "a service with this name already exists",
"message": "a service with this name already exists",
"translation": "a service with this name already exists",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Saved service {Name} using backend {Backend}",
@ -83,7 +88,8 @@
"argNum": 2,
"expr": "service.Backend"
}
]
],
"fuzzy": true
},
{
"id": "The backend {Backend} requires authentication. Authenticate now?",
@ -99,7 +105,8 @@
"argNum": 1,
"expr": "service.Backend"
}
]
],
"fuzzy": true
},
{
"id": "Delete the service configuration \"{Service}\"?",
@ -115,13 +122,15 @@
"argNum": 1,
"expr": "service"
}
]
],
"fuzzy": true
},
{
"id": "Aborted",
"message": "Aborted",
"translation": "Aborted",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Service \"{Name}\" deleted",
@ -137,7 +146,8 @@
"argNum": 1,
"expr": "service.Name"
}
]
],
"fuzzy": true
},
{
"id": "Updated service {Name} using backend {Backend}",
@ -161,7 +171,8 @@
"argNum": 2,
"expr": "service.Backend"
}
]
],
"fuzzy": true
},
{
"id": "backend: {Backend}",
@ -177,13 +188,15 @@
"argNum": 1,
"expr": "s.Backend"
}
]
],
"fuzzy": true
},
{
"id": "Token received, you can close this window now.",
"message": "Token received, you can close this window now.",
"translation": "Token received, you can close this window now.",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "backend {Backend} does not implement {InterfaceName}",
@ -207,7 +220,8 @@
"argNum": 2,
"expr": "interfaceName"
}
]
],
"fuzzy": true
},
{
"id": "unknown backend \"{BackendName}\"",
@ -223,67 +237,78 @@
"argNum": 1,
"expr": "backendName"
}
]
],
"fuzzy": true
},
{
"id": "Client ID",
"message": "Client ID",
"translation": "Client ID",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Client secret",
"message": "Client secret",
"translation": "Client secret",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Server URL",
"message": "Server URL",
"translation": "Server URL",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "User name",
"message": "User name",
"translation": "User name",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Access token",
"message": "Access token",
"translation": "Access token",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "File path",
"message": "File path",
"translation": "File path",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Append to file",
"message": "Append to file",
"translation": "Append to file",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Playlist title",
"message": "Playlist title",
"translation": "Playlist title",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Unique playlist identifier",
"message": "Unique playlist identifier",
"translation": "Unique playlist identifier",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Check for duplicate listens on import (slower)",
"message": "Check for duplicate listens on import (slower)",
"translation": "Check for duplicate listens on import (slower)",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
@ -323,43 +348,50 @@
"argNum": 4,
"expr": "l.RecordingMBID"
}
]
],
"fuzzy": true
},
{
"id": "Disable auto correction of submitted listens",
"message": "Disable auto correction of submitted listens",
"translation": "Disable auto correction of submitted listens",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Ignore skipped listens",
"message": "Ignore skipped listens",
"translation": "Ignore skipped listens",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Specify a time zone for the listen timestamps",
"message": "Specify a time zone for the listen timestamps",
"translation": "Specify a time zone for the listen timestamps",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Directory path",
"message": "Directory path",
"translation": "Directory path",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Ignore listens in incognito mode",
"message": "Ignore listens in incognito mode",
"translation": "Ignore listens in incognito mode",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Minimum playback duration for skipped tracks (seconds)",
"message": "Minimum playback duration for skipped tracks (seconds)",
"translation": "Minimum playback duration for skipped tracks (seconds)",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Visit the URL for authorization: {URL}",
@ -375,13 +407,15 @@
"argNum": 1,
"expr": "authURL.URL"
}
]
],
"fuzzy": true
},
{
"id": "Error: OAuth state mismatch",
"message": "Error: OAuth state mismatch",
"translation": "Error: OAuth state mismatch",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Access token received, you can use {Name} now.",
@ -397,24 +431,20 @@
"argNum": 1,
"expr": "service.Name"
}
]
],
"fuzzy": true
},
{
"id": "exporting",
"message": "exporting",
"translation": "exporting",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "importing",
"message": "importing",
"translation": "importing",
"translatorComment": "Copied from source."
},
{
"id": "aborted",
"message": "aborted",
"translation": "aborted",
"translatorComment": "Copied from source.",
"fuzzy": true
},
@ -422,37 +452,43 @@
"id": "done",
"message": "done",
"translation": "done",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Yes",
"message": "Yes",
"translation": "Yes",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "No",
"message": "No",
"translation": "No",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "no existing service configurations",
"message": "no existing service configurations",
"translation": "no existing service configurations",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Service",
"message": "Service",
"translation": "Service",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Backend",
"message": "Backend",
"translation": "Backend",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "Transferring {Entity} from {SourceName} to {TargetName}…",
@ -484,7 +520,8 @@
"argNum": 3,
"expr": "c.targetName"
}
]
],
"fuzzy": true
},
{
"id": "From timestamp: {Arg_1} ({Arg_2})",
@ -506,7 +543,31 @@
"underlyingType": "interface{}",
"argNum": 2
}
]
],
"fuzzy": true
},
{
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"translation": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"translatorComment": "Copied from source.",
"placeholders": [
{
"id": "Arg_1",
"string": "%[1]v",
"type": "",
"underlyingType": "interface{}",
"argNum": 1
},
{
"id": "Arg_2",
"string": "%[2]s",
"type": "",
"underlyingType": "string",
"argNum": 2
}
],
"fuzzy": true
},
{
"id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
@ -546,35 +607,15 @@
"argNum": 4,
"expr": "c.targetName"
}
]
},
{
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"translation": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
"translatorComment": "Copied from source.",
"placeholders": [
{
"id": "Arg_1",
"string": "%[1]v",
"type": "",
"underlyingType": "interface{}",
"argNum": 1
},
{
"id": "Arg_2",
"string": "%[2]s",
"type": "",
"underlyingType": "string",
"argNum": 2
}
]
],
"fuzzy": true
},
{
"id": "Import log:",
"message": "Import log:",
"translation": "Import log:",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "{Type}: {Message}",
@ -598,7 +639,8 @@
"argNum": 2,
"expr": "entry.Message"
}
]
],
"fuzzy": true
},
{
"id": "invalid timestamp string \"{FlagValue}\"",
@ -614,7 +656,8 @@
"argNum": 1,
"expr": "flagValue"
}
]
],
"fuzzy": true
},
{
"id": "Latest timestamp: {Arg_1} ({Arg_2})",
@ -636,19 +679,22 @@
"underlyingType": "interface{}",
"argNum": 2
}
]
],
"fuzzy": true
},
{
"id": "no configuration file defined, cannot write config",
"message": "no configuration file defined, cannot write config",
"translation": "no configuration file defined, cannot write config",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "key must only consist of A-Za-z0-9_-",
"message": "key must only consist of A-Za-z0-9_-",
"translation": "key must only consist of A-Za-z0-9_-",
"translatorComment": "Copied from source."
"translatorComment": "Copied from source.",
"fuzzy": true
},
{
"id": "no service configuration \"{Name}\"",
@ -664,7 +710,8 @@
"argNum": 1,
"expr": "name"
}
]
],
"fuzzy": true
}
]
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -22,28 +22,7 @@ THE SOFTWARE.
package jspf
import (
"encoding/json"
"fmt"
"time"
)
// Represents a JSPF extension
type Extension any
// A map of JSPF extensions
type ExtensionMap map[string]Extension
// Parses the extension with the given ID and unmarshals it into "v".
// If the extensions is not found or the data cannot be unmarshalled,
// an error is returned.
func (e ExtensionMap) Get(id string, v any) error {
ext, ok := e[id]
if !ok {
return fmt.Errorf("extension %q not found", id)
}
return unmarshalExtension(ext, v)
}
import "time"
const (
// The identifier for the MusicBrainz / ListenBrainz JSPF playlist extension
@ -104,11 +83,3 @@ type MusicBrainzTrackExtension struct {
// this document.
AdditionalMetadata map[string]any `json:"additional_metadata,omitempty"`
}
func unmarshalExtension(ext Extension, v any) error {
asJson, err := json.Marshal(ext)
if err != nil {
return err
}
return json.Unmarshal(asJson, v)
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -26,7 +26,6 @@ import (
"bytes"
"fmt"
"log"
"testing"
"time"
"go.uploadedlobster.com/scotty/pkg/jspf"
@ -39,7 +38,7 @@ func ExampleMusicBrainzTrackExtension() {
Tracks: []jspf.Track{
{
Title: "Oweynagat",
Extension: jspf.ExtensionMap{
Extension: map[string]any{
jspf.MusicBrainzTrackExtensionID: jspf.MusicBrainzTrackExtension{
AddedAt: time.Date(2023, 11, 24, 07, 47, 50, 0, time.UTC),
AddedBy: "scotty",
@ -73,29 +72,3 @@ func ExampleMusicBrainzTrackExtension() {
// }
// }
}
func TestExtensionMapGet(t *testing.T) {
ext := jspf.ExtensionMap{
jspf.MusicBrainzTrackExtensionID: jspf.MusicBrainzTrackExtension{
AddedAt: time.Date(2023, 11, 24, 07, 47, 50, 0, time.UTC),
AddedBy: "scotty",
},
}
var trackExt jspf.MusicBrainzTrackExtension
err := ext.Get(jspf.MusicBrainzTrackExtensionID, &trackExt)
if err != nil {
t.Fatal(err)
}
if trackExt.AddedBy != "scotty" {
t.Fatalf("expected 'scotty', got '%s'", trackExt.AddedBy)
}
}
func TestExtensionMapGetNotFound(t *testing.T) {
ext := jspf.ExtensionMap{}
var trackExt jspf.MusicBrainzTrackExtension
err := ext.Get(jspf.MusicBrainzTrackExtensionID, &trackExt)
if err == nil {
t.Fatal("expected ExtensionMap.Get to return an error")
}
}

View file

@ -1,5 +1,5 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -32,35 +32,35 @@ type JSPF struct {
}
type Playlist struct {
Title string `json:"title,omitempty"`
Creator string `json:"creator,omitempty"`
Annotation string `json:"annotation,omitempty"`
Info string `json:"info,omitempty"`
Location string `json:"location,omitempty"`
Identifier string `json:"identifier,omitempty"`
Image string `json:"image,omitempty"`
Date time.Time `json:"date,omitempty"`
License string `json:"license,omitempty"`
Attribution []Attribution `json:"attribution,omitempty"`
Links []Link `json:"link,omitempty"`
Meta []Meta `json:"meta,omitempty"`
Extension ExtensionMap `json:"extension,omitempty"`
Tracks []Track `json:"track"`
Title string `json:"title,omitempty"`
Creator string `json:"creator,omitempty"`
Annotation string `json:"annotation,omitempty"`
Info string `json:"info,omitempty"`
Location string `json:"location,omitempty"`
Identifier string `json:"identifier,omitempty"`
Image string `json:"image,omitempty"`
Date time.Time `json:"date,omitempty"`
License string `json:"license,omitempty"`
Attribution []Attribution `json:"attribution,omitempty"`
Links []Link `json:"link,omitempty"`
Meta []Meta `json:"meta,omitempty"`
Extension map[string]any `json:"extension,omitempty"`
Tracks []Track `json:"track"`
}
type Track struct {
Location []string `json:"location,omitempty"`
Identifier []string `json:"identifier,omitempty"`
Title string `json:"title,omitempty"`
Creator string `json:"creator,omitempty"`
Annotation string `json:"annotation,omitempty"`
Info string `json:"info,omitempty"`
Album string `json:"album,omitempty"`
TrackNum int `json:"trackNum,omitempty"`
Duration int64 `json:"duration,omitempty"`
Links []Link `json:"link,omitempty"`
Meta []Meta `json:"meta,omitempty"`
Extension ExtensionMap `json:"extension,omitempty"`
Location []string `json:"location,omitempty"`
Identifier []string `json:"identifier,omitempty"`
Title string `json:"title,omitempty"`
Creator string `json:"creator,omitempty"`
Annotation string `json:"annotation,omitempty"`
Info string `json:"info,omitempty"`
Album string `json:"album,omitempty"`
TrackNum int `json:"trackNum,omitempty"`
Duration int `json:"duration,omitempty"`
Links []Link `json:"link,omitempty"`
Meta []Meta `json:"meta,omitempty"`
Extension map[string]any `json:"extension,omitempty"`
}
type Attribution map[string]string

View file

@ -13,7 +13,6 @@ You should have received a copy of the GNU General Public License along with
Scotty. If not, see <https://www.gnu.org/licenses/>.
*/
// Helper functions to set up rate limiting with resty.
package ratelimit
import (

View file

@ -20,18 +20,11 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
// Package to parse and write .scrobbler.log files as written by Rockbox.
//
// The parser supports reading version 1.1 and 1.0 of the scrobbler log file
// format. The latter is only supported if encoded in UTF-8.
//
// When written it always writes version 1.1 of the scrobbler log file format,
// which includes the MusicBrainz recording ID as the last field of each row.
// Package to parse and writer .scrobbler.log files as written by Rockbox.
//
// See
// - https://www.rockbox.org/wiki/LastFMLog
// - https://git.rockbox.org/cgit/rockbox.git/tree/apps/plugins/lastfm_scrobbler.c
// - https://web.archive.org/web/20110110053056/http://www.audioscrobbler.net/wiki/Portable_Player_Logging
package scrobblerlog
import (
@ -86,10 +79,6 @@ type ScrobblerLog struct {
FallbackTimezone *time.Location
}
// Parses a scrobbler log file from the given reader.
//
// The reader must provide a valid scrobbler log file with a valid header.
// This function implicitly calls [ScrobblerLog.ReadHeader].
func (l *ScrobblerLog) Parse(data io.Reader, ignoreSkipped bool) error {
l.Records = make([]Record, 0)
@ -117,7 +106,6 @@ func (l *ScrobblerLog) Parse(data io.Reader, ignoreSkipped bool) error {
// fmt.Printf("row: %v\n", row)
// We consider only the last field (recording MBID) optional
// This was added in the 1.1 file format.
if len(row) < 7 {
line, _ := tsvReader.FieldPos(0)
return fmt.Errorf("invalid record in scrobblerlog line %v", line)
@ -138,11 +126,6 @@ func (l *ScrobblerLog) Parse(data io.Reader, ignoreSkipped bool) error {
return nil
}
// Append writes the given records to the writer.
//
// The writer should be for an existing scrobbler log file or
// [ScrobblerLog.WriteHeader] should be called before this function.
// Returns the last timestamp of the records written.
func (l *ScrobblerLog) Append(data io.Writer, records []Record) (lastTimestamp time.Time, err error) {
tsvWriter := csv.NewWriter(data)
tsvWriter.Comma = '\t'
@ -170,9 +153,6 @@ func (l *ScrobblerLog) Append(data io.Writer, records []Record) (lastTimestamp t
return
}
// Parses just the header of a scrobbler log file from the given reader.
//
// This function sets [ScrobblerLog.TZ] and [ScrobblerLog.Client].
func (l *ScrobblerLog) ReadHeader(reader *bufio.Reader) error {
// Skip header
for i := 0; i < 3; i++ {
@ -211,7 +191,6 @@ func (l *ScrobblerLog) ReadHeader(reader *bufio.Reader) error {
return nil
}
// Writes the header of a scrobbler log file to the given writer.
func (l *ScrobblerLog) WriteHeader(writer io.Writer) error {
headers := []string{
"#AUDIOSCROBBLER/1.1\n",