Mirror of https://git.sr.ht/~phw/scotty (synced 2025-05-16 20:52:34 +02:00)

Compare commits (75 commits)
Commits (SHA1):

97600d8190, a42b5d784d, a87c42059f, 17cee9cb8b, b8e6ccffdb, 1f48abc284,
54fffce1d9, cb6a534fa1, 05f0e8d172, a8517ea249, dfe6773744, aae5123c3d,
15d939e150, 55ac41b147, 069f0de2ee, 3b1adc9f1f, 1c3364dad5, 9480c69cbb,
b3136bde9a, 8885e9cebc, bd7a35cd68, d757129bd7, a645ec5c78, cfc3cd522d,
443734e4c7, 588a6cf96f, 91c28bac0c, 21d757e37c, c305b6c7e3, 389c7c6ec1,
e973e72bbe, 0a411fe2fa, 1e91b684cb, 19852be68b, a6cc8d49ac, a5442b477e,
90e101080f, dff34b249c, bcb1834994, d51c97c648, 39b31fc664, 1516a3a9d6,
82858315fa, e135ea5fa9, 597914e6db, c817480809, 47486ff659, 159f486cdc,
b104c2bc42, ed191d2f15, 0f4b04c641, aad542850a, aeb3a56982, 69665bc286,
9184d2c3cf, 4a30bdf9d9, 91f78d04dd, 9e1c2d8435, db78bfe457, 20c9ada6ec,
7c0774fb8d, 90bf51a00b, 910056b0a6, bed60c7cdf, 2d66d41873, da6c920789,
01e7569051, 1ea90d2d2b, 329f696b55, 5f9c0f24ab, dc834e9b6f, 0d9bc74bc0,
13eb8342ab, ad1644672c, 8fff19ceac
74 changed files with 2228 additions and 1682 deletions

Top-level files: .build.yml, .goreleaser.yaml, .weblate, CHANGES.md, README.md,
config.example.toml, go.mod, go.sum

internal/: auth, backends (backends.go, backends_test.go, deezer, dump,
export.go, funkwhale, import.go, jspf, lastfm, listenbrainz, maloja,
scrobblerlog, spotify, spotifyhistory, subsonic), cli, i18n, models,
similarity, translations, version

pkg/: jspf, ratelimit, scrobblerlog
.build.yml (13 lines changed)

@@ -5,10 +5,11 @@ packages:
   - hut
   - weblate-wlc
 secrets:
-  - 2a17e258-3e99-4093-9527-832c350d9c53
+  - 0e2ad815-6c46-4cea-878e-70fc33f71e77
 oauth: pages.sr.ht/PAGES:RW
 tasks:
   - weblate-update: |
+      cd scotty
       wlc --format text pull scotty
   - test: |
       cd scotty
@@ -28,5 +29,15 @@ tasks:
   - publish-redirect: |
       # Update redirect on https://go.uploadedlobster.com/scotty
       ./scotty/pages/publish.sh
+  # Skip releasing if this is not a tagged release
+  - only-tags: |
+      cd scotty
+      GIT_REF=$(git describe --always)
+      [[ "$GIT_REF" =~ ^v[0-9]+\.[0-9]+(\.[0-9]+)?$ ]] || complete-build
+  - announce-release: |
+      # Announce new release to Go Module Index
+      cd scotty
+      VERSION=$(git describe --exact-match)
+      curl "https://proxy.golang.org/go.uploadedlobster.com/scotty/@v/${VERSION}.info"
 artifacts:
   - scotty/dist/artifacts.tar
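The added only-tags task ends the build early unless the checked-out ref is a version tag, and announce-release then asks the Go module proxy to index that version. Below is a minimal Go sketch of the same tag check, for illustration only; it is not part of the repository and the names are made up:

package main

import (
	"fmt"
	"os/exec"
	"regexp"
	"strings"
)

// versionTag mirrors the pattern used by the only-tags task, e.g. v0.6.0 or v1.2.
var versionTag = regexp.MustCompile(`^v[0-9]+\.[0-9]+(\.[0-9]+)?$`)

func main() {
	// `git describe --always` prints the nearest tag, a tag with a
	// -N-g<hash> suffix, or a bare short hash if no tag exists.
	out, err := exec.Command("git", "describe", "--always").Output()
	if err != nil {
		fmt.Println("git describe failed:", err)
		return
	}
	ref := strings.TrimSpace(string(out))
	if versionTag.MatchString(ref) {
		fmt.Println("tagged release:", ref)
	} else {
		fmt.Println("not a tagged release, skipping publish and announce steps")
	}
}

On a non-tag commit the output looks like v0.5.2-14-g1234abc or a bare hash, which fails the pattern, so the publish steps are skipped.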
.goreleaser.yaml

@@ -6,7 +6,7 @@
 # yaml-language-server: $schema=https://goreleaser.com/static/schema.json
 # vim: set ts=2 sw=2 tw=0 fo=cnqoj

-version: 1
+version: 2

 before:
   hooks:
@@ -21,6 +21,8 @@ builds:
       - windows
       - darwin
     ignore:
+      - goos: linux
+        goarch: "386"
       - goos: windows
         goarch: "386"
@@ -28,7 +30,7 @@ universal_binaries:
   - replace: true

 archives:
-  - format: tar.gz
+  - formats: ['tar.gz']
    # this name template makes the OS and Arch compatible with the results of `uname`.
    name_template: >-
      {{ .ProjectName }}-{{ .Version }}_
@@ -42,7 +44,7 @@ archives:
    # use zip for windows archives
    format_overrides:
      - goos: windows
-       format: zip
+       formats: ['zip']
    files:
      - COPYING
      - README.md
.weblate (new file, 3 lines)

@@ -0,0 +1,3 @@
+[weblate]
+url = https://translate.uploadedlobster.com/api/
+translation = scotty/app
CHANGES.md (37 lines changed)

@@ -1,5 +1,42 @@
 # Scotty Changelog

+## 0.6.0 - WIP
+- Fix program hanging endlessly if import fails (#11)
+- If import fails still store the last successfully imported timestamp
+- Show progress bars as aborted on export / import error
+- JSPF: implemented export as loves and listens
+- JSPF: write track duration
+- JSPF: read username and recording MSID
+- JSPF: add MusicBrainz playlist extension in append mode, if it does not exist
+  in the existing JSPF file
+- scrobblerlog: fix timezone not being set from config (#6)
+- scrobblerlog: fix listen export not considering latest timestamp
+
+
+## 0.5.2 - 2025-05-01
+- ListenBrainz: fixed loves export not considering latest timestamp
+
+
+## 0.5.1 - 2025-05-01
+- scrobblerlog: fixed timezone offset calculation
+- if system locale detection fails don't abort but fall back to English
+
+
+## 0.5.0 - 2025-04-29
+- ListenBrainz: handle missing loves metadata in case of merged recordings
+- ListenBrainz: fix loves import loading all existing loves
+- ListenBrainz: fixed progress for loves import
+- ListenBrainz: log missing recording MBID on love import
+- Subsonic: support OpenSubsonic fields for recording MBID and genres (#5)
+- Subsonic: fixed progress for loves export
+- scrobblerlog: add "time-zone" config option (#6)
+- scrobblerlog: fixed progress for listen export
+- scrobblerlog: renamed setting `include-skipped` to `ignore-skipped`
+
+Note: 386 builds for Linux are not available with this release due to an
+incompatibility with latest version of gorm.
+
+
 ## 0.4.1 - 2024-09-16
 - Subsonic: include `subsonic_id` as additional metadata
 - Deezer: fix artist and album ID URIs (#7)
README.md

@@ -121,7 +121,7 @@ Backend | Listens Export | Listens Import | Loves Export | Loves Import
 ----------------|----------------|----------------|--------------|-------------
 deezer          | ✓              | ⨯              | ✓            | -
 funkwhale       | ✓              | ⨯              | ✓            | -
-jspf            | -              | ✓              | -            | ✓
+jspf            | ✓              | ✓              | ✓            | ✓
 lastfm          | ✓              | ✓              | ✓            | ✓
 listenbrainz    | ✓              | ✓              | ✓            | ✓
 maloja          | ✓              | ✓              | ⨯            | ⨯
@@ -145,7 +145,7 @@ You can help translate this project into your language with [Weblate](https://tr

 ## License

-Scotty © 2023-2024 Philipp Wolfer <phw@uploadedlobster.com>
+Scotty © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Scotty is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
config.example.toml

@@ -56,11 +56,18 @@ backend = "scrobbler-log"
 # The file path to the .scrobbler.log file. Relative paths are resolved against
 # the current working directory when running scotty.
 file-path = "./.scrobbler.log"
-# If true, reading listens from the file also returns listens marked as "skipped"
-include-skipped = true
+# If true (default), ignore listens marked as skipped.
+ignore-skipped = true
 # If true (default), new listens will be appended to the existing file. Set to
 # false to overwrite the file and create a new scrobbler log on every run.
 append = true
+# Specify the time zone of the listens in the scrobbler log. While the log files
+# are supposed to contain Unix timestamps, which are always in UTC, the player
+# writing the log might not be time zone aware. This can cause the timestamps
+# to be in a different time zone. Use the time-zone setting to specify a
+# different time zone, e.g. "Europe/Berlin" or "America/New_York".
+# The default is UTC.
+time-zone = "UTC"

 [service.jspf]
 # Write listens and loves to JSPF playlist files (https://xspf.org/jspf)
@@ -98,9 +105,9 @@ dir-path = "./my_spotify_data_extended/Spotify Extended Streaming Histor
 ignore-incognito = true
 # If true, ignore listens marked as skipped. Default is false.
 ignore-skipped = false
-# Only consider skipped listens with a playback duration longer than this number
-# of seconds. Default is 30 seconds. If ignore-skipped is set to false this
-# setting has no effect.
+# Only consider skipped listens with a playback duration longer than or equal to
+# this number of seconds. Default is 30 seconds. If ignore-skipped is enabled
+# this setting has no effect.
 ignore-min-duration-seconds = 30

 [service.deezer]
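The new time-zone option covers scrobbler logs whose player wrote local wall-clock time instead of true UTC into the Unix-timestamp field. Roughly, the correction amounts to reinterpreting the stored wall-clock value in the configured zone. The following Go sketch is illustrative only; the function name and flow are assumptions, not the project's actual code:

package main

import (
	"fmt"
	"time"
)

// reinterpret treats a timestamp that was written as if it were UTC, but that
// actually encodes local wall-clock time in loc, and returns the corrected UTC time.
func reinterpret(ts int64, loc *time.Location) time.Time {
	wall := time.Unix(ts, 0).UTC()
	return time.Date(wall.Year(), wall.Month(), wall.Day(),
		wall.Hour(), wall.Minute(), wall.Second(), 0, loc).UTC()
}

func main() {
	// Example: a player in Berlin wrote local wall-clock time into the log.
	loc, err := time.LoadLocation("Europe/Berlin")
	if err != nil {
		panic(err)
	}
	raw := int64(1714560000) // hypothetical value taken from a .scrobbler.log line
	fmt.Println("as stored: ", time.Unix(raw, 0).UTC())
	fmt.Println("corrected: ", reinterpret(raw, loc))
}

With the default setting (time-zone = "UTC") the corrected value is identical to the stored one, so existing configurations keep their behaviour.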
go.mod (74 lines changed)

@@ -1,31 +1,33 @@
 module go.uploadedlobster.com/scotty

-go 1.22.0
+go 1.23.0

-toolchain go1.22.2
+toolchain go1.24.2

 require (
-    github.com/Xuanwo/go-locale v1.1.2
-    github.com/agnivade/levenshtein v1.1.1
+    github.com/Xuanwo/go-locale v1.1.3
+    github.com/agnivade/levenshtein v1.2.1
     github.com/cli/browser v1.3.0
-    github.com/delucks/go-subsonic v0.0.0-20240806025900-2a743ec36238
-    github.com/fatih/color v1.17.0
+    github.com/fatih/color v1.18.0
     github.com/glebarez/sqlite v1.11.0
-    github.com/go-resty/resty/v2 v2.15.0
+    github.com/go-resty/resty/v2 v2.16.5
     github.com/jarcoal/httpmock v1.3.1
     github.com/manifoldco/promptui v0.9.0
-    github.com/pelletier/go-toml/v2 v2.2.3
+    github.com/pelletier/go-toml/v2 v2.2.4
     github.com/shkh/lastfm-go v0.0.0-20191215035245-89a801c244e0
-    github.com/spf13/cast v1.7.0
-    github.com/spf13/cobra v1.8.1
-    github.com/spf13/viper v1.19.0
-    github.com/stretchr/testify v1.9.0
-    github.com/vbauerster/mpb/v8 v8.8.3
-    golang.org/x/exp v0.0.0-20240909161429-701f63a606c0
-    golang.org/x/oauth2 v0.23.0
-    golang.org/x/text v0.18.0
-    gorm.io/datatypes v1.2.2
-    gorm.io/gorm v1.25.12
+    github.com/spf13/cast v1.8.0
+    github.com/spf13/cobra v1.9.1
+    github.com/spf13/viper v1.20.1
+    github.com/stretchr/testify v1.10.0
+    github.com/supersonic-app/go-subsonic v0.0.0-20241224013245-9b2841f3711d
+    github.com/vbauerster/mpb/v8 v8.10.0
+    go.uploadedlobster.com/mbtypes v0.4.0
+    go.uploadedlobster.com/musicbrainzws2 v0.14.0
+    golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
+    golang.org/x/oauth2 v0.30.0
+    golang.org/x/text v0.25.0
+    gorm.io/datatypes v1.2.5
+    gorm.io/gorm v1.26.1
 )

 require (
@@ -35,37 +37,39 @@ require (
     github.com/chzyer/readline v1.5.1 // indirect
     github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
     github.com/dustin/go-humanize v1.0.1 // indirect
-    github.com/fsnotify/fsnotify v1.7.0 // indirect
+    github.com/fsnotify/fsnotify v1.9.0 // indirect
     github.com/glebarez/go-sqlite v1.22.0 // indirect
-    github.com/go-sql-driver/mysql v1.8.1 // indirect
+    github.com/go-sql-driver/mysql v1.9.2 // indirect
+    github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
     github.com/google/uuid v1.6.0 // indirect
-    github.com/hashicorp/hcl v1.0.0 // indirect
     github.com/inconshreveable/mousetrap v1.1.0 // indirect
     github.com/jinzhu/inflection v1.0.0 // indirect
     github.com/jinzhu/now v1.1.5 // indirect
-    github.com/magiconair/properties v1.8.7 // indirect
-    github.com/mattn/go-colorable v0.1.13 // indirect
+    github.com/mattn/go-colorable v0.1.14 // indirect
     github.com/mattn/go-isatty v0.0.20 // indirect
     github.com/mattn/go-runewidth v0.0.16 // indirect
-    github.com/mitchellh/mapstructure v1.5.0 // indirect
     github.com/ncruces/go-strftime v0.1.9 // indirect
     github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
     github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
     github.com/rivo/uniseg v0.4.7 // indirect
-    github.com/sagikazarmark/locafero v0.6.0 // indirect
-    github.com/sagikazarmark/slog-shim v0.1.0 // indirect
+    github.com/sagikazarmark/locafero v0.9.0 // indirect
     github.com/sourcegraph/conc v0.3.0 // indirect
-    github.com/spf13/afero v1.11.0 // indirect
-    github.com/spf13/pflag v1.0.5 // indirect
+    github.com/spf13/afero v1.14.0 // indirect
+    github.com/spf13/pflag v1.0.6 // indirect
     github.com/subosito/gotenv v1.6.0 // indirect
     go.uber.org/multierr v1.11.0 // indirect
-    golang.org/x/net v0.29.0 // indirect
-    golang.org/x/sys v0.25.0 // indirect
-    gopkg.in/ini.v1 v1.67.0 // indirect
+    golang.org/x/image v0.27.0 // indirect
+    golang.org/x/mod v0.24.0 // indirect
+    golang.org/x/net v0.40.0 // indirect
+    golang.org/x/sync v0.14.0 // indirect
+    golang.org/x/sys v0.33.0 // indirect
+    golang.org/x/tools v0.33.0 // indirect
     gopkg.in/yaml.v3 v3.0.1 // indirect
     gorm.io/driver/mysql v1.5.7 // indirect
-    modernc.org/libc v1.60.1 // indirect
-    modernc.org/mathutil v1.6.0 // indirect
-    modernc.org/memory v1.8.0 // indirect
-    modernc.org/sqlite v1.33.1 // indirect
+    modernc.org/libc v1.65.2 // indirect
+    modernc.org/mathutil v1.7.1 // indirect
+    modernc.org/memory v1.10.0 // indirect
+    modernc.org/sqlite v1.37.0 // indirect
 )
+
+tool golang.org/x/text/cmd/gotext
go.sum (300 lines changed)

Checksum entries added, updated, and removed to match the dependency changes
in go.mod above, in hunks @@ -2,14 +2,12 @@, @@ -23,105 +21,78 @@ and
@@ -132,193 +103,132 @@.
|
||||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
|
||||||
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gorm.io/datatypes v1.2.0 h1:5YT+eokWdIxhJgWHdrb2zYUimyk0+TaFth+7a0ybzco=
|
gorm.io/datatypes v1.2.5 h1:9UogU3jkydFVW1bIVVeoYsTpLRgwDVW3rHfJG6/Ek9I=
|
||||||
gorm.io/datatypes v1.2.0/go.mod h1:o1dh0ZvjIjhH/bngTpypG6lVRJ5chTBxE09FH/71k04=
|
gorm.io/datatypes v1.2.5/go.mod h1:I5FUdlKpLb5PMqeMQhm30CQ6jXP8Rj89xkTeCSAaAD4=
|
||||||
gorm.io/datatypes v1.2.2 h1:sdn7ZmG4l7JWtMDUb3L98f2Ym7CO5F8mZLlrQJMfF9g=
|
|
||||||
gorm.io/datatypes v1.2.2/go.mod h1:f4BsLcFAX67szSv8svwLRjklArSHAvHLeE3pXAS5DZI=
|
|
||||||
gorm.io/driver/mysql v1.5.6 h1:Ld4mkIickM+EliaQZQx3uOJDJHtrd70MxAUqWqlx3Y8=
|
|
||||||
gorm.io/driver/mysql v1.5.6/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM=
|
|
||||||
gorm.io/driver/mysql v1.5.7 h1:MndhOPYOfEp2rHKgkZIhJ16eVUIRf2HmzgoPmh7FCWo=
|
gorm.io/driver/mysql v1.5.7 h1:MndhOPYOfEp2rHKgkZIhJ16eVUIRf2HmzgoPmh7FCWo=
|
||||||
gorm.io/driver/mysql v1.5.7/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM=
|
gorm.io/driver/mysql v1.5.7/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM=
|
||||||
gorm.io/driver/postgres v1.5.0 h1:u2FXTy14l45qc3UeCJ7QaAXZmZfDDv0YrthvmRq1l0U=
|
gorm.io/driver/postgres v1.5.0 h1:u2FXTy14l45qc3UeCJ7QaAXZmZfDDv0YrthvmRq1l0U=
|
||||||
gorm.io/driver/postgres v1.5.0/go.mod h1:FUZXzO+5Uqg5zzwzv4KK49R8lvGIyscBOqYrtI1Ce9A=
|
gorm.io/driver/postgres v1.5.0/go.mod h1:FUZXzO+5Uqg5zzwzv4KK49R8lvGIyscBOqYrtI1Ce9A=
|
||||||
gorm.io/driver/sqlite v1.4.3 h1:HBBcZSDnWi5BW3B3rwvVTc510KGkBkexlOg0QrmLUuU=
|
gorm.io/driver/sqlite v1.4.3 h1:HBBcZSDnWi5BW3B3rwvVTc510KGkBkexlOg0QrmLUuU=
|
||||||
gorm.io/driver/sqlite v1.4.3/go.mod h1:0Aq3iPO+v9ZKbcdiz8gLWRw5VOPcBOPUQJFLq5e2ecI=
|
gorm.io/driver/sqlite v1.4.3/go.mod h1:0Aq3iPO+v9ZKbcdiz8gLWRw5VOPcBOPUQJFLq5e2ecI=
|
||||||
gorm.io/driver/sqlserver v1.4.1 h1:t4r4r6Jam5E6ejqP7N82qAJIJAht27EGT41HyPfXRw0=
|
gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g=
|
||||||
gorm.io/driver/sqlserver v1.4.1/go.mod h1:DJ4P+MeZbc5rvY58PnmN1Lnyvb5gw5NPzGshHDnJLig=
|
gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g=
|
||||||
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
|
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
|
||||||
gorm.io/gorm v1.25.9 h1:wct0gxZIELDk8+ZqF/MVnHLkA1rvYlBWUMv2EdsK1g8=
|
gorm.io/gorm v1.26.1 h1:ghB2gUI9FkS46luZtn6DLZ0f6ooBJ5IbVej2ENFDjRw=
|
||||||
gorm.io/gorm v1.25.9/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
|
gorm.io/gorm v1.26.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
|
||||||
gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
|
modernc.org/cc/v4 v4.26.1 h1:+X5NtzVBn0KgsBCBe+xkDC7twLb/jNVj9FPgiwSQO3s=
|
||||||
gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
|
modernc.org/cc/v4 v4.26.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
|
||||||
modernc.org/cc/v4 v4.20.0 h1:45Or8mQfbUqJOG9WaxvlFYOAQO0lQ5RvqBcFCXngjxk=
|
modernc.org/ccgo/v4 v4.27.1 h1:emhLB4uoOmkZUnTDFcMI3AbkmU/Evjuerit9Taqe6Ss=
|
||||||
modernc.org/cc/v4 v4.20.0/go.mod h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ=
|
modernc.org/ccgo/v4 v4.27.1/go.mod h1:543Q0qQhJWekKVS5P6yL5fO6liNhla9Lbm2/B3rEKDE=
|
||||||
modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ=
|
modernc.org/fileutil v1.3.1 h1:8vq5fe7jdtEvoCf3Zf9Nm0Q05sH6kGx0Op2CPx1wTC8=
|
||||||
modernc.org/ccgo/v4 v4.16.0 h1:ofwORa6vx2FMm0916/CkZjpFPSR70VwTjUCe2Eg5BnA=
|
modernc.org/fileutil v1.3.1/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
|
||||||
modernc.org/ccgo/v4 v4.16.0/go.mod h1:dkNyWIjFrVIZ68DTo36vHK+6/ShBn4ysU61So6PIqCI=
|
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
|
||||||
modernc.org/ccgo/v4 v4.21.0 h1:kKPI3dF7RIag8YcToh5ZwDcVMIv6VGa0ED5cvh0LMW4=
|
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
|
||||||
modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
|
modernc.org/libc v1.65.2 h1:drWL1QO9fKXr3kXDN8y+4lKyBr8bA3mtUBQpftq3IJw=
|
||||||
modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
|
modernc.org/libc v1.65.2/go.mod h1:VI3V2S5mNka4deJErQ0jsMXe7jgxojE2fOB/mWoHlbc=
|
||||||
modernc.org/gc/v2 v2.4.1 h1:9cNzOqPyMJBvrUipmynX0ZohMhcxPtMccYgGOJdOiBw=
|
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
|
||||||
modernc.org/gc/v2 v2.4.1/go.mod h1:wzN5dK1AzVGoH6XOzc3YZ+ey/jPgYHLuVckd62P0GYU=
|
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
|
||||||
modernc.org/gc/v2 v2.5.0 h1:bJ9ChznK1L1mUtAQtxi0wi5AtAs5jQuw4PrPHO5pb6M=
|
modernc.org/memory v1.10.0 h1:fzumd51yQ1DxcOxSO+S6X7+QTuVU+n8/Aj7swYjFfC4=
|
||||||
modernc.org/libc v1.49.3 h1:j2MRCRdwJI2ls/sGbeSk0t2bypOG/uvPZUsGQFDulqg=
|
modernc.org/memory v1.10.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
|
||||||
modernc.org/libc v1.49.3/go.mod h1:yMZuGkn7pXbKfoT/M35gFJOAEdSKdxL0q64sF7KqCDo=
|
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
|
||||||
modernc.org/libc v1.60.1 h1:at373l8IFRTkJIkAU85BIuUoBM4T1b51ds0E1ovPG2s=
|
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
||||||
modernc.org/libc v1.60.1/go.mod h1:xJuobKuNxKH3RUatS7GjR+suWj+5c2K7bi4m/S5arOY=
|
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
||||||
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
|
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
||||||
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
|
modernc.org/sqlite v1.37.0 h1:s1TMe7T3Q3ovQiK2Ouz4Jwh7dw4ZDqbebSDTlSJdfjI=
|
||||||
modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E=
|
modernc.org/sqlite v1.37.0/go.mod h1:5YiWv+YviqGMuGw4V+PNplcyaJ5v+vQd7TQOgkACoJM=
|
||||||
modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU=
|
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
||||||
modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
|
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
|
||||||
modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
|
|
||||||
modernc.org/sortutil v1.2.0 h1:jQiD3PfS2REGJNzNCMMaLSp/wdMNieTbKX920Cqdgqc=
|
|
||||||
modernc.org/sortutil v1.2.0/go.mod h1:TKU2s7kJMf1AE84OoiGppNHJwvB753OYfNl2WRb++Ss=
|
|
||||||
modernc.org/sqlite v1.29.6 h1:0lOXGrycJPptfHDuohfYgNqoe4hu+gYuN/pKgY5XjS4=
|
|
||||||
modernc.org/sqlite v1.29.6/go.mod h1:S02dvcmm7TnTRvGhv8IGYyLnIt7AS2KPaB1F/71p75U=
|
|
||||||
modernc.org/sqlite v1.33.1 h1:trb6Z3YYoeM9eDL1O8do81kP+0ejv+YzgyFo+Gwy0nM=
|
|
||||||
modernc.org/sqlite v1.33.1/go.mod h1:pXV2xHxhzXZsgT/RtTFAPY6JJDEvOTcTdwADQCCWD4k=
|
|
||||||
modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
|
|
||||||
modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
|
|
||||||
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||||
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
||||||
|
|
|
@ -27,7 +27,7 @@ type OAuth2Authenticator interface {
 	models.Backend

 	// Returns OAuth2 config suitable for this backend
-	OAuth2Strategy(redirectUrl *url.URL) OAuth2Strategy
+	OAuth2Strategy(redirectURL *url.URL) OAuth2Strategy

 	// Setup the OAuth2 client
 	OAuth2Setup(token oauth2.TokenSource) error
@ -24,14 +24,14 @@ import (
 type OAuth2Strategy interface {
 	Config() oauth2.Config

-	AuthCodeURL(verifier string, state string) AuthUrl
+	AuthCodeURL(verifier string, state string) AuthURL

 	ExchangeToken(code CodeResponse, verifier string) (*oauth2.Token, error)
 }

-type AuthUrl struct {
+type AuthURL struct {
 	// The URL the user must visit to approve access
-	Url string
+	URL string
 	// Random state string passed on to the callback.
 	// Leave empty if the service does not support state.
 	State string
@ -56,10 +56,10 @@ func (s StandardStrategy) Config() oauth2.Config {
 	return s.conf
 }

-func (s StandardStrategy) AuthCodeURL(verifier string, state string) AuthUrl {
+func (s StandardStrategy) AuthCodeURL(verifier string, state string) AuthURL {
 	url := s.conf.AuthCodeURL(state, oauth2.AccessTypeOffline, oauth2.S256ChallengeOption(verifier))
-	return AuthUrl{
-		Url: url,
+	return AuthURL{
+		URL: url,
 		State: state,
 		Param: "code",
 	}
@ -123,7 +123,11 @@ func backendWithConfig(config config.ServiceConfig) (models.Backend, error) {
 	if err != nil {
 		return nil, err
 	}
-	return backend.FromConfig(&config), nil
+	err = backend.InitConfig(&config)
+	if err != nil {
+		return nil, err
+	}
+	return backend, nil
 }

 func ImplementsInterface[T interface{}](backend *models.Backend) (bool, string) {
@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 This file is part of Scotty.

@ -18,7 +18,6 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
 package backends_test

 import (
-	"reflect"
 	"testing"

 	"github.com/spf13/viper"
@ -33,6 +32,7 @@ import (
 	"go.uploadedlobster.com/scotty/internal/backends/maloja"
 	"go.uploadedlobster.com/scotty/internal/backends/scrobblerlog"
 	"go.uploadedlobster.com/scotty/internal/backends/spotify"
+	"go.uploadedlobster.com/scotty/internal/backends/spotifyhistory"
 	"go.uploadedlobster.com/scotty/internal/backends/subsonic"
 	"go.uploadedlobster.com/scotty/internal/config"
 	"go.uploadedlobster.com/scotty/internal/i18n"
@ -93,9 +93,9 @@ func TestImplementsInterfaces(t *testing.T) {
 	expectInterface[models.LovesExport](t, &funkwhale.FunkwhaleApiBackend{})
 	// expectInterface[models.LovesImport](t, &funkwhale.FunkwhaleApiBackend{})

-	// expectInterface[models.ListensExport](t, &jspf.JSPFBackend{})
+	expectInterface[models.ListensExport](t, &jspf.JSPFBackend{})
 	expectInterface[models.ListensImport](t, &jspf.JSPFBackend{})
-	// expectInterface[models.LovesExport](t, &jspf.JSPFBackend{})
+	expectInterface[models.LovesExport](t, &jspf.JSPFBackend{})
 	expectInterface[models.LovesImport](t, &jspf.JSPFBackend{})

 	// expectInterface[models.ListensExport](t, &lastfm.LastfmApiBackend{})
@ -115,6 +115,8 @@ func TestImplementsInterfaces(t *testing.T) {
 	expectInterface[models.LovesExport](t, &spotify.SpotifyApiBackend{})
 	// expectInterface[models.LovesImport](t, &spotify.SpotifyApiBackend{})

+	expectInterface[models.ListensExport](t, &spotifyhistory.SpotifyHistoryBackend{})
+
 	expectInterface[models.ListensExport](t, &scrobblerlog.ScrobblerLogBackend{})
 	expectInterface[models.ListensImport](t, &scrobblerlog.ScrobblerLogBackend{})

@ -125,6 +127,6 @@ func TestImplementsInterfaces(t *testing.T) {
 func expectInterface[T interface{}](t *testing.T, backend models.Backend) {
 	ok, name := backends.ImplementsInterface[T](&backend)
 	if !ok {
-		t.Errorf("%v expected to implement %v", reflect.TypeOf(backend).Name(), name)
+		t.Errorf("%v expected to implement %v", backend.Name(), name)
 	}
 }
@ -33,10 +33,10 @@ func (s deezerStrategy) Config() oauth2.Config {
 	return s.conf
 }

-func (s deezerStrategy) AuthCodeURL(verifier string, state string) auth.AuthUrl {
+func (s deezerStrategy) AuthCodeURL(verifier string, state string) auth.AuthURL {
 	url := s.conf.AuthCodeURL(state, oauth2.AccessTypeOffline, oauth2.S256ChallengeOption(verifier))
-	return auth.AuthUrl{
-		Url: url,
+	return auth.AuthURL{
+		URL: url,
 		State: state,
 		Param: "code",
 	}
@ -36,7 +36,7 @@ const MaxItemsPerGet = 1000
 const DefaultRateLimitWaitSeconds = 5

 type Client struct {
-	HttpClient *resty.Client
+	HTTPClient *resty.Client
 	token oauth2.TokenSource
 }

@ -47,7 +47,7 @@ func NewClient(token oauth2.TokenSource) Client {
 	client.SetHeader("User-Agent", version.UserAgent())
 	client.SetRetryCount(5)
 	return Client{
-		HttpClient: client,
+		HTTPClient: client,
 		token: token,
 	}
 }
@ -73,7 +73,7 @@ func (c Client) setToken(req *resty.Request) error {
 }

 func listRequest[T Result](c Client, path string, offset int, limit int) (result T, err error) {
-	request := c.HttpClient.R().
+	request := c.HTTPClient.R().
 		SetQueryParams(map[string]string{
 			"index": strconv.Itoa(offset),
 			"limit": strconv.Itoa(limit),
@ -44,7 +44,7 @@ func TestGetUserHistory(t *testing.T) {

 	token := oauth2.StaticTokenSource(&oauth2.Token{})
 	client := deezer.NewClient(token)
-	setupHttpMock(t, client.HttpClient.GetClient(),
+	setupHTTPMock(t, client.HTTPClient.GetClient(),
 		"https://api.deezer.com/user/me/history",
 		"testdata/user-history.json")

@ -65,7 +65,7 @@ func TestGetUserTracks(t *testing.T) {

 	token := oauth2.StaticTokenSource(&oauth2.Token{})
 	client := deezer.NewClient(token)
-	setupHttpMock(t, client.HttpClient.GetClient(),
+	setupHTTPMock(t, client.HTTPClient.GetClient(),
 		"https://api.deezer.com/user/me/tracks",
 		"testdata/user-tracks.json")

@ -81,7 +81,7 @@ func TestGetUserTracks(t *testing.T) {
 	assert.Equal("Outland", track1.Track.Album.Title)
 }

-func setupHttpMock(t *testing.T, client *http.Client, url string, testDataPath string) {
+func setupHTTPMock(t *testing.T, client *http.Client, url string, testDataPath string) {
 	httpmock.ActivateNonDefault(client)

 	responder, err := httpmock.NewJsonResponder(200, httpmock.File(testDataPath))
@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@ -31,7 +31,7 @@ import (

 type DeezerApiBackend struct {
 	client Client
-	clientId string
+	clientID string
 	clientSecret string
 }

@ -49,20 +49,20 @@ func (b *DeezerApiBackend) Options() []models.BackendOption {
 	}}
 }

-func (b *DeezerApiBackend) FromConfig(config *config.ServiceConfig) models.Backend {
-	b.clientId = config.GetString("client-id")
+func (b *DeezerApiBackend) InitConfig(config *config.ServiceConfig) error {
+	b.clientID = config.GetString("client-id")
 	b.clientSecret = config.GetString("client-secret")
-	return b
+	return nil
 }

-func (b *DeezerApiBackend) OAuth2Strategy(redirectUrl *url.URL) auth.OAuth2Strategy {
+func (b *DeezerApiBackend) OAuth2Strategy(redirectURL *url.URL) auth.OAuth2Strategy {
 	conf := oauth2.Config{
-		ClientID: b.clientId,
+		ClientID: b.clientID,
 		ClientSecret: b.clientSecret,
 		Scopes: []string{
 			"offline_access,basic_access,listening_history",
 		},
-		RedirectURL: redirectUrl.String(),
+		RedirectURL: redirectURL.String(),
 		Endpoint: oauth2.Endpoint{
 			AuthURL: "https://connect.deezer.com/oauth/auth.php",
 			TokenURL: "https://connect.deezer.com/oauth/access_token.php",
@ -77,7 +77,7 @@ func (b *DeezerApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
 	return nil
 }

-func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	// Choose a high offset, we attempt to search the loves backwards starting
 	// at the oldest one.
 	offset := math.MaxInt32
@ -88,15 +88,18 @@ func (b *DeezerApiBackend) ExportListens(oldestTimestamp time.Time, results chan

 	totalDuration := startTime.Sub(oldestTimestamp)

-	defer close(results)
-	p := models.Progress{Total: int64(totalDuration.Seconds())}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(totalDuration.Seconds()),
+		},
+	}

 out:
 	for {
 		result, err := b.client.UserHistory(offset, perPage)
 		if err != nil {
-			progress <- p.Complete()
+			p.Export.Abort()
+			progress <- p
 			results <- models.ListensResult{Error: err}
 			return
 		}
@ -104,7 +107,6 @@ out:
 		// The offset was higher then the actual number of tracks. Adjust the offset
 		// and continue.
 		if offset >= result.Total {
-			p.Total = int64(result.Total)
 			offset = max(result.Total-perPage, 0)
 			continue
 		}
@ -117,7 +119,7 @@ out:
 		listens := make(models.ListensList, 0, perPage)
 		for _, track := range result.Tracks {
 			listen := track.AsListen()
-			if listen.ListenedAt.Unix() > oldestTimestamp.Unix() {
+			if listen.ListenedAt.After(oldestTimestamp) {
 				listens = append(listens, listen)
 			} else {
 				break
@ -130,7 +132,8 @@ out:
 		}

 		remainingTime := startTime.Sub(minTime)
-		p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
+		p.Export.TotalItems += len(listens)
+		p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
 		progress <- p
 		results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}

@ -146,25 +149,29 @@ out:
 	}

 	results <- models.ListensResult{OldestTimestamp: minTime}
-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 }

-func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *DeezerApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	// Choose a high offset, we attempt to search the loves backwards starting
 	// at the oldest one.
 	offset := math.MaxInt32
 	perPage := MaxItemsPerGet

-	defer close(results)
-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}
 	var totalCount int

 out:
 	for {
 		result, err := b.client.UserTracks(offset, perPage)
 		if err != nil {
-			progress <- p.Complete()
+			p.Export.Abort()
+			progress <- p
 			results <- models.LovesResult{Error: err}
 			return
 		}
@ -172,8 +179,8 @@ out:
 		// The offset was higher then the actual number of tracks. Adjust the offset
 		// and continue.
 		if offset >= result.Total {
-			p.Total = int64(result.Total)
 			totalCount = result.Total
+			p.Export.Total = int64(totalCount)
 			offset = max(result.Total-perPage, 0)
 			continue
 		}
@ -186,17 +193,18 @@ out:
 		loves := make(models.LovesList, 0, perPage)
 		for _, track := range result.Tracks {
 			love := track.AsLove()
-			if love.Created.Unix() > oldestTimestamp.Unix() {
+			if love.Created.After(oldestTimestamp) {
 				loves = append(loves, love)
 			} else {
 				totalCount -= 1
-				break
 			}
 		}

 		sort.Sort(loves)
 		results <- models.LovesResult{Items: loves, Total: totalCount}
-		p.Elapsed += int64(count)
+		p.Export.TotalItems = totalCount
+		p.Export.Total = int64(totalCount)
+		p.Export.Elapsed += int64(count)
 		progress <- p

 		if offset <= 0 {
@ -210,7 +218,8 @@ out:
 		}
 	}

-	progress <- p.Complete()
+	p.Export.Complete()
+	progress <- p
 }

 func (t Listen) AsListen() models.Listen {
@ -244,8 +253,8 @@ func (t Track) AsTrack() models.Track {
 	info["music_service"] = "deezer.com"
 	info["origin_url"] = t.Link
 	info["deezer_id"] = t.Link
-	info["deezer_album_id"] = fmt.Sprintf("https://www.deezer.com/album/%v", t.Album.Id)
-	info["deezer_artist_id"] = fmt.Sprintf("https://www.deezer.com/artist/%v", t.Artist.Id)
+	info["deezer_album_id"] = fmt.Sprintf("https://www.deezer.com/album/%v", t.Album.ID)
+	info["deezer_artist_id"] = fmt.Sprintf("https://www.deezer.com/artist/%v", t.Artist.ID)

 	return track
 }
@ -35,13 +35,14 @@
 	testTrack []byte
 )

-func TestFromConfig(t *testing.T) {
+func TestInitConfig(t *testing.T) {
 	c := viper.New()
 	c.Set("client-id", "someclientid")
 	c.Set("client-secret", "someclientsecret")
 	service := config.NewServiceConfig("test", c)
-	backend := (&deezer.DeezerApiBackend{}).FromConfig(&service)
-	assert.IsType(t, &deezer.DeezerApiBackend{}, backend)
+	backend := deezer.DeezerApiBackend{}
+	err := backend.InitConfig(&service)
+	assert.NoError(t, err)
 }

 func TestListenAsListen(t *testing.T) {
@ -51,7 +51,7 @@ type HistoryResult struct {
 }

 type Track struct {
-	Id int `json:"id"`
+	ID int `json:"id"`
 	Type string `json:"type"`
 	Link string `json:"link"`
 	Title string `json:"title"`
@ -75,7 +75,7 @@ type LovedTrack struct {
 }

 type Album struct {
-	Id int `json:"id"`
+	ID int `json:"id"`
 	Type string `json:"type"`
 	Link string `json:"link"`
 	Title string `json:"title"`
@ -83,7 +83,7 @@ type Album struct {
 }

 type Artist struct {
-	Id int `json:"id"`
+	ID int `json:"id"`
 	Type string `json:"type"`
 	Link string `json:"link"`
 	Name string `json:"name"`
@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 This file is part of Scotty.

@ -29,34 +29,34 @@ func (b *DumpBackend) Name() string { return "dump" }

 func (b *DumpBackend) Options() []models.BackendOption { return nil }

-func (b *DumpBackend) FromConfig(config *config.ServiceConfig) models.Backend {
-	return b
+func (b *DumpBackend) InitConfig(config *config.ServiceConfig) error {
+	return nil
 }

 func (b *DumpBackend) StartImport() error { return nil }
 func (b *DumpBackend) FinishImport() error { return nil }

-func (b *DumpBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *DumpBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, listen := range export.Items {
 		importResult.UpdateTimestamp(listen.ListenedAt)
 		importResult.ImportCount += 1
 		msg := fmt.Sprintf("🎶 %v: \"%v\" by %v (%v)",
-			listen.ListenedAt, listen.TrackName, listen.ArtistName(), listen.RecordingMbid)
+			listen.ListenedAt, listen.TrackName, listen.ArtistName(), listen.RecordingMBID)
 		importResult.Log(models.Info, msg)
-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
 }

-func (b *DumpBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *DumpBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
 	for _, love := range export.Items {
 		importResult.UpdateTimestamp(love.Created)
 		importResult.ImportCount += 1
 		msg := fmt.Sprintf("❤️ %v: \"%v\" by %v (%v)",
-			love.Created, love.TrackName, love.ArtistName(), love.RecordingMbid)
+			love.Created, love.TrackName, love.ArtistName(), love.RecordingMBID)
 		importResult.Log(models.Info, msg)
-		progress <- models.Progress{}.FromImportResult(importResult)
+		progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 	}

 	return importResult, nil
@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@ -16,6 +16,7 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
 package backends

 import (
+	"sync"
 	"time"

 	"go.uploadedlobster.com/scotty/internal/models"
@ -23,7 +24,7 @@ import (

 type ExportProcessor[T models.ListensResult | models.LovesResult] interface {
 	ExportBackend() models.Backend
-	Process(oldestTimestamp time.Time, results chan T, progress chan models.Progress)
+	Process(wg *sync.WaitGroup, oldestTimestamp time.Time, results chan T, progress chan models.TransferProgress)
 }

 type ListensExportProcessor struct {
@ -34,9 +35,11 @@ func (p ListensExportProcessor) ExportBackend() models.Backend {
 	return p.Backend
 }

-func (p ListensExportProcessor) Process(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (p ListensExportProcessor) Process(wg *sync.WaitGroup, oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
+	wg.Add(1)
+	defer wg.Done()
+	defer close(results)
 	p.Backend.ExportListens(oldestTimestamp, results, progress)
-	close(progress)
 }

 type LovesExportProcessor struct {
@ -47,7 +50,9 @@ func (p LovesExportProcessor) ExportBackend() models.Backend {
 	return p.Backend
 }

-func (p LovesExportProcessor) Process(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (p LovesExportProcessor) Process(wg *sync.WaitGroup, oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
+	wg.Add(1)
+	defer wg.Done()
+	defer close(results)
 	p.Backend.ExportLoves(oldestTimestamp, results, progress)
-	close(progress)
 }
@ -33,13 +33,13 @@ import (
 const MaxItemsPerGet = 50

 type Client struct {
-	HttpClient *resty.Client
+	HTTPClient *resty.Client
 	token string
 }

-func NewClient(serverUrl string, token string) Client {
+func NewClient(serverURL string, token string) Client {
 	client := resty.New()
-	client.SetBaseURL(serverUrl)
+	client.SetBaseURL(serverURL)
 	client.SetAuthScheme("Bearer")
 	client.SetAuthToken(token)
 	client.SetHeader("Accept", "application/json")
@ -49,14 +49,14 @@ func NewClient(serverUrl string, token string) Client {
 	ratelimit.EnableHTTPHeaderRateLimit(client, "Retry-After")

 	return Client{
-		HttpClient: client,
+		HTTPClient: client,
 		token: token,
 	}
 }

 func (c Client) GetHistoryListenings(user string, page int, perPage int) (result ListeningsResult, err error) {
 	const path = "/api/v1/history/listenings"
-	response, err := c.HttpClient.R().
+	response, err := c.HTTPClient.R().
 		SetQueryParams(map[string]string{
 			"username": user,
 			"page": strconv.Itoa(page),
@ -75,7 +75,7 @@ func (c Client) GetHistoryListenings(user string, page int, perPage int) (result

 func (c Client) GetFavoriteTracks(page int, perPage int) (result FavoriteTracksResult, err error) {
 	const path = "/api/v1/favorites/tracks"
-	response, err := c.HttpClient.R().
+	response, err := c.HTTPClient.R().
 		SetQueryParams(map[string]string{
 			"page": strconv.Itoa(page),
 			"page_size": strconv.Itoa(perPage),
@ -32,20 +32,20 @@ import (
 )

 func TestNewClient(t *testing.T) {
-	serverUrl := "https://funkwhale.example.com"
+	serverURL := "https://funkwhale.example.com"
 	token := "foobar123"
-	client := funkwhale.NewClient(serverUrl, token)
-	assert.Equal(t, serverUrl, client.HttpClient.BaseURL)
-	assert.Equal(t, token, client.HttpClient.Token)
+	client := funkwhale.NewClient(serverURL, token)
+	assert.Equal(t, serverURL, client.HTTPClient.BaseURL)
+	assert.Equal(t, token, client.HTTPClient.Token)
 }

 func TestGetHistoryListenings(t *testing.T) {
 	defer httpmock.DeactivateAndReset()

-	serverUrl := "https://funkwhale.example.com"
+	serverURL := "https://funkwhale.example.com"
 	token := "thetoken"
-	client := funkwhale.NewClient(serverUrl, token)
-	setupHttpMock(t, client.HttpClient.GetClient(),
+	client := funkwhale.NewClient(serverURL, token)
+	setupHTTPMock(t, client.HTTPClient.GetClient(),
 		"https://funkwhale.example.com/api/v1/history/listenings",
 		"testdata/listenings.json")

@ -67,9 +67,9 @@ func TestGetFavoriteTracks(t *testing.T) {
 	defer httpmock.DeactivateAndReset()

 	token := "thetoken"
-	serverUrl := "https://funkwhale.example.com"
-	client := funkwhale.NewClient(serverUrl, token)
-	setupHttpMock(t, client.HttpClient.GetClient(),
+	serverURL := "https://funkwhale.example.com"
+	client := funkwhale.NewClient(serverURL, token)
+	setupHTTPMock(t, client.HTTPClient.GetClient(),
 		"https://funkwhale.example.com/api/v1/favorites/tracks",
 		"testdata/favorite-tracks.json")

@ -87,7 +87,7 @@ func TestGetFavoriteTracks(t *testing.T) {
 	assert.Equal("phw", fav1.User.UserName)
 }

-func setupHttpMock(t *testing.T, client *http.Client, url string, testDataPath string) {
+func setupHTTPMock(t *testing.T, client *http.Client, url string, testDataPath string) {
 	httpmock.ActivateNonDefault(client)

 	responder, err := httpmock.NewJsonResponder(200, httpmock.File(testDataPath))
@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 This file is part of Scotty.

@ -20,6 +20,7 @@ import (
 	"sort"
 	"time"

+	"go.uploadedlobster.com/mbtypes"
 	"go.uploadedlobster.com/scotty/internal/config"
 	"go.uploadedlobster.com/scotty/internal/i18n"
 	"go.uploadedlobster.com/scotty/internal/models"
@ -50,30 +51,35 @@ func (b *FunkwhaleApiBackend) Options() []models.BackendOption {
 	}}
 }

-func (b *FunkwhaleApiBackend) FromConfig(config *config.ServiceConfig) models.Backend {
+func (b *FunkwhaleApiBackend) InitConfig(config *config.ServiceConfig) error {
 	b.client = NewClient(
 		config.GetString("server-url"),
 		config.GetString("token"),
 	)
 	b.username = config.GetString("username")
-	return b
+	return nil
 }

-func (b *FunkwhaleApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *FunkwhaleApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 	page := 1
 	perPage := MaxItemsPerGet

-	defer close(results)

 	// We need to gather the full list of listens in order to sort them
 	listens := make(models.ListensList, 0, 2*perPage)
-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}

 out:
 	for {
 		result, err := b.client.GetHistoryListenings(b.username, page, perPage)
 		if err != nil {
+			p.Export.Abort()
+			progress <- p
 			results <- models.ListensResult{Error: err}
+			return
 		}

 		count := len(result.Results)
@ -83,8 +89,8 @@ out:

 		for _, fwListen := range result.Results {
 			listen := fwListen.AsListen()
-			if listen.ListenedAt.Unix() > oldestTimestamp.Unix() {
-				p.Elapsed += 1
+			if listen.ListenedAt.After(oldestTimestamp) {
+				p.Export.Elapsed += 1
 				listens = append(listens, listen)
 			} else {
 				break out
@ -93,36 +99,42 @@ out:

 		if result.Next == "" {
 			// No further results
-			p.Total = p.Elapsed
-			p.Total -= int64(perPage - count)
+			p.Export.Total = p.Export.Elapsed
+			p.Export.Total -= int64(perPage - count)
 			break out
 		}

-		p.Total += int64(perPage)
+		p.Export.TotalItems = len(listens)
+		p.Export.Total += int64(perPage)
 		progress <- p
 		page += 1
 	}

 	sort.Sort(listens)
-	progress <- p.Complete()
+	p.Export.TotalItems = len(listens)
+	p.Export.Complete()
+	progress <- p
 	results <- models.ListensResult{Items: listens}
 }

-func (b *FunkwhaleApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *FunkwhaleApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 	page := 1
 	perPage := MaxItemsPerGet

-	defer close(results)

 	// We need to gather the full list of listens in order to sort them
 	loves := make(models.LovesList, 0, 2*perPage)
-	p := models.Progress{Total: int64(perPage)}
+	p := models.TransferProgress{
+		Export: &models.Progress{
+			Total: int64(perPage),
+		},
+	}

 out:
 	for {
 		result, err := b.client.GetFavoriteTracks(page, perPage)
 		if err != nil {
-			progress <- p.Complete()
+			p.Export.Abort()
+			progress <- p
 			results <- models.LovesResult{Error: err}
 			return
 		}
@ -134,8 +146,8 @@ out:

 		for _, favorite := range result.Results {
 			love := favorite.AsLove()
-			if love.Created.Unix() > oldestTimestamp.Unix() {
-				p.Elapsed += 1
+			if love.Created.After(oldestTimestamp) {
+				p.Export.Elapsed += 1
 				loves = append(loves, love)
 			} else {
 				break out
@ -147,13 +159,16 @@ out:
 			break out
 		}

-		p.Total += int64(perPage)
+		p.Export.TotalItems = len(loves)
+		p.Export.Total += int64(perPage)
 		progress <- p
 		page += 1
 	}

 	sort.Sort(loves)
-	progress <- p.Complete()
+	p.Export.TotalItems = len(loves)
+	p.Export.Complete()
+	progress <- p
 	results <- models.LovesResult{Items: loves}
 }

@ -175,7 +190,7 @@ func (f FavoriteTrack) AsLove() models.Love {
 	track := f.Track.AsTrack()
 	love := models.Love{
 		UserName: f.User.UserName,
-		RecordingMbid: track.RecordingMbid,
+		RecordingMBID: track.RecordingMBID,
 		Track: track,
 	}

@ -188,16 +203,15 @@ func (f FavoriteTrack) AsLove() models.Love {
 }

 func (t Track) AsTrack() models.Track {
-	recordingMbid := models.MBID(t.RecordingMbid)
 	track := models.Track{
 		TrackName: t.Title,
 		ReleaseName: t.Album.Title,
 		ArtistNames: []string{t.Artist.Name},
 		TrackNumber: t.Position,
 		DiscNumber: t.DiscNumber,
-		RecordingMbid: recordingMbid,
-		ReleaseMbid: models.MBID(t.Album.ReleaseMbid),
-		ArtistMbids: []models.MBID{models.MBID(t.Artist.ArtistMbid)},
+		RecordingMBID: t.RecordingMBID,
+		ReleaseMBID: t.Album.ReleaseMBID,
+		ArtistMBIDs: []mbtypes.MBID{t.Artist.ArtistMBID},
 		Tags: t.Tags,
 		AdditionalInfo: map[string]any{
 			"media_player": FunkwhaleClientName,
@ -25,15 +25,15 @@ import (
 	"github.com/stretchr/testify/require"
 	"go.uploadedlobster.com/scotty/internal/backends/funkwhale"
 	"go.uploadedlobster.com/scotty/internal/config"
-	"go.uploadedlobster.com/scotty/internal/models"
 )

-func TestFromConfig(t *testing.T) {
+func TestInitConfig(t *testing.T) {
 	c := viper.New()
 	c.Set("token", "thetoken")
 	service := config.NewServiceConfig("test", c)
-	backend := (&funkwhale.FunkwhaleApiBackend{}).FromConfig(&service)
-	assert.IsType(t, &funkwhale.FunkwhaleApiBackend{}, backend)
+	backend := funkwhale.FunkwhaleApiBackend{}
+	err := backend.InitConfig(&service)
+	assert.NoError(t, err)
 }

 func TestFunkwhaleListeningAsListen(t *testing.T) {
@ -44,17 +44,17 @@ func TestFunkwhaleListeningAsListen(t *testing.T) {
 		},
 		Track: funkwhale.Track{
 			Title: "Oweynagat",
-			RecordingMbid: "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12",
+			RecordingMBID: "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12",
 			Position: 5,
 			DiscNumber: 1,
 			Tags: []string{"foo", "bar"},
 			Artist: funkwhale.Artist{
 				Name: "Dool",
-				ArtistMbid: "24412926-c7bd-48e8-afad-8a285b42e131",
+				ArtistMBID: "24412926-c7bd-48e8-afad-8a285b42e131",
 			},
 			Album: funkwhale.Album{
 				Title: "Here Now, There Then",
-				ReleaseMbid: "d7f22677-9803-4d21-ba42-081b633a6f68",
+				ReleaseMBID: "d7f22677-9803-4d21-ba42-081b633a6f68",
 			},
 			Uploads: []funkwhale.Upload{
 				{
@ -75,9 +75,9 @@ func TestFunkwhaleListeningAsListen(t *testing.T) {
 	assert.Equal(fwListen.Track.DiscNumber, listen.Track.DiscNumber)
 	assert.Equal(fwListen.Track.Tags, listen.Track.Tags)
 	// assert.Equal(backends.FunkwhaleClientName, listen.AdditionalInfo["disc_number"])
-	assert.Equal(models.MBID(fwListen.Track.RecordingMbid), listen.RecordingMbid)
-	assert.Equal(models.MBID(fwListen.Track.Album.ReleaseMbid), listen.ReleaseMbid)
-	assert.Equal(models.MBID(fwListen.Track.Artist.ArtistMbid), listen.ArtistMbids[0])
+	assert.Equal(fwListen.Track.RecordingMBID, listen.RecordingMBID)
+	assert.Equal(fwListen.Track.Album.ReleaseMBID, listen.ReleaseMBID)
+	assert.Equal(fwListen.Track.Artist.ArtistMBID, listen.ArtistMBIDs[0])
 	assert.Equal(funkwhale.FunkwhaleClientName, listen.AdditionalInfo["media_player"])
 }

@ -89,17 +89,17 @@ func TestFunkwhaleFavoriteTrackAsLove(t *testing.T) {
 		},
 		Track: funkwhale.Track{
 			Title: "Oweynagat",
-			RecordingMbid: "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12",
+			RecordingMBID: "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12",
 			Position: 5,
 			DiscNumber: 1,
 			Tags: []string{"foo", "bar"},
 			Artist: funkwhale.Artist{
 				Name: "Dool",
-				ArtistMbid: "24412926-c7bd-48e8-afad-8a285b42e131",
+				ArtistMBID: "24412926-c7bd-48e8-afad-8a285b42e131",
 			},
 			Album: funkwhale.Album{
 				Title: "Here Now, There Then",
-				ReleaseMbid: "d7f22677-9803-4d21-ba42-081b633a6f68",
+				ReleaseMBID: "d7f22677-9803-4d21-ba42-081b633a6f68",
 			},
 			Uploads: []funkwhale.Upload{
 				{
@ -119,10 +119,10 @@ func TestFunkwhaleFavoriteTrackAsLove(t *testing.T) {
 	assert.Equal(favorite.Track.Position, love.Track.TrackNumber)
 	assert.Equal(favorite.Track.DiscNumber, love.Track.DiscNumber)
 	assert.Equal(favorite.Track.Tags, love.Track.Tags)
-	assert.Equal(models.MBID(favorite.Track.RecordingMbid), love.RecordingMbid)
-	assert.Equal(models.MBID(favorite.Track.RecordingMbid), love.Track.RecordingMbid)
-	assert.Equal(models.MBID(favorite.Track.Album.ReleaseMbid), love.ReleaseMbid)
-	require.Len(t, love.Track.ArtistMbids, 1)
-	assert.Equal(models.MBID(favorite.Track.Artist.ArtistMbid), love.ArtistMbids[0])
+	assert.Equal(favorite.Track.RecordingMBID, love.RecordingMBID)
+	assert.Equal(favorite.Track.RecordingMBID, love.Track.RecordingMBID)
+	assert.Equal(favorite.Track.Album.ReleaseMBID, love.ReleaseMBID)
+	require.Len(t, love.Track.ArtistMBIDs, 1)
+	assert.Equal(favorite.Track.Artist.ArtistMBID, love.ArtistMBIDs[0])
 	assert.Equal(funkwhale.FunkwhaleClientName, love.AdditionalInfo["media_player"])
 }
@@ -21,6 +21,8 @@ THE SOFTWARE.
 */
 package funkwhale
 
+import "go.uploadedlobster.com/mbtypes"
+
 type ListeningsResult struct {
   Count int `json:"count"`
   Previous string `json:"previous"`
@@ -29,7 +31,7 @@ type ListeningsResult struct {
 }
 
 type Listening struct {
-  Id int `json:"int"`
+  ID int `json:"int"`
   User User `json:"user"`
   Track Track `json:"track"`
   CreationDate string `json:"creation_date"`
@@ -43,41 +45,41 @@ type FavoriteTracksResult struct {
 }
 
 type FavoriteTrack struct {
-  Id int `json:"int"`
+  ID int `json:"int"`
   User User `json:"user"`
   Track Track `json:"track"`
   CreationDate string `json:"creation_date"`
 }
 
 type Track struct {
-  Id int `json:"int"`
+  ID int `json:"int"`
   Artist Artist `json:"artist"`
   Album Album `json:"album"`
   Title string `json:"title"`
   Position int `json:"position"`
   DiscNumber int `json:"disc_number"`
-  RecordingMbid string `json:"mbid"`
+  RecordingMBID mbtypes.MBID `json:"mbid"`
   Tags []string `json:"tags"`
   Uploads []Upload `json:"uploads"`
 }
 
 type Artist struct {
-  Id int `json:"int"`
+  ID int `json:"int"`
   Name string `json:"name"`
-  ArtistMbid string `json:"mbid"`
+  ArtistMBID mbtypes.MBID `json:"mbid"`
 }
 
 type Album struct {
-  Id int `json:"int"`
+  ID int `json:"int"`
   Title string `json:"title"`
   AlbumArtist Artist `json:"artist"`
   ReleaseDate string `json:"release_date"`
   TrackCount int `json:"track_count"`
-  ReleaseMbid string `json:"mbid"`
+  ReleaseMBID mbtypes.MBID `json:"mbid"`
 }
 
 type User struct {
-  Id int `json:"int"`
+  ID int `json:"int"`
   UserName string `json:"username"`
 }

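Note for readers of the diff above: the MusicBrainz ID fields now unmarshal straight into the mbtypes.MBID type through the unchanged `json:"mbid"` tags. A minimal, self-contained sketch of that behaviour — MBID is assumed here to be a plain string type, and the Artist struct is trimmed to the fields shown above:

package main

import (
	"encoding/json"
	"fmt"
)

// MBID stands in for go.uploadedlobster.com/mbtypes.MBID (assumed to be a string type).
type MBID string

// Artist mirrors the name and mbid fields from the diff above.
type Artist struct {
	Name       string `json:"name"`
	ArtistMBID MBID   `json:"mbid"`
}

func main() {
	data := []byte(`{"name": "Dool", "mbid": "24412926-c7bd-48e8-afad-8a285b42e131"}`)
	var a Artist
	if err := json.Unmarshal(data, &a); err != nil {
		panic(err)
	}
	fmt.Println(a.Name, a.ArtistMBID) // Dool 24412926-c7bd-48e8-afad-8a285b42e131
}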
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -18,13 +18,15 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
 package backends
 
 import (
+  "sync"
+
   "go.uploadedlobster.com/scotty/internal/models"
 )
 
 type ImportProcessor[T models.ListensResult | models.LovesResult] interface {
   ImportBackend() models.ImportBackend
-  Process(results chan T, out chan models.ImportResult, progress chan models.Progress)
+  Process(wg *sync.WaitGroup, results chan T, out chan models.ImportResult, progress chan models.TransferProgress)
-  Import(export T, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error)
+  Import(export T, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error)
 }
 
 type ListensImportProcessor struct {
@@ -35,13 +37,13 @@ func (p ListensImportProcessor) ImportBackend() models.ImportBackend {
   return p.Backend
 }
 
-func (p ListensImportProcessor) Process(results chan models.ListensResult, out chan models.ImportResult, progress chan models.Progress) {
+func (p ListensImportProcessor) Process(wg *sync.WaitGroup, results chan models.ListensResult, out chan models.ImportResult, progress chan models.TransferProgress) {
-  process(p, results, out, progress)
+  process(wg, p, results, out, progress)
 }
 
-func (p ListensImportProcessor) Import(export models.ListensResult, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (p ListensImportProcessor) Import(export models.ListensResult, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
   if export.Error != nil {
-    return handleError(result, export.Error, progress), export.Error
+    return result, export.Error
   }
 
   if export.Total > 0 {
@@ -51,7 +53,7 @@ func (p ListensImportProcessor) Import(export models.ListensResult, result model
   }
   importResult, err := p.Backend.ImportListens(export, result, progress)
   if err != nil {
-    return handleError(result, err, progress), err
+    return importResult, err
   }
   return importResult, nil
 }
@@ -64,13 +66,13 @@ func (p LovesImportProcessor) ImportBackend() models.ImportBackend {
   return p.Backend
 }
 
-func (p LovesImportProcessor) Process(results chan models.LovesResult, out chan models.ImportResult, progress chan models.Progress) {
+func (p LovesImportProcessor) Process(wg *sync.WaitGroup, results chan models.LovesResult, out chan models.ImportResult, progress chan models.TransferProgress) {
-  process(p, results, out, progress)
+  process(wg, p, results, out, progress)
 }
 
-func (p LovesImportProcessor) Import(export models.LovesResult, result models.ImportResult, out chan models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (p LovesImportProcessor) Import(export models.LovesResult, result models.ImportResult, out chan models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
   if export.Error != nil {
-    return handleError(result, export.Error, progress), export.Error
+    return result, export.Error
   }
 
   if export.Total > 0 {
@@ -80,44 +82,47 @@ func (p LovesImportProcessor) Import(export models.LovesResult, result models.Im
   }
   importResult, err := p.Backend.ImportLoves(export, result, progress)
   if err != nil {
-    return handleError(importResult, err, progress), err
+    return importResult, err
   }
   return importResult, nil
 }
 
-func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](processor P, results chan R, out chan models.ImportResult, progress chan models.Progress) {
+func process[R models.LovesResult | models.ListensResult, P ImportProcessor[R]](wg *sync.WaitGroup, processor P, results chan R, out chan models.ImportResult, progress chan models.TransferProgress) {
+  wg.Add(1)
+  defer wg.Done()
   defer close(out)
-  defer close(progress)
   result := models.ImportResult{}
+  p := models.TransferProgress{}
 
-  err := processor.ImportBackend().StartImport()
+  if err := processor.ImportBackend().StartImport(); err != nil {
-  if err != nil {
     out <- handleError(result, err, progress)
     return
   }
 
   for exportResult := range results {
     importResult, err := processor.Import(exportResult, result, out, progress)
-    if err != nil {
-      out <- handleError(result, err, progress)
-      return
-    }
     result.Update(importResult)
-    progress <- models.Progress{}.FromImportResult(result)
+    if err != nil {
+      processor.ImportBackend().FinishImport()
+      out <- handleError(result, err, progress)
+      return
+    }
+    progress <- p.FromImportResult(result, false)
   }
 
-  err = processor.ImportBackend().FinishImport()
+  if err := processor.ImportBackend().FinishImport(); err != nil {
-  if err != nil {
     out <- handleError(result, err, progress)
     return
   }
 
-  progress <- models.Progress{}.FromImportResult(result).Complete()
+  progress <- p.FromImportResult(result, true)
   out <- result
 }
 
-func handleError(result models.ImportResult, err error, progress chan models.Progress) models.ImportResult {
+func handleError(result models.ImportResult, err error, progress chan models.TransferProgress) models.ImportResult {
   result.Error = err
-  progress <- models.Progress{}.FromImportResult(result).Complete()
+  p := models.TransferProgress{}.FromImportResult(result, false)
+  p.Import.Abort()
+  progress <- p
   return result
 }

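For readers skimming the hunk above: the reworked process function follows a common Go pipeline shape — the worker registers itself on the caller's WaitGroup, drains its input channel until the sender closes it, and closes its own output channel when it is done. A generic, runnable illustration of that shape (plain ints stand in for the scotty result types; this is a sketch, not repository code):

package main

import (
	"fmt"
	"sync"
)

// worker mirrors the structure of process() above: register on the WaitGroup,
// close the output channel when finished, consume the input until it is closed.
func worker(wg *sync.WaitGroup, in <-chan int, out chan<- int) {
	wg.Add(1)
	defer wg.Done()
	defer close(out)
	sum := 0
	for v := range in {
		sum += v
	}
	// Send the final aggregate, just as process() sends the final ImportResult.
	out <- sum
}

func main() {
	var wg sync.WaitGroup
	in := make(chan int)
	out := make(chan int, 1)
	go worker(&wg, in, out)
	for i := 1; i <= 3; i++ {
		in <- i
	}
	close(in)
	fmt.Println(<-out) // 6
	wg.Wait()
}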
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023-2024 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -18,15 +18,25 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
 package jspf
 
 import (
+  "errors"
   "os"
+  "sort"
+  "strings"
   "time"
 
+  "go.uploadedlobster.com/mbtypes"
   "go.uploadedlobster.com/scotty/internal/config"
   "go.uploadedlobster.com/scotty/internal/i18n"
   "go.uploadedlobster.com/scotty/internal/models"
   "go.uploadedlobster.com/scotty/pkg/jspf"
 )
 
+const (
+  artistMBIDPrefix = "https://musicbrainz.org/artist/"
+  recordingMBIDPrefix = "https://musicbrainz.org/recording/"
+  releaseMBIDPrefix = "https://musicbrainz.org/release/"
+)
+
 type JSPFBackend struct {
   filePath string
   playlist jspf.Playlist
@@ -60,22 +70,19 @@ func (b *JSPFBackend) Options() []models.BackendOption {
   }}
 }
 
-func (b *JSPFBackend) FromConfig(config *config.ServiceConfig) models.Backend {
+func (b *JSPFBackend) InitConfig(config *config.ServiceConfig) error {
   b.filePath = config.GetString("file-path")
   b.append = config.GetBool("append", true)
   b.playlist = jspf.Playlist{
     Title: config.GetString("title"),
     Creator: config.GetString("username"),
     Identifier: config.GetString("identifier"),
+    Date: time.Now(),
     Tracks: make([]jspf.Track, 0),
-    Extension: map[string]any{
-      jspf.MusicBrainzPlaylistExtensionId: jspf.MusicBrainzPlaylistExtension{
-        LastModifiedAt: time.Now(),
-        Public: true,
-      },
-    },
   }
-  return b
+  b.addMusicBrainzPlaylistExtension()
+  return nil
 }
 
 func (b *JSPFBackend) StartImport() error {
@@ -86,7 +93,34 @@ func (b *JSPFBackend) FinishImport() error {
   return b.writeJSPF()
 }
 
-func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *JSPFBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
+  err := b.readJSPF()
+  p := models.TransferProgress{
+    Export: &models.Progress{},
+  }
+
+  if err != nil {
+    p.Export.Abort()
+    progress <- p
+    results <- models.ListensResult{Error: err}
+    return
+  }
+
+  listens := make(models.ListensList, 0, len(b.playlist.Tracks))
+  for _, track := range b.playlist.Tracks {
+    listen, err := trackAsListen(track)
+    if err == nil && listen != nil && listen.ListenedAt.After(oldestTimestamp) {
+      listens = append(listens, *listen)
+    }
+  }
+  sort.Sort(listens)
+  p.Export.Total = int64(len(listens))
+  p.Export.Complete()
+  progress <- p
+  results <- models.ListensResult{Items: listens}
+}
+
+func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
   for _, listen := range export.Items {
     track := listenAsTrack(listen)
     b.playlist.Tracks = append(b.playlist.Tracks, track)
@@ -94,11 +128,38 @@ func (b *JSPFBackend) ImportListens(export models.ListensResult, importResult mo
     importResult.UpdateTimestamp(listen.ListenedAt)
   }
 
-  progress <- models.Progress{}.FromImportResult(importResult)
+  progress <- models.TransferProgress{}.FromImportResult(importResult, false)
   return importResult, nil
 }
 
-func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *JSPFBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
+  err := b.readJSPF()
+  p := models.TransferProgress{
+    Export: &models.Progress{},
+  }
+
+  if err != nil {
+    p.Export.Abort()
+    progress <- p
+    results <- models.LovesResult{Error: err}
+    return
+  }
+
+  loves := make(models.LovesList, 0, len(b.playlist.Tracks))
+  for _, track := range b.playlist.Tracks {
+    love, err := trackAsLove(track)
+    if err == nil && love != nil && love.Created.After(oldestTimestamp) {
+      loves = append(loves, *love)
+    }
+  }
+  sort.Sort(loves)
+  p.Export.Total = int64(len(loves))
+  p.Export.Complete()
+  progress <- p
+  results <- models.LovesResult{Items: loves}
+}
+
+func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
   for _, love := range export.Items {
     track := loveAsTrack(love)
     b.playlist.Tracks = append(b.playlist.Tracks, track)
@@ -106,68 +167,127 @@ func (b *JSPFBackend) ImportLoves(export models.LovesResult, importResult models
     importResult.UpdateTimestamp(love.Created)
   }
 
-  progress <- models.Progress{}.FromImportResult(importResult)
+  progress <- models.TransferProgress{}.FromImportResult(importResult, false)
   return importResult, nil
 }
 
 func listenAsTrack(l models.Listen) jspf.Track {
   l.FillAdditionalInfo()
-  track := trackAsTrack(l.Track)
+  track := trackAsJSPFTrack(l.Track)
   extension := makeMusicBrainzExtension(l.Track)
   extension.AddedAt = l.ListenedAt
   extension.AddedBy = l.UserName
-  track.Extension[jspf.MusicBrainzTrackExtensionId] = extension
+  track.Extension[jspf.MusicBrainzTrackExtensionID] = extension
 
-  if l.RecordingMbid != "" {
+  if l.RecordingMBID != "" {
-    track.Identifier = append(track.Identifier, "https://musicbrainz.org/recording/"+string(l.RecordingMbid))
+    track.Identifier = append(track.Identifier, recordingMBIDPrefix+string(l.RecordingMBID))
   }
 
   return track
 }
 
+func trackAsListen(t jspf.Track) (*models.Listen, error) {
+  track, ext, err := jspfTrackAsTrack(t)
+  if err != nil {
+    return nil, err
+  }
+
+  listen := models.Listen{
+    ListenedAt: ext.AddedAt,
+    UserName: ext.AddedBy,
+    Track: *track,
+  }
+  return &listen, err
+}
+
 func loveAsTrack(l models.Love) jspf.Track {
   l.FillAdditionalInfo()
-  track := trackAsTrack(l.Track)
+  track := trackAsJSPFTrack(l.Track)
   extension := makeMusicBrainzExtension(l.Track)
   extension.AddedAt = l.Created
   extension.AddedBy = l.UserName
-  track.Extension[jspf.MusicBrainzTrackExtensionId] = extension
+  track.Extension[jspf.MusicBrainzTrackExtensionID] = extension
 
-  recordingMbid := l.Track.RecordingMbid
+  recordingMBID := l.Track.RecordingMBID
-  if l.RecordingMbid != "" {
+  if l.RecordingMBID != "" {
-    recordingMbid = l.RecordingMbid
+    recordingMBID = l.RecordingMBID
   }
-  if recordingMbid != "" {
+  if recordingMBID != "" {
-    track.Identifier = append(track.Identifier, "https://musicbrainz.org/recording/"+string(recordingMbid))
+    track.Identifier = append(track.Identifier, recordingMBIDPrefix+string(recordingMBID))
   }
 
   return track
 }
 
-func trackAsTrack(t models.Track) jspf.Track {
+func trackAsLove(t jspf.Track) (*models.Love, error) {
+  track, ext, err := jspfTrackAsTrack(t)
+  if err != nil {
+    return nil, err
+  }
+
+  love := models.Love{
+    Created: ext.AddedAt,
+    UserName: ext.AddedBy,
+    RecordingMBID: track.RecordingMBID,
+    Track: *track,
+  }
+
+  recordingMSID, ok := track.AdditionalInfo["recording_msid"].(string)
+  if ok {
+    love.RecordingMSID = mbtypes.MBID(recordingMSID)
+  }
+
+  return &love, err
+}
+
+func trackAsJSPFTrack(t models.Track) jspf.Track {
   track := jspf.Track{
     Title: t.TrackName,
     Album: t.ReleaseName,
     Creator: t.ArtistName(),
     TrackNum: t.TrackNumber,
-    Extension: map[string]any{},
+    Duration: t.Duration.Milliseconds(),
+    Extension: jspf.ExtensionMap{},
   }
 
   return track
 }
 
+func jspfTrackAsTrack(t jspf.Track) (*models.Track, *jspf.MusicBrainzTrackExtension, error) {
+  track := models.Track{
+    ArtistNames: []string{t.Creator},
+    ReleaseName: t.Album,
+    TrackName: t.Title,
+    TrackNumber: t.TrackNum,
+    Duration: time.Duration(t.Duration) * time.Millisecond,
+  }
+
+  for _, id := range t.Identifier {
+    if strings.HasPrefix(id, recordingMBIDPrefix) {
+      track.RecordingMBID = mbtypes.MBID(id[len(recordingMBIDPrefix):])
+    }
+  }
+
+  ext, err := readMusicBrainzExtension(t, &track)
+  if err != nil {
+    return nil, nil, err
+  }
+
+  return &track, ext, nil
+}
+
 func makeMusicBrainzExtension(t models.Track) jspf.MusicBrainzTrackExtension {
   extension := jspf.MusicBrainzTrackExtension{
     AdditionalMetadata: t.AdditionalInfo,
-    ArtistIdentifiers: make([]string, len(t.ArtistMbids)),
+    ArtistIdentifiers: make([]string, len(t.ArtistMBIDs)),
   }
 
-  for i, mbid := range t.ArtistMbids {
+  for i, mbid := range t.ArtistMBIDs {
-    extension.ArtistIdentifiers[i] = "https://musicbrainz.org/artist/" + string(mbid)
+    extension.ArtistIdentifiers[i] = artistMBIDPrefix + string(mbid)
   }
 
-  if t.ReleaseMbid != "" {
+  if t.ReleaseMBID != "" {
-    extension.ReleaseIdentifier = "https://musicbrainz.org/release/" + string(t.ReleaseMbid)
+    extension.ReleaseIdentifier = releaseMBIDPrefix + string(t.ReleaseMBID)
   }
 
   // The tracknumber tag would be redundant
@@ -176,6 +296,25 @@ func makeMusicBrainzExtension(t models.Track) jspf.MusicBrainzTrackExtension {
   return extension
 }
 
+func readMusicBrainzExtension(jspfTrack jspf.Track, outputTrack *models.Track) (*jspf.MusicBrainzTrackExtension, error) {
+  ext := jspf.MusicBrainzTrackExtension{}
+  err := jspfTrack.Extension.Get(jspf.MusicBrainzTrackExtensionID, &ext)
+  if err != nil {
+    return nil, errors.New("missing MusicBrainz track extension")
+  }
+
+  outputTrack.AdditionalInfo = ext.AdditionalMetadata
+  outputTrack.ReleaseMBID = mbtypes.MBID(ext.ReleaseIdentifier)
+  outputTrack.ArtistMBIDs = make([]mbtypes.MBID, len(ext.ArtistIdentifiers))
+  for i, mbid := range ext.ArtistIdentifiers {
+    if strings.HasPrefix(mbid, artistMBIDPrefix) {
+      outputTrack.ArtistMBIDs[i] = mbtypes.MBID(mbid[len(artistMBIDPrefix):])
+    }
+  }
+
+  return &ext, nil
+}
+
 func (b *JSPFBackend) readJSPF() error {
   if b.append {
     file, err := os.Open(b.filePath)
@@ -199,6 +338,7 @@ func (b *JSPFBackend) readJSPF() error {
       return err
     }
     b.playlist = playlist.Playlist
+    b.addMusicBrainzPlaylistExtension()
   }
 }
 
@@ -218,3 +358,13 @@ func (b *JSPFBackend) writeJSPF() error {
   defer file.Close()
   return playlist.Write(file)
 }
+
+func (b *JSPFBackend) addMusicBrainzPlaylistExtension() {
+  if b.playlist.Extension == nil {
+    b.playlist.Extension = make(jspf.ExtensionMap, 1)
+  }
+  extension := jspf.MusicBrainzPlaylistExtension{Public: true}
+  b.playlist.Extension.Get(jspf.MusicBrainzPlaylistExtensionID, &extension)
+  extension.LastModifiedAt = time.Now()
+  b.playlist.Extension[jspf.MusicBrainzPlaylistExtensionID] = extension
+}

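The new export path above recovers MBIDs from JSPF identifier URIs by stripping the MusicBrainz URL prefixes it also uses when writing. A small, self-contained sketch of that round trip (the prefix constant is copied from the diff; the rest is illustrative only):

package main

import (
	"fmt"
	"strings"
)

const recordingMBIDPrefix = "https://musicbrainz.org/recording/"

func main() {
	// Writing: the backend stores the MBID as a full MusicBrainz URI.
	id := recordingMBIDPrefix + "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"

	// Reading: the export path strips the prefix to get the bare MBID back.
	if strings.HasPrefix(id, recordingMBIDPrefix) {
		fmt.Println(id[len(recordingMBIDPrefix):]) // c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12
	}
}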
@@ -26,13 +26,14 @@ import (
   "go.uploadedlobster.com/scotty/internal/config"
 )
 
-func TestFromConfig(t *testing.T) {
+func TestInitConfig(t *testing.T) {
   c := viper.New()
   c.Set("file-path", "/foo/bar.jspf")
   c.Set("title", "My Playlist")
   c.Set("username", "outsidecontext")
   c.Set("identifier", "http://example.com/playlist1")
   service := config.NewServiceConfig("test", c)
-  backend := (&jspf.JSPFBackend{}).FromConfig(&service)
-  assert.IsType(t, &jspf.JSPFBackend{}, backend)
+  backend := jspf.JSPFBackend{}
+  err := backend.InitConfig(&service)
+  assert.NoError(t, err)
 }

@@ -25,21 +25,21 @@ import (
 
 type lastfmStrategy struct {
   client *lastfm.Api
-  redirectUrl *url.URL
+  redirectURL *url.URL
 }
 
 func (s lastfmStrategy) Config() oauth2.Config {
   return oauth2.Config{}
 }
 
-func (s lastfmStrategy) AuthCodeURL(verifier string, state string) auth.AuthUrl {
+func (s lastfmStrategy) AuthCodeURL(verifier string, state string) auth.AuthURL {
   // Last.fm does not use OAuth2, but the provided authorization flow with
   // callback URL is close enough we can shoehorn it into the existing
   // authentication strategy.
   // TODO: Investigate and use callback-less flow with api.GetAuthTokenUrl(token)
-  url := s.client.GetAuthRequestUrl(s.redirectUrl.String())
+  url := s.client.GetAuthRequestUrl(s.redirectURL.String())
-  return auth.AuthUrl{
+  return auth.AuthURL{
-    Url: url,
+    URL: url,
     State: "", // last.fm does not use state
     Param: "token",
   }

@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@@ -23,6 +23,7 @@ import (
   "time"
 
   "github.com/shkh/lastfm-go/lastfm"
+  "go.uploadedlobster.com/mbtypes"
   "go.uploadedlobster.com/scotty/internal/auth"
   "go.uploadedlobster.com/scotty/internal/config"
   "go.uploadedlobster.com/scotty/internal/i18n"
@@ -60,21 +61,21 @@ func (b *LastfmApiBackend) Options() []models.BackendOption {
   }}
 }
 
-func (b *LastfmApiBackend) FromConfig(config *config.ServiceConfig) models.Backend {
+func (b *LastfmApiBackend) InitConfig(config *config.ServiceConfig) error {
-  clientId := config.GetString("client-id")
+  clientID := config.GetString("client-id")
   clientSecret := config.GetString("client-secret")
-  b.client = lastfm.New(clientId, clientSecret)
+  b.client = lastfm.New(clientID, clientSecret)
   b.username = config.GetString("username")
-  return b
+  return nil
 }
 
 func (b *LastfmApiBackend) StartImport() error { return nil }
 func (b *LastfmApiBackend) FinishImport() error { return nil }
 
-func (b *LastfmApiBackend) OAuth2Strategy(redirectUrl *url.URL) auth.OAuth2Strategy {
+func (b *LastfmApiBackend) OAuth2Strategy(redirectURL *url.URL) auth.OAuth2Strategy {
   return lastfmStrategy{
     client: b.client,
-    redirectUrl: redirectUrl,
+    redirectURL: redirectURL,
   }
 }
 
@@ -87,15 +88,17 @@ func (b *LastfmApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
   return nil
 }
 
-func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *LastfmApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
   page := MaxPage
   minTime := oldestTimestamp
   perPage := MaxItemsPerGet
 
-  defer close(results)
-
   // We need to gather the full list of listens in order to sort them
-  p := models.Progress{Total: int64(page)}
+  p := models.TransferProgress{
+    Export: &models.Progress{
+      Total: int64(page),
+    },
+  }
 
 out:
   for page > 0 {
@@ -109,7 +112,8 @@ out:
     result, err := b.client.User.GetRecentTracks(args)
     if err != nil {
       results <- models.ListensResult{Error: err}
-      progress <- p.Complete()
+      p.Export.Abort()
+      progress <- p
       return
     }
 
@@ -128,11 +132,12 @@ out:
       timestamp, err := strconv.ParseInt(scrobble.Date.Uts, 10, 64)
       if err != nil {
         results <- models.ListensResult{Error: err}
-        progress <- p.Complete()
+        p.Export.Abort()
+        progress <- p
         break out
       }
       if timestamp > oldestTimestamp.Unix() {
-        p.Elapsed += 1
+        p.Export.Elapsed += 1
         listen := models.Listen{
           ListenedAt: time.Unix(timestamp, 0),
           UserName: b.username,
@@ -140,16 +145,16 @@ out:
           TrackName: scrobble.Name,
           ArtistNames: []string{},
           ReleaseName: scrobble.Album.Name,
-          RecordingMbid: models.MBID(scrobble.Mbid),
+          RecordingMBID: mbtypes.MBID(scrobble.Mbid),
-          ArtistMbids: []models.MBID{},
+          ArtistMBIDs: []mbtypes.MBID{},
-          ReleaseMbid: models.MBID(scrobble.Album.Mbid),
+          ReleaseMBID: mbtypes.MBID(scrobble.Album.Mbid),
         },
       }
       if scrobble.Artist.Name != "" {
         listen.Track.ArtistNames = []string{scrobble.Artist.Name}
       }
       if scrobble.Artist.Mbid != "" {
-        listen.Track.ArtistMbids = []models.MBID{models.MBID(scrobble.Artist.Mbid)}
+        listen.Track.ArtistMBIDs = []mbtypes.MBID{mbtypes.MBID(scrobble.Artist.Mbid)}
       }
       listens = append(listens, listen)
     } else {
@@ -166,16 +171,18 @@ out:
       Total: result.Total,
       OldestTimestamp: minTime,
     }
-    p.Total = int64(result.TotalPages)
+    p.Export.Total = int64(result.TotalPages)
-    p.Elapsed = int64(result.TotalPages - page)
+    p.Export.Elapsed = int64(result.TotalPages - page)
+    p.Export.TotalItems += len(listens)
     progress <- p
   }
 
   results <- models.ListensResult{OldestTimestamp: minTime}
-  progress <- p.Complete()
+  p.Export.Complete()
+  progress <- p
 }
 
-func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
   total := len(export.Items)
   for i := 0; i < total; i += MaxListensPerSubmission {
     listens := export.Items[i:min(i+MaxListensPerSubmission, total)]
@@ -203,8 +210,8 @@ func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResu
       if l.TrackNumber > 0 {
         trackNumbers = append(trackNumbers, strconv.Itoa(l.TrackNumber))
       }
-      if l.RecordingMbid != "" {
+      if l.RecordingMBID != "" {
-        mbids = append(mbids, string(l.RecordingMbid))
+        mbids = append(mbids, string(l.RecordingMBID))
       }
       // if l.ReleaseArtist != "" {
       //   albumArtists = append(albums, l.ReleaseArtist)
@@ -245,22 +252,24 @@ func (b *LastfmApiBackend) ImportListens(export models.ListensResult, importResu
 
     importResult.UpdateTimestamp(listens[count-1].ListenedAt)
     importResult.ImportCount += accepted
-    progress <- models.Progress{}.FromImportResult(importResult)
+    progress <- models.TransferProgress{}.FromImportResult(importResult, false)
   }
 
   return importResult, nil
 }
 
-func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *LastfmApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
   // Choose a high offset, we attempt to search the loves backwards starting
   // at the oldest one.
   page := 1
   perPage := MaxItemsPerGet
 
-  defer close(results)
-
   loves := make(models.LovesList, 0, 2*MaxItemsPerGet)
-  p := models.Progress{Total: int64(perPage)}
+  p := models.TransferProgress{
+    Export: &models.Progress{
+      Total: int64(perPage),
+    },
+  }
   var totalCount int
 
 out:
@@ -271,12 +280,12 @@ out:
       "page": page,
     })
     if err != nil {
-      progress <- p.Complete()
+      p.Export.Abort()
+      progress <- p
      results <- models.LovesResult{Error: err}
      return
    }
 
-    p.Total = int64(result.Total)
     count := len(result.Tracks)
     if count == 0 {
       break out
@@ -285,7 +294,8 @@ out:
     for _, track := range result.Tracks {
       timestamp, err := strconv.ParseInt(track.Date.Uts, 10, 64)
       if err != nil {
-        progress <- p.Complete()
+        p.Export.Abort()
+        progress <- p
         results <- models.LovesResult{Error: err}
         return
       }
@@ -294,12 +304,12 @@ out:
       love := models.Love{
         Created: time.Unix(timestamp, 0),
         UserName: result.User,
-        RecordingMbid: models.MBID(track.Mbid),
+        RecordingMBID: mbtypes.MBID(track.Mbid),
         Track: models.Track{
           TrackName: track.Name,
           ArtistNames: []string{track.Artist.Name},
-          RecordingMbid: models.MBID(track.Mbid),
+          RecordingMBID: mbtypes.MBID(track.Mbid),
-          ArtistMbids: []models.MBID{models.MBID(track.Artist.Mbid)},
+          ArtistMBIDs: []mbtypes.MBID{mbtypes.MBID(track.Artist.Mbid)},
           AdditionalInfo: models.AdditionalInfo{
             "lastfm_url": track.Url,
           },
@@ -311,18 +321,21 @@ out:
       }
     }
 
-    p.Elapsed += int64(count)
+    p.Export.Total += int64(perPage)
+    p.Export.TotalItems = totalCount
+    p.Export.Elapsed += int64(count)
     progress <- p
 
     page += 1
   }
 
   sort.Sort(loves)
+  p.Export.Complete()
+  progress <- p
   results <- models.LovesResult{Items: loves, Total: totalCount}
-  progress <- p.Complete()
 }
 
-func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
   for _, love := range export.Items {
     err := b.client.Track.Love(lastfm.P{
       "track": love.TrackName,
@@ -338,7 +351,7 @@ func (b *LastfmApiBackend) ImportLoves(export models.LovesResult, importResult m
       importResult.Log(models.Error, msg)
     }
 
-    progress <- models.Progress{}.FromImportResult(importResult)
+    progress <- models.TransferProgress{}.FromImportResult(importResult, false)
   }
 
   return importResult, nil

@@ -39,7 +39,7 @@ const (
 )
 
 type Client struct {
-  HttpClient *resty.Client
+  HTTPClient *resty.Client
   MaxResults int
 }
 
@@ -55,7 +55,7 @@ func NewClient(token string) Client {
   ratelimit.EnableHTTPHeaderRateLimit(client, "X-RateLimit-Reset-In")
 
   return Client{
-    HttpClient: client,
+    HTTPClient: client,
     MaxResults: DefaultItemsPerGet,
   }
 }
@@ -63,7 +63,7 @@ func NewClient(token string) Client {
 func (c Client) GetListens(user string, maxTime time.Time, minTime time.Time) (result GetListensResult, err error) {
   const path = "/user/{username}/listens"
   errorResult := ErrorResult{}
-  response, err := c.HttpClient.R().
+  response, err := c.HTTPClient.R().
     SetPathParam("username", user).
     SetQueryParams(map[string]string{
       "max_ts": strconv.FormatInt(maxTime.Unix(), 10),
@@ -84,7 +84,7 @@ func (c Client) GetListens(user string, maxTime time.Time, minTime time.Time) (r
 func (c Client) SubmitListens(listens ListenSubmission) (result StatusResult, err error) {
   const path = "/submit-listens"
   errorResult := ErrorResult{}
-  response, err := c.HttpClient.R().
+  response, err := c.HTTPClient.R().
     SetBody(listens).
     SetResult(&result).
     SetError(&errorResult).
@@ -100,7 +100,7 @@ func (c Client) SubmitListens(listens ListenSubmission) (result StatusResult, er
 func (c Client) GetFeedback(user string, status int, offset int) (result GetFeedbackResult, err error) {
   const path = "/feedback/user/{username}/get-feedback"
   errorResult := ErrorResult{}
-  response, err := c.HttpClient.R().
+  response, err := c.HTTPClient.R().
     SetPathParam("username", user).
     SetQueryParams(map[string]string{
       "status": strconv.Itoa(status),
@@ -122,7 +122,7 @@ func (c Client) GetFeedback(user string, status int, offset int) (result GetFeed
 func (c Client) SendFeedback(feedback Feedback) (result StatusResult, err error) {
   const path = "/feedback/recording-feedback"
   errorResult := ErrorResult{}
-  response, err := c.HttpClient.R().
+  response, err := c.HTTPClient.R().
     SetBody(feedback).
     SetResult(&result).
     SetError(&errorResult).
@@ -138,7 +138,7 @@ func (c Client) SendFeedback(feedback Feedback) (result StatusResult, err error)
 func (c Client) Lookup(recordingName string, artistName string) (result LookupResult, err error) {
   const path = "/metadata/lookup"
   errorResult := ErrorResult{}
-  response, err := c.HttpClient.R().
+  response, err := c.HTTPClient.R().
     SetQueryParams(map[string]string{
       "recording_name": recordingName,
       "artist_name": artistName,

@@ -29,13 +29,14 @@ import (
   "github.com/jarcoal/httpmock"
   "github.com/stretchr/testify/assert"
   "github.com/stretchr/testify/require"
+  "go.uploadedlobster.com/mbtypes"
   "go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
 )
 
 func TestNewClient(t *testing.T) {
   token := "foobar123"
   client := listenbrainz.NewClient(token)
-  assert.Equal(t, token, client.HttpClient.Token)
+  assert.Equal(t, token, client.HTTPClient.Token)
   assert.Equal(t, listenbrainz.DefaultItemsPerGet, client.MaxResults)
 }
 
@@ -44,7 +45,7 @@ func TestGetListens(t *testing.T) {
 
   client := listenbrainz.NewClient("thetoken")
   client.MaxResults = 2
-  setupHttpMock(t, client.HttpClient.GetClient(),
+  setupHTTPMock(t, client.HTTPClient.GetClient(),
     "https://api.listenbrainz.org/1/user/outsidecontext/listens",
     "testdata/listens.json")
 
@@ -61,7 +62,7 @@ func TestGetListens(t *testing.T) {
 
 func TestSubmitListens(t *testing.T) {
   client := listenbrainz.NewClient("thetoken")
-  httpmock.ActivateNonDefault(client.HttpClient.GetClient())
+  httpmock.ActivateNonDefault(client.HTTPClient.GetClient())
 
   responder, err := httpmock.NewJsonResponder(200, listenbrainz.StatusResult{
     Status: "ok",
@@ -102,7 +103,7 @@ func TestGetFeedback(t *testing.T) {
 
   client := listenbrainz.NewClient("thetoken")
   client.MaxResults = 2
-  setupHttpMock(t, client.HttpClient.GetClient(),
+  setupHTTPMock(t, client.HTTPClient.GetClient(),
     "https://api.listenbrainz.org/1/feedback/user/outsidecontext/get-feedback",
     "testdata/feedback.json")
 
@@ -114,12 +115,12 @@ func TestGetFeedback(t *testing.T) {
   assert.Equal(302, result.TotalCount)
   assert.Equal(3, result.Offset)
   require.Len(t, result.Feedback, 2)
-  assert.Equal("c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12", result.Feedback[0].RecordingMbid)
+  assert.Equal(mbtypes.MBID("c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"), result.Feedback[0].RecordingMBID)
 }
 
 func TestSendFeedback(t *testing.T) {
   client := listenbrainz.NewClient("thetoken")
-  httpmock.ActivateNonDefault(client.HttpClient.GetClient())
+  httpmock.ActivateNonDefault(client.HTTPClient.GetClient())
 
   responder, err := httpmock.NewJsonResponder(200, listenbrainz.StatusResult{
     Status: "ok",
@@ -131,7 +132,7 @@ func TestSendFeedback(t *testing.T) {
   httpmock.RegisterResponder("POST", url, responder)
 
   feedback := listenbrainz.Feedback{
-    RecordingMbid: "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12",
+    RecordingMBID: "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12",
     Score: 1,
   }
   result, err := client.SendFeedback(feedback)
@@ -144,7 +145,7 @@ func TestLookup(t *testing.T) {
   defer httpmock.DeactivateAndReset()
 
   client := listenbrainz.NewClient("thetoken")
-  setupHttpMock(t, client.HttpClient.GetClient(),
+  setupHTTPMock(t, client.HTTPClient.GetClient(),
     "https://api.listenbrainz.org/1/metadata/lookup",
     "testdata/lookup.json")
 
@@ -154,10 +155,10 @@ func TestLookup(t *testing.T) {
   assert := assert.New(t)
   assert.Equal("Say Just Words", result.RecordingName)
   assert.Equal("Paradise Lost", result.ArtistCreditName)
-  assert.Equal("569436a1-234a-44bc-a370-8f4d252bef21", result.RecordingMbid)
+  assert.Equal(mbtypes.MBID("569436a1-234a-44bc-a370-8f4d252bef21"), result.RecordingMBID)
 }
 
-func setupHttpMock(t *testing.T, client *http.Client, url string, testDataPath string) {
+func setupHTTPMock(t *testing.T, client *http.Client, url string, testDataPath string) {
   httpmock.ActivateNonDefault(client)
 
   responder, err := httpmock.NewJsonResponder(200, httpmock.File(testDataPath))

@@ -1,5 +1,5 @@
/*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

This file is part of Scotty.

@@ -21,6 +21,8 @@ import (
"sort"
"time"

+"go.uploadedlobster.com/mbtypes"
+"go.uploadedlobster.com/musicbrainzws2"
"go.uploadedlobster.com/scotty/internal/config"
"go.uploadedlobster.com/scotty/internal/i18n"
"go.uploadedlobster.com/scotty/internal/models"
@@ -30,9 +32,10 @@ import (

type ListenBrainzApiBackend struct {
client Client
+mbClient musicbrainzws2.Client
username string
checkDuplicates bool
-existingMbids map[string]bool
+existingMBIDs map[mbtypes.MBID]bool
}

func (b *ListenBrainzApiBackend) Name() string { return "listenbrainz" }
@@ -53,34 +56,41 @@ func (b *ListenBrainzApiBackend) Options() []models.BackendOption {
}}
}

-func (b *ListenBrainzApiBackend) FromConfig(config *config.ServiceConfig) models.Backend {
+func (b *ListenBrainzApiBackend) InitConfig(config *config.ServiceConfig) error {
b.client = NewClient(config.GetString("token"))
+b.mbClient = *musicbrainzws2.NewClient(musicbrainzws2.AppInfo{
+Name: version.AppName,
+Version: version.AppVersion,
+URL: version.AppURL,
+})
b.client.MaxResults = MaxItemsPerGet
b.username = config.GetString("username")
b.checkDuplicates = config.GetBool("check-duplicate-listens", false)
-return b
+return nil
}

func (b *ListenBrainzApiBackend) StartImport() error { return nil }
func (b *ListenBrainzApiBackend) FinishImport() error { return nil }

-func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
startTime := time.Now()
minTime := oldestTimestamp
if minTime.Unix() < 1 {
minTime = time.Unix(1, 0)
}

-totalDuration := startTime.Sub(minTime)
-defer close(results)
-p := models.Progress{Total: int64(totalDuration.Seconds())}
+totalDuration := startTime.Sub(oldestTimestamp)
+p := models.TransferProgress{
+Export: &models.Progress{
+Total: int64(totalDuration.Seconds()),
+},
+}

for {
result, err := b.client.GetListens(b.username, time.Now(), minTime)
if err != nil {
-progress <- p.Complete()
+p.Export.Abort()
+progress <- p
results <- models.ListensResult{Error: err}
return
}
@@ -90,7 +100,7 @@ func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, result
if minTime.Unix() < result.Payload.OldestListenTimestamp {
minTime = time.Unix(result.Payload.OldestListenTimestamp, 0)
totalDuration = startTime.Sub(minTime)
-p.Total = int64(totalDuration.Seconds())
+p.Export.Total = int64(totalDuration.Seconds())
continue
} else {
break
@@ -113,18 +123,20 @@ func (b *ListenBrainzApiBackend) ExportListens(oldestTimestamp time.Time, result
}

sort.Sort(listens)
-p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
+p.Export.TotalItems += len(listens)
+p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
progress <- p
results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}
}

results <- models.ListensResult{OldestTimestamp: minTime}
-progress <- p.Complete()
+p.Export.Complete()
+progress <- p
}

-func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
total := len(export.Items)
-p := models.Progress{}.FromImportResult(importResult)
+p := models.TransferProgress{}.FromImportResult(importResult, false)
for i := 0; i < total; i += MaxListensPerRequest {
listens := export.Items[i:min(i+MaxListensPerRequest, total)]
count := len(listens)
@@ -140,14 +152,14 @@ func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, impo
for _, l := range listens {
if b.checkDuplicates {
isDupe, err := b.checkDuplicateListen(l)
-p.Elapsed += 1
+p.Import.Elapsed += 1
progress <- p
if err != nil {
return importResult, err
} else if isDupe {
count -= 1
msg := i18n.Tr("Ignored duplicate listen %v: \"%v\" by %v (%v)",
-l.ListenedAt, l.TrackName, l.ArtistName(), l.RecordingMbid)
+l.ListenedAt, l.TrackName, l.ArtistName(), l.RecordingMBID)
importResult.Log(models.Info, msg)
continue
}
@@ -180,23 +192,47 @@ func (b *ListenBrainzApiBackend) ImportListens(export models.ListensResult, impo
importResult.UpdateTimestamp(listens[count-1].ListenedAt)
}
importResult.ImportCount += count
-progress <- p.FromImportResult(importResult)
+progress <- p.FromImportResult(importResult, false)
}

return importResult, nil
}

-func (b *ListenBrainzApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *ListenBrainzApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
+exportChan := make(chan models.LovesResult)
+p := models.TransferProgress{
+Export: &models.Progress{},
+}
+
+go b.exportLoves(oldestTimestamp, exportChan)
+for existingLoves := range exportChan {
+if existingLoves.Error != nil {
+p.Export.Abort()
+progress <- p
+results <- models.LovesResult{Error: existingLoves.Error}
+return
+}
+
+p.Export.TotalItems = existingLoves.Total
+p.Export.Total = int64(existingLoves.Total)
+p.Export.Elapsed += int64(len(existingLoves.Items))
+progress <- p
+results <- existingLoves
+}
+
+p.Export.Complete()
+progress <- p
+}
+
+func (b *ListenBrainzApiBackend) exportLoves(oldestTimestamp time.Time, results chan models.LovesResult) {
offset := 0
defer close(results)
loves := make(models.LovesList, 0, 2*MaxItemsPerGet)
-p := models.Progress{}

out:
for {
result, err := b.client.GetFeedback(b.username, 1, offset)
if err != nil {
-progress <- p.Complete()
results <- models.LovesResult{Error: err}
return
}
@@ -207,66 +243,87 @@ out:
}

for _, feedback := range result.Feedback {
+// Missing track metadata indicates that the recording MBID is no
+// longer available and might have been merged. Try fetching details
+// from MusicBrainz.
+if feedback.TrackMetadata == nil {
+track, err := b.lookupRecording(feedback.RecordingMBID)
+if err == nil {
+feedback.TrackMetadata = track
+}
+}
+
love := feedback.AsLove()
-if love.Created.Unix() > oldestTimestamp.Unix() {
+if love.Created.After(oldestTimestamp) {
loves = append(loves, love)
-p.Elapsed += 1
-progress <- p
} else {
break out
}
}

-p.Total = int64(result.TotalCount)
-p.Elapsed += int64(count)

offset += MaxItemsPerGet
}

sort.Sort(loves)
-progress <- p.Complete()
-results <- models.LovesResult{Items: loves}
+results <- models.LovesResult{
+Total: len(loves),
+Items: loves,
+}
}

-func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
-if len(b.existingMbids) == 0 {
+func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
+if len(b.existingMBIDs) == 0 {
existingLovesChan := make(chan models.LovesResult)
-go b.ExportLoves(time.Unix(0, 0), existingLovesChan, progress)
-existingLoves := <-existingLovesChan
+go b.exportLoves(time.Unix(0, 0), existingLovesChan)
+// TODO: Store MBIDs directly
+b.existingMBIDs = make(map[mbtypes.MBID]bool, MaxItemsPerGet)
+
+for existingLoves := range existingLovesChan {
if existingLoves.Error != nil {
return importResult, existingLoves.Error
}

-// TODO: Store MBIDs directly
-b.existingMbids = make(map[string]bool, len(existingLoves.Items))
for _, love := range existingLoves.Items {
-b.existingMbids[string(love.RecordingMbid)] = true
+b.existingMBIDs[love.RecordingMBID] = true
+// In case the loved MBID got merged the track MBID represents the
+// actual recording MBID.
+if love.Track.RecordingMBID != "" &&
+love.Track.RecordingMBID != love.RecordingMBID {
+b.existingMBIDs[love.Track.RecordingMBID] = true
+}
+}
}
}

for _, love := range export.Items {
-recordingMbid := string(love.RecordingMbid)
+recordingMBID := love.RecordingMBID
+if recordingMBID == "" {
+recordingMBID = love.Track.RecordingMBID
+}

-if recordingMbid == "" {
+if recordingMBID == "" {
lookup, err := b.client.Lookup(love.TrackName, love.ArtistName())
if err == nil {
-recordingMbid = lookup.RecordingMbid
+recordingMBID = lookup.RecordingMBID
}
}

-if recordingMbid != "" {
+if recordingMBID != "" {
ok := false
errMsg := ""
-if b.existingMbids[recordingMbid] {
+if b.existingMBIDs[recordingMBID] {
ok = true
} else {
resp, err := b.client.SendFeedback(Feedback{
-RecordingMbid: recordingMbid,
+RecordingMBID: recordingMBID,
Score: 1,
})
ok = err == nil && resp.Status == "ok"
if err != nil {
errMsg = err.Error()
+} else {
+b.existingMBIDs[recordingMBID] = true
}
}

@@ -278,9 +335,13 @@ func (b *ListenBrainzApiBackend) ImportLoves(export models.LovesResult, importRe
love.TrackName, love.ArtistName(), errMsg)
importResult.Log(models.Error, msg)
}
+} else {
+msg := fmt.Sprintf("Failed import of \"%s\" by %s: no recording MBID",
+love.TrackName, love.ArtistName())
+importResult.Log(models.Error, msg)
}

-progress <- models.Progress{}.FromImportResult(importResult)
+progress <- models.TransferProgress{}.FromImportResult(importResult, false)
}

return importResult, nil
@@ -313,6 +374,31 @@ func (b *ListenBrainzApiBackend) checkDuplicateListen(listen models.Listen) (boo
return false, nil
}

+func (b *ListenBrainzApiBackend) lookupRecording(mbid mbtypes.MBID) (*Track, error) {
+filter := musicbrainzws2.IncludesFilter{
+Includes: []string{"artist-credits"},
+}
+recording, err := b.mbClient.LookupRecording(mbid, filter)
+if err != nil {
+return nil, err
+}
+
+artistMBIDs := make([]mbtypes.MBID, 0, len(recording.ArtistCredit))
+for _, artist := range recording.ArtistCredit {
+artistMBIDs = append(artistMBIDs, artist.Artist.ID)
+}
+track := Track{
+TrackName: recording.Title,
+ArtistName: recording.ArtistCredit.String(),
+MBIDMapping: &MBIDMapping{
+// In case of redirects this MBID differs from the looked up MBID
+RecordingMBID: recording.ID,
+ArtistMBIDs: artistMBIDs,
+},
+}
+return &track, nil
+}
+
func (lbListen Listen) AsListen() models.Listen {
listen := models.Listen{
ListenedAt: time.Unix(lbListen.ListenedAt, 0),
@@ -323,20 +409,20 @@ func (lbListen Listen) AsListen() models.Listen {
}

func (f Feedback) AsLove() models.Love {
-recordingMbid := models.MBID(f.RecordingMbid)
+recordingMBID := f.RecordingMBID
track := f.TrackMetadata
if track == nil {
track = &Track{}
}
love := models.Love{
UserName: f.UserName,
-RecordingMbid: recordingMbid,
+RecordingMBID: recordingMBID,
Created: time.Unix(f.Created, 0),
Track: track.AsTrack(),
}

-if love.Track.RecordingMbid == "" {
-love.Track.RecordingMbid = love.RecordingMbid
+if love.Track.RecordingMBID == "" {
+love.Track.RecordingMBID = love.RecordingMBID
}

return love
@@ -350,16 +436,16 @@ func (t Track) AsTrack() models.Track {
Duration: t.Duration(),
TrackNumber: t.TrackNumber(),
DiscNumber: t.DiscNumber(),
-RecordingMbid: models.MBID(t.RecordingMbid()),
-ReleaseMbid: models.MBID(t.ReleaseMbid()),
-ReleaseGroupMbid: models.MBID(t.ReleaseGroupMbid()),
+RecordingMBID: t.RecordingMBID(),
+ReleaseMBID: t.ReleaseMBID(),
+ReleaseGroupMBID: t.ReleaseGroupMBID(),
ISRC: t.ISRC(),
AdditionalInfo: t.AdditionalInfo,
}

-if t.MbidMapping != nil && len(track.ArtistMbids) == 0 {
-for _, artistMbid := range t.MbidMapping.ArtistMbids {
-track.ArtistMbids = append(track.ArtistMbids, models.MBID(artistMbid))
+if t.MBIDMapping != nil && len(track.ArtistMBIDs) == 0 {
+for _, artistMBID := range t.MBIDMapping.ArtistMBIDs {
+track.ArtistMBIDs = append(track.ArtistMBIDs, artistMBID)
}
}
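The exporters above now report through a channel of models.TransferProgress with separate Export and Import counters and Abort/Complete transitions. The following standalone sketch shows that producer/consumer pattern with simplified stand-in types; Progress and TransferProgress here are reduced assumptions for illustration, not the real scotty models package.

// Sketch of the progress-channel pattern: a producer goroutine streams
// snapshots, a consumer (e.g. a CLI progress bar) ranges over them.
package main

import (
	"fmt"
	"time"
)

type Progress struct {
	Total, Elapsed int64
	Done           bool
}

func (p *Progress) Complete() { p.Elapsed = p.Total; p.Done = true }

type TransferProgress struct {
	Export *Progress
}

func main() {
	progress := make(chan TransferProgress)

	go func() {
		defer close(progress)
		p := Progress{Total: 3}
		for i := int64(1); i <= 3; i++ {
			time.Sleep(10 * time.Millisecond)
			p.Elapsed = i
			snapshot := p // send a copy so the reader never races with later updates
			progress <- TransferProgress{Export: &snapshot}
		}
		p.Complete()
		snapshot := p
		progress <- TransferProgress{Export: &snapshot}
	}()

	for update := range progress {
		fmt.Printf("export %d/%d (done=%v)\n",
			update.Export.Elapsed, update.Export.Total, update.Export.Done)
	}
}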
@@ -23,17 +23,18 @@ import (
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+"go.uploadedlobster.com/mbtypes"
"go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
"go.uploadedlobster.com/scotty/internal/config"
-"go.uploadedlobster.com/scotty/internal/models"
)

-func TestFromConfig(t *testing.T) {
+func TestInitConfig(t *testing.T) {
c := viper.New()
c.Set("token", "thetoken")
service := config.NewServiceConfig("test", c)
-backend := (&listenbrainz.ListenBrainzApiBackend{}).FromConfig(&service)
-assert.IsType(t, &listenbrainz.ListenBrainzApiBackend{}, backend)
+backend := listenbrainz.ListenBrainzApiBackend{}
+err := backend.InitConfig(&service)
+assert.NoError(t, err)
}

func TestListenBrainzListenAsListen(t *testing.T) {
@@ -65,30 +66,30 @@ func TestListenBrainzListenAsListen(t *testing.T) {
assert.Equal(t, []string{lbListen.TrackMetadata.ArtistName}, listen.ArtistNames)
assert.Equal(t, 5, listen.TrackNumber)
assert.Equal(t, 1, listen.DiscNumber)
-assert.Equal(t, models.MBID("c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"), listen.RecordingMbid)
-assert.Equal(t, models.MBID("d7f22677-9803-4d21-ba42-081b633a6f68"), listen.ReleaseMbid)
-assert.Equal(t, models.MBID("80aca1ee-aa51-41be-9f75-024710d92ff4"), listen.ReleaseGroupMbid)
-assert.Equal(t, "DES561620801", listen.ISRC)
+assert.Equal(t, mbtypes.MBID("c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"), listen.RecordingMBID)
+assert.Equal(t, mbtypes.MBID("d7f22677-9803-4d21-ba42-081b633a6f68"), listen.ReleaseMBID)
+assert.Equal(t, mbtypes.MBID("80aca1ee-aa51-41be-9f75-024710d92ff4"), listen.ReleaseGroupMBID)
+assert.Equal(t, mbtypes.ISRC("DES561620801"), listen.ISRC)
assert.Equal(t, lbListen.TrackMetadata.AdditionalInfo["foo"], listen.AdditionalInfo["foo"])
}

func TestListenBrainzFeedbackAsLove(t *testing.T) {
-recordingMbid := "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"
-releaseMbid := "d7f22677-9803-4d21-ba42-081b633a6f68"
-artistMbid := "d7f22677-9803-4d21-ba42-081b633a6f68"
+recordingMBID := mbtypes.MBID("c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12")
+releaseMBID := mbtypes.MBID("d7f22677-9803-4d21-ba42-081b633a6f68")
+artistMBID := mbtypes.MBID("d7f22677-9803-4d21-ba42-081b633a6f68")
feedback := listenbrainz.Feedback{
Created: 1699859066,
-RecordingMbid: recordingMbid,
+RecordingMBID: recordingMBID,
Score: 1,
UserName: "ousidecontext",
TrackMetadata: &listenbrainz.Track{
TrackName: "Oweynagat",
ArtistName: "Dool",
ReleaseName: "Here Now, There Then",
-MbidMapping: &listenbrainz.MbidMapping{
-RecordingMbid: recordingMbid,
-ReleaseMbid: releaseMbid,
-ArtistMbids: []string{artistMbid},
+MBIDMapping: &listenbrainz.MBIDMapping{
+RecordingMBID: recordingMBID,
+ReleaseMBID: releaseMBID,
+ArtistMBIDs: []mbtypes.MBID{artistMBID},
},
},
}
@@ -99,24 +100,24 @@ func TestListenBrainzFeedbackAsLove(t *testing.T) {
assert.Equal(feedback.TrackMetadata.TrackName, love.TrackName)
assert.Equal(feedback.TrackMetadata.ReleaseName, love.ReleaseName)
assert.Equal([]string{feedback.TrackMetadata.ArtistName}, love.ArtistNames)
-assert.Equal(models.MBID(recordingMbid), love.RecordingMbid)
-assert.Equal(models.MBID(recordingMbid), love.Track.RecordingMbid)
-assert.Equal(models.MBID(releaseMbid), love.Track.ReleaseMbid)
-require.Len(t, love.Track.ArtistMbids, 1)
-assert.Equal(models.MBID(artistMbid), love.Track.ArtistMbids[0])
+assert.Equal(recordingMBID, love.RecordingMBID)
+assert.Equal(recordingMBID, love.Track.RecordingMBID)
+assert.Equal(releaseMBID, love.Track.ReleaseMBID)
+require.Len(t, love.Track.ArtistMBIDs, 1)
+assert.Equal(artistMBID, love.Track.ArtistMBIDs[0])
}

func TestListenBrainzPartialFeedbackAsLove(t *testing.T) {
-recordingMbid := "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"
+recordingMBID := mbtypes.MBID("c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12")
feedback := listenbrainz.Feedback{
Created: 1699859066,
-RecordingMbid: recordingMbid,
+RecordingMBID: recordingMBID,
Score: 1,
}
love := feedback.AsLove()
assert := assert.New(t)
assert.Equal(time.Unix(1699859066, 0).Unix(), love.Created.Unix())
-assert.Equal(models.MBID(recordingMbid), love.RecordingMbid)
-assert.Equal(models.MBID(recordingMbid), love.Track.RecordingMbid)
+assert.Equal(recordingMBID, love.RecordingMBID)
+assert.Equal(recordingMBID, love.Track.RecordingMBID)
assert.Empty(love.Track.TrackName)
}
@@ -25,6 +25,7 @@ import (
"strconv"
"time"

+"go.uploadedlobster.com/mbtypes"
"golang.org/x/exp/constraints"
)

@@ -56,7 +57,7 @@ type ListenSubmission struct {
type Listen struct {
InsertedAt int64 `json:"inserted_at,omitempty"`
ListenedAt int64 `json:"listened_at"`
-RecordingMsid string `json:"recording_msid,omitempty"`
+RecordingMSID string `json:"recording_msid,omitempty"`
UserName string `json:"user_name,omitempty"`
TrackMetadata Track `json:"track_metadata"`
}
@@ -66,20 +67,20 @@ type Track struct {
ArtistName string `json:"artist_name,omitempty"`
ReleaseName string `json:"release_name,omitempty"`
AdditionalInfo map[string]any `json:"additional_info,omitempty"`
-MbidMapping *MbidMapping `json:"mbid_mapping,omitempty"`
+MBIDMapping *MBIDMapping `json:"mbid_mapping,omitempty"`
}

-type MbidMapping struct {
+type MBIDMapping struct {
RecordingName string `json:"recording_name,omitempty"`
-RecordingMbid string `json:"recording_mbid,omitempty"`
-ReleaseMbid string `json:"release_mbid,omitempty"`
-ArtistMbids []string `json:"artist_mbids,omitempty"`
+RecordingMBID mbtypes.MBID `json:"recording_mbid,omitempty"`
+ReleaseMBID mbtypes.MBID `json:"release_mbid,omitempty"`
+ArtistMBIDs []mbtypes.MBID `json:"artist_mbids,omitempty"`
Artists []Artist `json:"artists,omitempty"`
}

type Artist struct {
ArtistCreditName string `json:"artist_credit_name,omitempty"`
-ArtistMbid string `json:"artist_mbid,omitempty"`
+ArtistMBID string `json:"artist_mbid,omitempty"`
JoinPhrase string `json:"join_phrase,omitempty"`
}

@@ -92,8 +93,8 @@ type GetFeedbackResult struct {

type Feedback struct {
Created int64 `json:"created,omitempty"`
-RecordingMbid string `json:"recording_mbid,omitempty"`
-RecordingMsid string `json:"recording_msid,omitempty"`
+RecordingMBID mbtypes.MBID `json:"recording_mbid,omitempty"`
+RecordingMSID mbtypes.MBID `json:"recording_msid,omitempty"`
Score int `json:"score,omitempty"`
TrackMetadata *Track `json:"track_metadata,omitempty"`
UserName string `json:"user_id,omitempty"`
@@ -103,9 +104,9 @@ type LookupResult struct {
ArtistCreditName string `json:"artist_credit_name"`
ReleaseName string `json:"release_name"`
RecordingName string `json:"recording_name"`
-RecordingMbid string `json:"recording_mbid"`
-ReleaseMbid string `json:"release_mbid"`
-ArtistMbids []string `json:"artist_mbids"`
+RecordingMBID mbtypes.MBID `json:"recording_mbid"`
+ReleaseMBID mbtypes.MBID `json:"release_mbid"`
+ArtistMBIDs []mbtypes.MBID `json:"artist_mbids"`
}

type StatusResult struct {
@@ -158,30 +159,30 @@ func (t Track) DiscNumber() int {
return 0
}

-func (t Track) ISRC() string {
-return tryGetValueOrEmpty[string](t.AdditionalInfo, "isrc")
+func (t Track) ISRC() mbtypes.ISRC {
+return mbtypes.ISRC(tryGetValueOrEmpty[string](t.AdditionalInfo, "isrc"))
}

-func (t Track) RecordingMbid() string {
-mbid := tryGetValueOrEmpty[string](t.AdditionalInfo, "recording_mbid")
-if mbid == "" && t.MbidMapping != nil {
-return t.MbidMapping.RecordingMbid
+func (t Track) RecordingMBID() mbtypes.MBID {
+mbid := mbtypes.MBID(tryGetValueOrEmpty[string](t.AdditionalInfo, "recording_mbid"))
+if mbid == "" && t.MBIDMapping != nil {
+return t.MBIDMapping.RecordingMBID
} else {
return mbid
}
}

-func (t Track) ReleaseMbid() string {
-mbid := tryGetValueOrEmpty[string](t.AdditionalInfo, "release_mbid")
-if mbid == "" && t.MbidMapping != nil {
-return t.MbidMapping.ReleaseMbid
+func (t Track) ReleaseMBID() mbtypes.MBID {
+mbid := mbtypes.MBID(tryGetValueOrEmpty[string](t.AdditionalInfo, "release_mbid"))
+if mbid == "" && t.MBIDMapping != nil {
+return t.MBIDMapping.ReleaseMBID
} else {
return mbid
}
}

-func (t Track) ReleaseGroupMbid() string {
-return tryGetValueOrEmpty[string](t.AdditionalInfo, "release_group_mbid")
+func (t Track) ReleaseGroupMBID() mbtypes.MBID {
+return mbtypes.MBID(tryGetValueOrEmpty[string](t.AdditionalInfo, "release_group_mbid"))
}

func tryGetValueOrEmpty[T any](dict map[string]any, key string) T {
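The structs above now carry mbtypes.MBID instead of plain strings in their JSON mappings. A minimal standalone sketch of how such a payload decodes follows; the MBID, Track and MBIDMapping types are declared locally here as stand-ins (including an assumed "track_name" tag), and the example values are borrowed from the tests in this diff.

// Sketch: decoding a mapping payload into MBID-typed fields.
package main

import (
	"encoding/json"
	"fmt"
)

type MBID string // stand-in for go.uploadedlobster.com/mbtypes.MBID

type MBIDMapping struct {
	RecordingMBID MBID   `json:"recording_mbid,omitempty"`
	ReleaseMBID   MBID   `json:"release_mbid,omitempty"`
	ArtistMBIDs   []MBID `json:"artist_mbids,omitempty"`
}

type Track struct {
	TrackName   string       `json:"track_name,omitempty"`
	ArtistName  string       `json:"artist_name,omitempty"`
	MBIDMapping *MBIDMapping `json:"mbid_mapping,omitempty"`
}

func main() {
	payload := []byte(`{
		"track_name": "Say Just Words",
		"artist_name": "Paradise Lost",
		"mbid_mapping": {
			"recording_mbid": "569436a1-234a-44bc-a370-8f4d252bef21",
			"artist_mbids": ["d7f22677-9803-4d21-ba42-081b633a6f68"]
		}
	}`)

	var track Track
	if err := json.Unmarshal(payload, &track); err != nil {
		panic(err)
	}
	// MBID is a string type, so no custom (un)marshalling code is needed.
	fmt.Println(track.TrackName, track.MBIDMapping.RecordingMBID)
}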
@@ -28,6 +28,7 @@ import (

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+"go.uploadedlobster.com/mbtypes"
"go.uploadedlobster.com/scotty/internal/backends/listenbrainz"
)

@@ -130,50 +131,50 @@ func TestTrackTrackNumberString(t *testing.T) {
assert.Equal(t, 12, track.TrackNumber())
}

-func TestTrackIsrc(t *testing.T) {
-expected := "TCAEJ1934417"
+func TestTrackISRC(t *testing.T) {
+expected := mbtypes.ISRC("TCAEJ1934417")
track := listenbrainz.Track{
AdditionalInfo: map[string]any{
-"isrc": expected,
+"isrc": string(expected),
},
}
assert.Equal(t, expected, track.ISRC())
}

-func TestTrackRecordingMbid(t *testing.T) {
-expected := "e02cc1c3-93fd-4e24-8b77-325060de920b"
+func TestTrackRecordingMBID(t *testing.T) {
+expected := mbtypes.MBID("e02cc1c3-93fd-4e24-8b77-325060de920b")
track := listenbrainz.Track{
AdditionalInfo: map[string]any{
-"recording_mbid": expected,
+"recording_mbid": string(expected),
},
}
-assert.Equal(t, expected, track.RecordingMbid())
+assert.Equal(t, expected, track.RecordingMBID())
}

-func TestTrackReleaseMbid(t *testing.T) {
-expected := "e02cc1c3-93fd-4e24-8b77-325060de920b"
+func TestTrackReleaseMBID(t *testing.T) {
+expected := mbtypes.MBID("e02cc1c3-93fd-4e24-8b77-325060de920b")
track := listenbrainz.Track{
AdditionalInfo: map[string]any{
-"release_mbid": expected,
+"release_mbid": string(expected),
},
}
-assert.Equal(t, expected, track.ReleaseMbid())
+assert.Equal(t, expected, track.ReleaseMBID())
}

-func TestReleaseGroupMbid(t *testing.T) {
-expected := "e02cc1c3-93fd-4e24-8b77-325060de920b"
+func TestReleaseGroupMBID(t *testing.T) {
+expected := mbtypes.MBID("e02cc1c3-93fd-4e24-8b77-325060de920b")
track := listenbrainz.Track{
AdditionalInfo: map[string]any{
-"release_group_mbid": expected,
+"release_group_mbid": string(expected),
},
}
-assert.Equal(t, expected, track.ReleaseGroupMbid())
+assert.Equal(t, expected, track.ReleaseGroupMBID())
}

func TestMarshalPartialFeedback(t *testing.T) {
feedback := listenbrainz.Feedback{
Created: 1699859066,
-RecordingMbid: "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12",
+RecordingMBID: "c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12",
}
b, err := json.Marshal(feedback)
require.NoError(t, err)
@@ -32,25 +32,25 @@ import (
const MaxItemsPerGet = 1000

type Client struct {
-HttpClient *resty.Client
+HTTPClient *resty.Client
token string
}

-func NewClient(serverUrl string, token string) Client {
+func NewClient(serverURL string, token string) Client {
client := resty.New()
-client.SetBaseURL(serverUrl)
+client.SetBaseURL(serverURL)
client.SetHeader("Accept", "application/json")
client.SetHeader("User-Agent", version.UserAgent())
client.SetRetryCount(5)
return Client{
-HttpClient: client,
+HTTPClient: client,
token: token,
}
}

func (c Client) GetScrobbles(page int, perPage int) (result GetScrobblesResult, err error) {
const path = "/apis/mlj_1/scrobbles"
-response, err := c.HttpClient.R().
+response, err := c.HTTPClient.R().
SetQueryParams(map[string]string{
"page": strconv.Itoa(page),
"perpage": strconv.Itoa(perPage),
@@ -68,7 +68,7 @@ func (c Client) GetScrobbles(page int, perPage int) (result GetScrobblesResult,
func (c Client) NewScrobble(scrobble NewScrobble) (result NewScrobbleResult, err error) {
const path = "/apis/mlj_1/newscrobble"
scrobble.Key = c.token
-response, err := c.HttpClient.R().
+response, err := c.HTTPClient.R().
SetBody(scrobble).
SetResult(&result).
Post(path)
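A short usage sketch of the Maloja client shown above. The constructor and the GetScrobbles, List and AsListen names are taken from this diff; everything else (values, error handling) is illustrative, and since the package lives under internal/ this would only compile inside the scotty module.

// Sketch: fetch one page of scrobbles and convert them to listens.
package main

import (
	"fmt"

	"go.uploadedlobster.com/scotty/internal/backends/maloja"
)

func main() {
	client := maloja.NewClient("https://maloja.example.com", "thetoken")

	// Page numbering starts at 0, as in ExportListens above.
	result, err := client.GetScrobbles(0, 100)
	if err != nil {
		panic(err)
	}
	for _, scrobble := range result.List {
		listen := scrobble.AsListen()
		fmt.Println(listen.ListenedAt, listen.TrackName)
	}
}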
@@ -32,19 +32,19 @@ import (
)

func TestNewClient(t *testing.T) {
-serverUrl := "https://maloja.example.com"
+serverURL := "https://maloja.example.com"
token := "foobar123"
-client := maloja.NewClient(serverUrl, token)
-assert.Equal(t, serverUrl, client.HttpClient.BaseURL)
+client := maloja.NewClient(serverURL, token)
+assert.Equal(t, serverURL, client.HTTPClient.BaseURL)
}

func TestGetScrobbles(t *testing.T) {
defer httpmock.DeactivateAndReset()

-serverUrl := "https://maloja.example.com"
+serverURL := "https://maloja.example.com"
token := "thetoken"
-client := maloja.NewClient(serverUrl, token)
-setupHttpMock(t, client.HttpClient.GetClient(),
+client := maloja.NewClient(serverURL, token)
+setupHTTPMock(t, client.HTTPClient.GetClient(),
"https://maloja.example.com/apis/mlj_1/scrobbles",
"testdata/scrobbles.json")

@@ -60,7 +60,7 @@ func TestGetScrobbles(t *testing.T) {
func TestNewScrobble(t *testing.T) {
server := "https://maloja.example.com"
client := maloja.NewClient(server, "thetoken")
-httpmock.ActivateNonDefault(client.HttpClient.GetClient())
+httpmock.ActivateNonDefault(client.HTTPClient.GetClient())

responder, err := httpmock.NewJsonResponder(200, httpmock.File("testdata/newscrobble-result.json"))
if err != nil {
@@ -80,7 +80,7 @@ func TestNewScrobble(t *testing.T) {
assert.Equal(t, "success", result.Status)
}

-func setupHttpMock(t *testing.T, client *http.Client, url string, testDataPath string) {
+func setupHTTPMock(t *testing.T, client *http.Client, url string, testDataPath string) {
httpmock.ActivateNonDefault(client)

responder, err := httpmock.NewJsonResponder(200, httpmock.File(testDataPath))
@@ -1,5 +1,5 @@
/*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

This file is part of Scotty.

@@ -51,33 +51,36 @@ func (b *MalojaApiBackend) Options() []models.BackendOption {
}}
}

-func (b *MalojaApiBackend) FromConfig(config *config.ServiceConfig) models.Backend {
+func (b *MalojaApiBackend) InitConfig(config *config.ServiceConfig) error {
b.client = NewClient(
config.GetString("server-url"),
config.GetString("token"),
)
b.nofix = config.GetBool("nofix", false)
-return b
+return nil
}

func (b *MalojaApiBackend) StartImport() error { return nil }
func (b *MalojaApiBackend) FinishImport() error { return nil }

-func (b *MalojaApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *MalojaApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
page := 0
perPage := MaxItemsPerGet

-defer close(results)

// We need to gather the full list of listens in order to sort them
listens := make(models.ListensList, 0, 2*perPage)
-p := models.Progress{Total: int64(perPage)}
+p := models.TransferProgress{
+Export: &models.Progress{
+Total: int64(perPage),
+},
+}

out:
for {
result, err := b.client.GetScrobbles(page, perPage)
if err != nil {
-progress <- p.Complete()
+p.Export.Abort()
+progress <- p
results <- models.ListensResult{Error: err}
return
}
@@ -89,24 +92,27 @@ out:

for _, scrobble := range result.List {
if scrobble.ListenedAt > oldestTimestamp.Unix() {
-p.Elapsed += 1
+p.Export.Elapsed += 1
listens = append(listens, scrobble.AsListen())
} else {
break out
}
}

-p.Total += int64(perPage)
+p.Export.TotalItems = len(listens)
+p.Export.Total += int64(perPage)
progress <- p
page += 1
}

sort.Sort(listens)
-progress <- p.Complete()
+p.Export.Complete()
+progress <- p
results <- models.ListensResult{Items: listens}
}

-func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
+p := models.TransferProgress{}.FromImportResult(importResult, false)
for _, listen := range export.Items {
scrobble := NewScrobble{
Title: listen.TrackName,
@@ -127,7 +133,7 @@ func (b *MalojaApiBackend) ImportListens(export models.ListensResult, importResu

importResult.UpdateTimestamp(listen.ListenedAt)
importResult.ImportCount += 1
-progress <- models.Progress{}.FromImportResult(importResult)
+progress <- p.FromImportResult(importResult, false)
}

return importResult, nil
@@ -26,12 +26,13 @@ import (
"go.uploadedlobster.com/scotty/internal/config"
)

-func TestFromConfig(t *testing.T) {
+func TestInitConfig(t *testing.T) {
c := viper.New()
c.Set("token", "thetoken")
service := config.NewServiceConfig("test", c)
-backend := (&maloja.MalojaApiBackend{}).FromConfig(&service)
-assert.IsType(t, &maloja.MalojaApiBackend{}, backend)
+backend := maloja.MalojaApiBackend{}
+err := backend.InitConfig(&service)
+assert.NoError(t, err)
}

func TestScrobbleAsListen(t *testing.T) {
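The tests above exercise the new InitConfig entry point, which replaces the old FromConfig constructor and returns an error instead of the backend. A sketch of that flow outside a test follows; the viper keys match the ones read in maloja.go in this diff, and the code assumes it lives inside the scotty module because internal packages are imported.

// Sketch: configure a backend from a viper-backed service config.
package main

import (
	"log"

	"github.com/spf13/viper"
	"go.uploadedlobster.com/scotty/internal/backends/maloja"
	"go.uploadedlobster.com/scotty/internal/config"
)

func main() {
	v := viper.New()
	v.Set("server-url", "https://maloja.example.com")
	v.Set("token", "thetoken")
	service := config.NewServiceConfig("maloja-example", v)

	backend := maloja.MalojaApiBackend{}
	// InitConfig reports configuration problems instead of returning the backend.
	if err := backend.InitConfig(&service); err != nil {
		log.Fatal(err)
	}
}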
@@ -1,210 +0,0 @@
-/*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-package scrobblerlog
-
-import (
-"bufio"
-"encoding/csv"
-"fmt"
-"io"
-"strconv"
-"strings"
-"time"
-
-"go.uploadedlobster.com/scotty/internal/models"
-)
-
-type ScrobblerLog struct {
-Timezone string
-Client string
-Listens models.ListensList
-}
-
-func Parse(data io.Reader, includeSkipped bool) (ScrobblerLog, error) {
-result := ScrobblerLog{
-Listens: make(models.ListensList, 0),
-}
-
-reader := bufio.NewReader(data)
-err := ReadHeader(reader, &result)
-if err != nil {
-return result, err
-}
-
-tsvReader := csv.NewReader(reader)
-tsvReader.Comma = '\t'
-// Row length is often flexible
-tsvReader.FieldsPerRecord = -1
-
-for {
-// A row is:
-// artistName releaseName trackName trackNumber duration rating timestamp recordingMbid
-row, err := tsvReader.Read()
-if err == io.EOF {
-break
-} else if err != nil {
-return result, err
-}
-
-// fmt.Printf("row: %v\n", row)
-
-// We consider only the last field (recording MBID) optional
-if len(row) < 7 {
-line, _ := tsvReader.FieldPos(0)
-return result, fmt.Errorf("invalid record in scrobblerlog line %v", line)
-}
-
-rating := row[5]
-if !includeSkipped && rating == "S" {
-continue
-}
-
-client := strings.Split(result.Client, " ")[0]
-listen, err := rowToListen(row, client)
-if err != nil {
-return result, err
-}
-
-result.Listens = append(result.Listens, listen)
-}
-
-return result, nil
-}
-
-func Write(data io.Writer, listens models.ListensList) (lastTimestamp time.Time, err error) {
-tsvWriter := csv.NewWriter(data)
-tsvWriter.Comma = '\t'
-
-for _, listen := range listens {
-if listen.ListenedAt.Unix() > lastTimestamp.Unix() {
-lastTimestamp = listen.ListenedAt
-}
-
-// A row is:
-// artistName releaseName trackName trackNumber duration rating timestamp recordingMbid
-rating, ok := listen.AdditionalInfo["rockbox_rating"].(string)
-if !ok || rating == "" {
-rating = "L"
-}
-err = tsvWriter.Write([]string{
-listen.ArtistName(),
-listen.ReleaseName,
-listen.TrackName,
-strconv.Itoa(listen.TrackNumber),
-strconv.Itoa(int(listen.Duration.Seconds())),
-rating,
-strconv.Itoa(int(listen.ListenedAt.Unix())),
-string(listen.RecordingMbid),
-})
-}
-
-tsvWriter.Flush()
-return
-}
-
-func ReadHeader(reader *bufio.Reader, log *ScrobblerLog) error {
-// Skip header
-for i := 0; i < 3; i++ {
-line, _, err := reader.ReadLine()
-if err != nil {
-return err
-}
-
-if len(line) == 0 || line[0] != '#' {
-err = fmt.Errorf("unexpected header (line %v)", i)
-} else {
-text := string(line)
-if i == 0 && !strings.HasPrefix(text, "#AUDIOSCROBBLER/1") {
-err = fmt.Errorf("not a scrobbler log file")
-}
-
-timezone, found := strings.CutPrefix(text, "#TZ/")
-if found {
-log.Timezone = timezone
-}
-
-client, found := strings.CutPrefix(text, "#CLIENT/")
-if found {
-log.Client = client
-}
-}
-
-if err != nil {
-return err
-}
-}
-return nil
-}
-
-func WriteHeader(writer io.Writer, log *ScrobblerLog) error {
-headers := []string{
-"#AUDIOSCROBBLER/1.1\n",
-"#TZ/" + log.Timezone + "\n",
-"#CLIENT/" + log.Client + "\n",
-}
-for _, line := range headers {
-_, err := writer.Write([]byte(line))
-if err != nil {
-return err
-}
-}
-return nil
-}
-
-func rowToListen(row []string, client string) (models.Listen, error) {
-var listen models.Listen
-trackNumber, err := strconv.Atoi(row[3])
-if err != nil {
-return listen, err
-}
-
-duration, err := strconv.Atoi(row[4])
-if err != nil {
-return listen, err
-}
-
-timestamp, err := strconv.Atoi(row[6])
-if err != nil {
-return listen, err
-}
-
-listen = models.Listen{
-Track: models.Track{
-ArtistNames: []string{row[0]},
-ReleaseName: row[1],
-TrackName: row[2],
-TrackNumber: trackNumber,
-Duration: time.Duration(duration * int(time.Second)),
-AdditionalInfo: models.AdditionalInfo{
-"rockbox_rating": row[5],
-"media_player": client,
-},
-},
-ListenedAt: time.Unix(int64(timestamp), 0),
-}
-
-if len(row) > 7 {
-listen.Track.RecordingMbid = models.MBID(row[7])
-}
-
-return listen, nil
-}
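The parser deleted here reappears as the pkg/scrobblerlog package used by the backend below, where ReadHeader, WriteHeader, Parse and Append are methods on ScrobblerLog. To illustrate the tab-separated row format that parser handles (artistName, releaseName, trackName, trackNumber, duration, rating, timestamp, recordingMbid), here is a minimal standalone sketch; the track values are borrowed from the tests in this diff and the duration is an illustrative placeholder.

// Sketch: parse a single .scrobbler.log row with encoding/csv in TSV mode.
package main

import (
	"encoding/csv"
	"fmt"
	"strconv"
	"strings"
	"time"
)

func main() {
	row := "Dool\tHere Now, There Then\tOweynagat\t5\t417\tL\t1699859066\tc0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"

	r := csv.NewReader(strings.NewReader(row))
	r.Comma = '\t'
	r.FieldsPerRecord = -1 // the trailing recording MBID column is optional

	fields, err := r.Read()
	if err != nil {
		panic(err)
	}

	trackNumber, _ := strconv.Atoi(fields[3])
	durationSecs, _ := strconv.Atoi(fields[4])
	timestamp, _ := strconv.Atoi(fields[6])

	fmt.Printf("%s - %s (track %d, %s) listened at %s\n",
		fields[0], fields[2], trackNumber,
		time.Duration(durationSecs)*time.Second,
		time.Unix(int64(timestamp), 0).UTC())
}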
@ -1,5 +1,5 @@
|
||||||
/*
|
/*
|
||||||
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
|
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
|
||||||
|
|
||||||
This file is part of Scotty.
|
This file is part of Scotty.
|
||||||
|
|
||||||
|
@ -18,21 +18,25 @@ package scrobblerlog
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"sort"
|
"sort"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"go.uploadedlobster.com/scotty/internal/config"
|
"go.uploadedlobster.com/scotty/internal/config"
|
||||||
"go.uploadedlobster.com/scotty/internal/i18n"
|
"go.uploadedlobster.com/scotty/internal/i18n"
|
||||||
"go.uploadedlobster.com/scotty/internal/models"
|
"go.uploadedlobster.com/scotty/internal/models"
|
||||||
|
"go.uploadedlobster.com/scotty/pkg/scrobblerlog"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ScrobblerLogBackend struct {
|
type ScrobblerLogBackend struct {
|
||||||
filePath string
|
filePath string
|
||||||
includeSkipped bool
|
ignoreSkipped bool
|
||||||
append bool
|
append bool
|
||||||
file *os.File
|
file *os.File
|
||||||
log ScrobblerLog
|
timezone *time.Location
|
||||||
|
log scrobblerlog.ScrobblerLog
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *ScrobblerLogBackend) Name() string { return "scrobbler-log" }
|
func (b *ScrobblerLogBackend) Name() string { return "scrobbler-log" }
|
||||||
|
@ -43,26 +47,40 @@ func (b *ScrobblerLogBackend) Options() []models.BackendOption {
|
||||||
Label: i18n.Tr("File path"),
|
Label: i18n.Tr("File path"),
|
||||||
Type: models.String,
|
Type: models.String,
|
||||||
}, {
|
}, {
|
||||||
Name: "include-skipped",
|
Name: "ignore-skipped",
|
||||||
Label: i18n.Tr("Include skipped listens"),
|
Label: i18n.Tr("Ignore skipped listens"),
|
||||||
Type: models.Bool,
|
Type: models.Bool,
|
||||||
|
Default: "true",
|
||||||
}, {
|
}, {
|
||||||
Name: "append",
|
Name: "append",
|
||||||
Label: i18n.Tr("Append to file"),
|
Label: i18n.Tr("Append to file"),
|
||||||
Type: models.Bool,
|
Type: models.Bool,
|
||||||
Default: "true",
|
Default: "true",
|
||||||
|
}, {
|
||||||
|
Name: "time-zone",
|
||||||
|
Label: i18n.Tr("Specify a time zone for the listen timestamps"),
|
||||||
|
Type: models.String,
|
||||||
}}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *ScrobblerLogBackend) FromConfig(config *config.ServiceConfig) models.Backend {
|
func (b *ScrobblerLogBackend) InitConfig(config *config.ServiceConfig) error {
|
||||||
b.filePath = config.GetString("file-path")
|
b.filePath = config.GetString("file-path")
|
||||||
b.includeSkipped = config.GetBool("include-skipped", false)
|
b.ignoreSkipped = config.GetBool("ignore-skipped", true)
|
||||||
b.append = config.GetBool("append", true)
|
b.append = config.GetBool("append", true)
|
||||||
b.log = ScrobblerLog{
|
b.log = scrobblerlog.ScrobblerLog{
|
||||||
Timezone: "UNKNOWN",
|
TZ: scrobblerlog.TimezoneUTC,
|
||||||
Client: "Rockbox unknown $Revision$",
|
Client: "Rockbox unknown $Revision$",
|
||||||
}
|
}
|
||||||
return b
|
|
||||||
|
if timezone := config.GetString("time-zone"); timezone != "" {
|
||||||
|
location, err := time.LoadLocation(timezone)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("Invalid time-zone %q: %w", timezone, err)
|
||||||
|
}
|
||||||
|
b.log.FallbackTimezone = location
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
 func (b *ScrobblerLogBackend) StartImport() error {
@@ -88,7 +106,7 @@ func (b *ScrobblerLogBackend) StartImport() error {
 } else {
 // Verify existing file is a scrobbler log
 reader := bufio.NewReader(file)
-if err = ReadHeader(reader, &b.log); err != nil {
+if err = b.log.ReadHeader(reader); err != nil {
 file.Close()
 return err
 }
@@ -99,7 +117,7 @@ func (b *ScrobblerLogBackend) StartImport() error {
 }
 
 if !b.append {
-if err = WriteHeader(file, &b.log); err != nil {
+if err = b.log.WriteHeader(file); err != nil {
 file.Close()
 return err
 }
@@ -113,39 +131,95 @@ func (b *ScrobblerLogBackend) FinishImport() error {
 return b.file.Close()
 }
 
-func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *ScrobblerLogBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
-defer close(results)
 file, err := os.Open(b.filePath)
+p := models.TransferProgress{
+Export: &models.Progress{},
+}
 if err != nil {
-progress <- models.Progress{}.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.ListensResult{Error: err}
 return
 }
 
 defer file.Close()
 
-log, err := Parse(file, b.includeSkipped)
+err = b.log.Parse(file, b.ignoreSkipped)
 if err != nil {
-progress <- models.Progress{}.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.ListensResult{Error: err}
 return
 }
 
-listens := log.Listens.NewerThan(oldestTimestamp)
+listens := make(models.ListensList, 0, len(b.log.Records))
+client := strings.Split(b.log.Client, " ")[0]
+for _, record := range b.log.Records {
+listen := recordToListen(record, client)
+if listen.ListenedAt.After(oldestTimestamp) {
+listens = append(listens, recordToListen(record, client))
+}
+}
 sort.Sort(listens)
-progress <- models.Progress{Elapsed: int64(len(listens))}.Complete()
+p.Export.Total = int64(len(listens))
+p.Export.Complete()
+progress <- p
 results <- models.ListensResult{Items: listens}
 }
 
-func (b *ScrobblerLogBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.Progress) (models.ImportResult, error) {
+func (b *ScrobblerLogBackend) ImportListens(export models.ListensResult, importResult models.ImportResult, progress chan models.TransferProgress) (models.ImportResult, error) {
-lastTimestamp, err := Write(b.file, export.Items)
+records := make([]scrobblerlog.Record, len(export.Items))
+for i, listen := range export.Items {
+records[i] = listenToRecord(listen)
+}
+lastTimestamp, err := b.log.Append(b.file, records)
 if err != nil {
 return importResult, err
 }
 
 importResult.UpdateTimestamp(lastTimestamp)
 importResult.ImportCount += len(export.Items)
-progress <- models.Progress{}.FromImportResult(importResult)
+progress <- models.TransferProgress{}.FromImportResult(importResult, false)
 
 return importResult, nil
 }
 
+func recordToListen(record scrobblerlog.Record, client string) models.Listen {
+return models.Listen{
+ListenedAt: record.Timestamp,
+Track: models.Track{
+ArtistNames: []string{record.ArtistName},
+ReleaseName: record.AlbumName,
+TrackName: record.TrackName,
+TrackNumber: record.TrackNumber,
+Duration: record.Duration,
+RecordingMBID: record.MusicBrainzRecordingID,
+AdditionalInfo: models.AdditionalInfo{
+"rockbox_rating": record.Rating,
+"media_player": client,
+},
+},
+}
+}
+
+func listenToRecord(listen models.Listen) scrobblerlog.Record {
+var rating scrobblerlog.Rating
+rockboxRating, ok := listen.AdditionalInfo["rockbox_rating"].(string)
+if !ok || rockboxRating == "" {
+rating = scrobblerlog.RatingListened
+} else {
+rating = scrobblerlog.Rating(rockboxRating)
+}
+
+return scrobblerlog.Record{
+ArtistName: listen.ArtistName(),
+AlbumName: listen.ReleaseName,
+TrackName: listen.TrackName,
+TrackNumber: listen.TrackNumber,
+Duration: listen.Duration,
+Rating: rating,
+Timestamp: listen.ListenedAt,
+MusicBrainzRecordingID: listen.RecordingMBID,
+}
+}

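The export paths above all follow the same new reporting shape: the backend owns a `models.TransferProgress` whose `Export` field is aborted on failure and completed on success. A minimal sketch of that pattern, written as it would look inside one of Scotty's own packages (the `doExport` helper is a placeholder, not code from the repository):

```go
package example

import "go.uploadedlobster.com/scotty/internal/models"

// exportWithProgress sketches the reporting shape used by the updated
// backends: wrap an export-side Progress in a TransferProgress, abort it
// on error and complete it on success.
func exportWithProgress(results chan models.ListensResult, progress chan models.TransferProgress) {
	p := models.TransferProgress{Export: &models.Progress{}}

	listens, err := doExport() // placeholder for the backend-specific work
	if err != nil {
		p.Export.Abort() // mark the export as aborted instead of faking completion
		progress <- p
		results <- models.ListensResult{Error: err}
		return
	}

	p.Export.Total = int64(len(listens))
	p.Export.Complete()
	progress <- p
	results <- models.ListensResult{Items: listens}
}

// doExport stands in for the real backend call; it is not part of Scotty.
func doExport() (models.ListensList, error) {
	return models.ListensList{}, nil
}
```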
@@ -25,10 +25,21 @@ import (
 "go.uploadedlobster.com/scotty/internal/config"
 )
 
-func TestFromConfig(t *testing.T) {
+func TestInitConfig(t *testing.T) {
 c := viper.New()
 c.Set("token", "thetoken")
 service := config.NewServiceConfig("test", c)
-backend := (&scrobblerlog.ScrobblerLogBackend{}).FromConfig(&service)
-assert.IsType(t, &scrobblerlog.ScrobblerLogBackend{}, backend)
+backend := scrobblerlog.ScrobblerLogBackend{}
+err := backend.InitConfig(&service)
+assert.NoError(t, err)
+}
+
+func TestInitConfigInvalidTimezone(t *testing.T) {
+c := viper.New()
+configuredTimezone := "Invalid/Timezone"
+c.Set("time-zone", configuredTimezone)
+service := config.NewServiceConfig("test", c)
+backend := scrobblerlog.ScrobblerLogBackend{}
+err := backend.InitConfig(&service)
+assert.ErrorContains(t, err, `Invalid time-zone "Invalid/Timezone"`)
 }

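These tests also document the interface change from `FromConfig`, which returned the backend, to `InitConfig`, which configures the receiver and returns an error. A hedged sketch of how a caller might construct a backend under the new contract — the config key mirrors the test above, everything else is illustrative, and the snippet only compiles inside the module since the imported packages are internal:

```go
package example

import (
	"log"

	"github.com/spf13/viper"

	"go.uploadedlobster.com/scotty/internal/backends/scrobblerlog"
	"go.uploadedlobster.com/scotty/internal/config"
)

func newScrobblerLogBackend() (*scrobblerlog.ScrobblerLogBackend, error) {
	// The "token" key mirrors the test above; real setups would use the
	// backend's documented options instead.
	v := viper.New()
	v.Set("token", "thetoken")
	service := config.NewServiceConfig("test", v)

	backend := scrobblerlog.ScrobblerLogBackend{}
	// InitConfig replaces FromConfig: it configures the receiver and can
	// report problems (such as an invalid time-zone) as an error.
	if err := backend.InitConfig(&service); err != nil {
		return nil, err
	}
	return &backend, nil
}

func main() {
	if _, err := newScrobblerLogBackend(); err != nil {
		log.Fatal(err)
	}
}
```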
@@ -40,7 +40,7 @@ const (
 )
 
 type Client struct {
-HttpClient *resty.Client
+HTTPClient *resty.Client
 }
 
 func NewClient(token oauth2.TokenSource) Client {
@@ -55,7 +55,7 @@ func NewClient(token oauth2.TokenSource) Client {
 ratelimit.EnableHTTPHeaderRateLimit(client, "Retry-After")
 
 return Client{
-HttpClient: client,
+HTTPClient: client,
 }
 }
 
@@ -69,7 +69,7 @@ func (c Client) RecentlyPlayedBefore(before time.Time, limit int) (RecentlyPlaye
 
 func (c Client) recentlyPlayed(after *time.Time, before *time.Time, limit int) (result RecentlyPlayedResult, err error) {
 const path = "/me/player/recently-played"
-request := c.HttpClient.R().
+request := c.HTTPClient.R().
 SetQueryParam("limit", strconv.Itoa(limit)).
 SetResult(&result)
 if after != nil {
@@ -87,7 +87,7 @@ func (c Client) recentlyPlayed(after *time.Time, before *time.Time, limit int) (
 
 func (c Client) UserTracks(offset int, limit int) (result TracksResult, err error) {
 const path = "/me/tracks"
-response, err := c.HttpClient.R().
+response, err := c.HTTPClient.R().
 SetQueryParams(map[string]string{
 "offset": strconv.Itoa(offset),
 "limit": strconv.Itoa(limit),

@@ -43,7 +43,7 @@ func TestRecentlyPlayedAfter(t *testing.T) {
 defer httpmock.DeactivateAndReset()
 
 client := spotify.NewClient(nil)
-setupHttpMock(t, client.HttpClient.GetClient(),
+setupHTTPMock(t, client.HTTPClient.GetClient(),
 "https://api.spotify.com/v1/me/player/recently-played",
 "testdata/recently-played.json")
 
@@ -63,7 +63,7 @@ func TestGetUserTracks(t *testing.T) {
 defer httpmock.DeactivateAndReset()
 
 client := spotify.NewClient(nil)
-setupHttpMock(t, client.HttpClient.GetClient(),
+setupHTTPMock(t, client.HTTPClient.GetClient(),
 "https://api.spotify.com/v1/me/tracks",
 "testdata/user-tracks.json")
 
@@ -79,7 +79,7 @@ func TestGetUserTracks(t *testing.T) {
 assert.Equal("Zeal & Ardor", track1.Track.Album.Name)
 }
 
-func setupHttpMock(t *testing.T, client *http.Client, url string, testDataPath string) {
+func setupHTTPMock(t *testing.T, client *http.Client, url string, testDataPath string) {
 httpmock.ActivateNonDefault(client)
 
 responder, err := httpmock.NewJsonResponder(200, httpmock.File(testDataPath))

@@ -22,6 +22,8 @@ THE SOFTWARE.
 
 package spotify
 
+import "go.uploadedlobster.com/mbtypes"
+
 type TracksResult struct {
 Href string `json:"href"`
 Limit int `json:"limit"`
@@ -56,7 +58,7 @@ type Listen struct {
 }
 
 type Track struct {
-Id string `json:"id"`
+ID string `json:"id"`
 Name string `json:"name"`
 Href string `json:"href"`
 Uri string `json:"uri"`
@@ -67,14 +69,14 @@ type Track struct {
 Explicit bool `json:"explicit"`
 IsLocal bool `json:"is_local"`
 Popularity int `json:"popularity"`
-ExternalIds ExternalIds `json:"external_ids"`
-ExternalUrls ExternalUrls `json:"external_urls"`
+ExternalIDs ExternalIDs `json:"external_ids"`
+ExternalURLs ExternalURLs `json:"external_urls"`
 Album Album `json:"album"`
 Artists []Artist `json:"artists"`
 }
 
 type Album struct {
-Id string `json:"id"`
+ID string `json:"id"`
 Name string `json:"name"`
 Href string `json:"href"`
 Uri string `json:"uri"`
@@ -83,32 +85,32 @@ type Album struct {
 ReleaseDate string `json:"release_date"`
 ReleaseDatePrecision string `json:"release_date_precision"`
 AlbumType string `json:"album_type"`
-ExternalUrls ExternalUrls `json:"external_urls"`
+ExternalURLs ExternalURLs `json:"external_urls"`
 Artists []Artist `json:"artists"`
 Images []Image `json:"images"`
 }
 
 type Artist struct {
-Id string `json:"id"`
+ID string `json:"id"`
 Name string `json:"name"`
 Href string `json:"href"`
 Uri string `json:"uri"`
 Type string `json:"type"`
-ExternalUrls ExternalUrls `json:"external_urls"`
+ExternalURLs ExternalURLs `json:"external_urls"`
 }
 
-type ExternalIds struct {
-ISRC string `json:"isrc"`
+type ExternalIDs struct {
+ISRC mbtypes.ISRC `json:"isrc"`
 EAN string `json:"ean"`
 UPC string `json:"upc"`
 }
 
-type ExternalUrls struct {
+type ExternalURLs struct {
 Spotify string `json:"spotify"`
 }
 
 type Image struct {
-Url string `json:"url"`
+URL string `json:"url"`
 Height int `json:"height"`
 Width int `json:"width"`
 }

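With `ExternalIDs.ISRC` now typed as `mbtypes.ISRC`, decoding a Spotify payload yields a typed ISRC directly. A small illustrative sketch, assuming `mbtypes.ISRC` has a string underlying type (which the conversions in the tests later in this diff suggest) and using a made-up payload:

```go
package example

import (
	"encoding/json"
	"fmt"

	"go.uploadedlobster.com/mbtypes"
)

// Reduced local copies of the structs above, enough to show the json
// tags and the typed ISRC field in action.
type externalIDs struct {
	ISRC mbtypes.ISRC `json:"isrc"`
}

type track struct {
	ID          string      `json:"id"`
	Name        string      `json:"name"`
	ExternalIDs externalIDs `json:"external_ids"`
}

func main() {
	// Made-up payload shaped like the Spotify track object.
	data := []byte(`{"id":"abc123","name":"Example","external_ids":{"isrc":"DES561620801"}}`)

	var t track
	if err := json.Unmarshal(data, &t); err != nil {
		panic(err)
	}
	// The ISRC arrives as a typed value rather than a plain string.
	fmt.Println(t.Name, t.ExternalIDs.ISRC)
}
```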
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -34,7 +34,7 @@ import (
 
 type SpotifyApiBackend struct {
 client Client
-clientId string
+clientID string
 clientSecret string
 }
 
@@ -52,15 +52,15 @@ func (b *SpotifyApiBackend) Options() []models.BackendOption {
 }}
 }
 
-func (b *SpotifyApiBackend) FromConfig(config *config.ServiceConfig) models.Backend {
-b.clientId = config.GetString("client-id")
+func (b *SpotifyApiBackend) InitConfig(config *config.ServiceConfig) error {
+b.clientID = config.GetString("client-id")
 b.clientSecret = config.GetString("client-secret")
-return b
+return nil
 }
 
-func (b *SpotifyApiBackend) OAuth2Strategy(redirectUrl *url.URL) auth.OAuth2Strategy {
+func (b *SpotifyApiBackend) OAuth2Strategy(redirectURL *url.URL) auth.OAuth2Strategy {
 conf := oauth2.Config{
-ClientID: b.clientId,
+ClientID: b.clientID,
 ClientSecret: b.clientSecret,
 Scopes: []string{
 "user-read-currently-playing",
@@ -68,16 +68,16 @@ func (b *SpotifyApiBackend) OAuth2Strategy(redirectUrl *url.URL) auth.OAuth2Stra
 "user-library-read",
 "user-library-modify",
 },
-RedirectURL: redirectUrl.String(),
+RedirectURL: redirectURL.String(),
 Endpoint: spotify.Endpoint,
 }
 
 return auth.NewStandardStrategy(conf)
 }
 
-func (b *SpotifyApiBackend) OAuth2Config(redirectUrl *url.URL) oauth2.Config {
+func (b *SpotifyApiBackend) OAuth2Config(redirectURL *url.URL) oauth2.Config {
 return oauth2.Config{
-ClientID: b.clientId,
+ClientID: b.clientID,
 ClientSecret: b.clientSecret,
 Scopes: []string{
 "user-read-currently-playing",
@@ -85,7 +85,7 @@ func (b *SpotifyApiBackend) OAuth2Config(redirectUrl *url.URL) oauth2.Config {
 "user-library-read",
 "user-library-modify",
 },
-RedirectURL: redirectUrl.String(),
+RedirectURL: redirectURL.String(),
 Endpoint: spotify.Endpoint,
 }
 }
@@ -95,20 +95,22 @@ func (b *SpotifyApiBackend) OAuth2Setup(token oauth2.TokenSource) error {
 return nil
 }
 
-func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
 startTime := time.Now()
 minTime := oldestTimestamp
 
 totalDuration := startTime.Sub(oldestTimestamp)
-defer close(results)
-p := models.Progress{Total: int64(totalDuration.Seconds())}
+p := models.TransferProgress{
+Export: &models.Progress{
+Total: int64(totalDuration.Seconds()),
+},
+}
 
 for {
 result, err := b.client.RecentlyPlayedAfter(minTime, MaxItemsPerGet)
 if err != nil {
-progress <- p.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.ListensResult{Error: err}
 return
 }
@@ -120,7 +122,8 @@ func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results cha
 // Set minTime to the newest returned listen
 after, err := strconv.ParseInt(result.Cursors.After, 10, 64)
 if err != nil {
-progress <- p.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.ListensResult{Error: err}
 return
 } else if after <= minTime.Unix() {
@@ -139,7 +142,7 @@ func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results cha
 
 for _, listen := range result.Items {
 l := listen.AsListen()
-if l.ListenedAt.Unix() > oldestTimestamp.Unix() {
+if l.ListenedAt.After(oldestTimestamp) {
 listens = append(listens, l)
 } else {
 // result contains listens older then oldestTimestamp
@@ -148,24 +151,28 @@ func (b *SpotifyApiBackend) ExportListens(oldestTimestamp time.Time, results cha
 }
 
 sort.Sort(listens)
-p.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
+p.Export.TotalItems += len(listens)
+p.Export.Elapsed = int64(totalDuration.Seconds() - remainingTime.Seconds())
 progress <- p
 results <- models.ListensResult{Items: listens, OldestTimestamp: minTime}
 }
 
 results <- models.ListensResult{OldestTimestamp: minTime}
-progress <- p.Complete()
+p.Export.Complete()
+progress <- p
 }
 
-func (b *SpotifyApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *SpotifyApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
 // Choose a high offset, we attempt to search the loves backwards starting
 // at the oldest one.
 offset := math.MaxInt32
 perPage := MaxItemsPerGet
 
-defer close(results)
-p := models.Progress{Total: int64(perPage)}
+p := models.TransferProgress{
+Export: &models.Progress{
+Total: int64(perPage),
+},
+}
 totalCount := 0
 exportCount := 0
 
@@ -173,7 +180,8 @@ out:
 for {
 result, err := b.client.UserTracks(offset, perPage)
 if err != nil {
-progress <- p.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.LovesResult{Error: err}
 return
 }
@@ -181,7 +189,7 @@ out:
 // The offset was higher then the actual number of tracks. Adjust the offset
 // and continue.
 if offset >= result.Total {
-p.Total = int64(result.Total)
+p.Export.Total = int64(result.Total)
 totalCount = result.Total
 offset = max(result.Total-perPage, 0)
 continue
@@ -195,7 +203,7 @@ out:
 loves := make(models.LovesList, 0, perPage)
 for _, track := range result.Items {
 love := track.AsLove()
-if love.Created.Unix() > oldestTimestamp.Unix() {
+if love.Created.After(oldestTimestamp) {
 loves = append(loves, love)
 } else {
 continue
@@ -205,7 +213,7 @@ out:
 exportCount += len(loves)
 sort.Sort(loves)
 results <- models.LovesResult{Items: loves, Total: totalCount}
-p.Elapsed += int64(count)
+p.Export.Elapsed += int64(count)
 progress <- p
 
 if offset <= 0 {
@@ -220,7 +228,8 @@ out:
 }
 
 results <- models.LovesResult{Total: exportCount}
-progress <- p.Complete()
+p.Export.Complete()
+progress <- p
 }
 
 func (l Listen) AsListen() models.Listen {
@@ -251,7 +260,7 @@ func (t Track) AsTrack() models.Track {
 Duration: time.Duration(t.DurationMs * int(time.Millisecond)),
 TrackNumber: t.TrackNumber,
 DiscNumber: t.DiscNumber,
-ISRC: t.ExternalIds.ISRC,
+ISRC: t.ExternalIDs.ISRC,
 AdditionalInfo: map[string]any{},
 }
 
@@ -264,30 +273,30 @@ func (t Track) AsTrack() models.Track {
 info["music_service"] = "spotify.com"
 }
 
-if t.ExternalUrls.Spotify != "" {
-info["origin_url"] = t.ExternalUrls.Spotify
-info["spotify_id"] = t.ExternalUrls.Spotify
+if t.ExternalURLs.Spotify != "" {
+info["origin_url"] = t.ExternalURLs.Spotify
+info["spotify_id"] = t.ExternalURLs.Spotify
 }
 
-if t.Album.ExternalUrls.Spotify != "" {
-info["spotify_album_id"] = t.Album.ExternalUrls.Spotify
+if t.Album.ExternalURLs.Spotify != "" {
+info["spotify_album_id"] = t.Album.ExternalURLs.Spotify
 }
 
 if len(t.Artists) > 0 {
-info["spotify_artist_ids"] = extractArtistIds(t.Artists)
+info["spotify_artist_ids"] = extractArtistIDs(t.Artists)
 }
 
 if len(t.Album.Artists) > 0 {
-info["spotify_album_artist_ids"] = extractArtistIds(t.Album.Artists)
+info["spotify_album_artist_ids"] = extractArtistIDs(t.Album.Artists)
 }
 
 return track
 }
 
-func extractArtistIds(artists []Artist) []string {
-artistIds := make([]string, len(artists))
+func extractArtistIDs(artists []Artist) []string {
+artistIDs := make([]string, len(artists))
 for i, artist := range artists {
-artistIds[i] = artist.ExternalUrls.Spotify
+artistIDs[i] = artist.ExternalURLs.Spotify
 }
-return artistIds
+return artistIDs
 }

@@ -26,6 +26,7 @@ import (
 "github.com/spf13/viper"
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
+"go.uploadedlobster.com/mbtypes"
 "go.uploadedlobster.com/scotty/internal/backends/spotify"
 "go.uploadedlobster.com/scotty/internal/config"
 )
@@ -37,13 +38,14 @@ var (
 testTrack []byte
 )
 
-func TestFromConfig(t *testing.T) {
+func TestInitConfig(t *testing.T) {
 c := viper.New()
 c.Set("client-id", "someclientid")
 c.Set("client-secret", "someclientsecret")
 service := config.NewServiceConfig("test", c)
-backend := (&spotify.SpotifyApiBackend{}).FromConfig(&service)
-assert.IsType(t, &spotify.SpotifyApiBackend{}, backend)
+backend := spotify.SpotifyApiBackend{}
+err := backend.InitConfig(&service)
+assert.NoError(t, err)
 }
 
 func TestSpotifyListenAsListen(t *testing.T) {
@@ -59,7 +61,7 @@ func TestSpotifyListenAsListen(t *testing.T) {
 assert.Equal(t, []string{"Dool"}, listen.ArtistNames)
 assert.Equal(t, 5, listen.TrackNumber)
 assert.Equal(t, 1, listen.DiscNumber)
-assert.Equal(t, "DES561620801", listen.ISRC)
+assert.Equal(t, mbtypes.ISRC("DES561620801"), listen.ISRC)
 info := listen.AdditionalInfo
 assert.Equal(t, "spotify.com", info["music_service"])
 assert.Equal(t, "https://open.spotify.com/track/2JKUgGuXK3dEvyuIJ4Yj2V", info["origin_url"])

@@ -92,8 +92,8 @@ func (i HistoryItem) AsListen() models.Listen {
 PlaybackDuration: time.Duration(i.MillisecondsPlayed * int(time.Millisecond)),
 UserName: i.UserName,
 }
-if trackUrl, err := formatSpotifyUri(i.SpotifyTrackUri); err != nil {
-listen.AdditionalInfo["spotify_id"] = trackUrl
+if trackURL, err := formatSpotifyUri(i.SpotifyTrackUri); err != nil {
+listen.AdditionalInfo["spotify_id"] = trackURL
 }
 return listen
 }

@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -64,31 +64,35 @@ func (b *SpotifyHistoryBackend) Options() []models.BackendOption {
 }}
 }
 
-func (b *SpotifyHistoryBackend) FromConfig(config *config.ServiceConfig) models.Backend {
+func (b *SpotifyHistoryBackend) InitConfig(config *config.ServiceConfig) error {
 b.dirPath = config.GetString("dir-path")
 b.ignoreIncognito = config.GetBool("ignore-incognito", true)
 b.ignoreSkipped = config.GetBool("ignore-skipped", false)
 b.skippedMinSeconds = config.GetInt("ignore-min-duration-seconds", 30)
-return b
+return nil
 }
 
-func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.Progress) {
+func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results chan models.ListensResult, progress chan models.TransferProgress) {
-defer close(results)
 
 files, err := filepath.Glob(path.Join(b.dirPath, historyFileGlob))
+p := models.TransferProgress{
+Export: &models.Progress{},
+}
+
 if err != nil {
-progress <- models.Progress{}.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.ListensResult{Error: err}
 return
 }
 
 slices.Sort(files)
 fileCount := int64(len(files))
-p := models.Progress{Total: fileCount}
+p.Export.Total = fileCount
 for i, filePath := range files {
 history, err := readHistoryFile(filePath)
 if err != nil {
-progress <- models.Progress{}.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.ListensResult{Error: err}
 return
 }
@@ -99,11 +103,13 @@ func (b *SpotifyHistoryBackend) ExportListens(oldestTimestamp time.Time, results
 })
 sort.Sort(listens)
 results <- models.ListensResult{Items: listens}
-p.Elapsed = int64(i)
+p.Export.Elapsed = int64(i)
+p.Export.TotalItems += len(listens)
 progress <- p
 }
 
-progress <- p.Complete()
+p.Export.Complete()
+progress <- p
 }
 
 func readHistoryFile(filePath string) (StreamingHistory, error) {

@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -21,7 +21,8 @@ import (
 "sort"
 "time"
 
-"github.com/delucks/go-subsonic"
+"github.com/supersonic-app/go-subsonic/subsonic"
+"go.uploadedlobster.com/mbtypes"
 "go.uploadedlobster.com/scotty/internal/config"
 "go.uploadedlobster.com/scotty/internal/i18n"
 "go.uploadedlobster.com/scotty/internal/models"
@@ -51,7 +52,7 @@ func (b *SubsonicApiBackend) Options() []models.BackendOption {
 }}
 }
 
-func (b *SubsonicApiBackend) FromConfig(config *config.ServiceConfig) models.Backend {
+func (b *SubsonicApiBackend) InitConfig(config *config.ServiceConfig) error {
 b.client = subsonic.Client{
 Client: &http.Client{},
 BaseUrl: config.GetString("server-url"),
@@ -59,34 +60,41 @@ func (b *SubsonicApiBackend) FromConfig(config *config.ServiceConfig) models.Bac
 ClientName: version.AppName,
 }
 b.password = config.GetString("token")
-return b
+return nil
 }
 
-func (b *SubsonicApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.Progress) {
+func (b *SubsonicApiBackend) ExportLoves(oldestTimestamp time.Time, results chan models.LovesResult, progress chan models.TransferProgress) {
-defer close(results)
 err := b.client.Authenticate(b.password)
+p := models.TransferProgress{
+Export: &models.Progress{},
+}
 if err != nil {
-progress <- models.Progress{}.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.LovesResult{Error: err}
 return
 }
 
 starred, err := b.client.GetStarred2(map[string]string{})
 if err != nil {
-progress <- models.Progress{}.Complete()
+p.Export.Abort()
+progress <- p
 results <- models.LovesResult{Error: err}
 return
 }
 
-progress <- models.Progress{Elapsed: int64(len(starred.Song))}.Complete()
-results <- models.LovesResult{Items: b.filterSongs(starred.Song, oldestTimestamp)}
+loves := b.filterSongs(starred.Song, oldestTimestamp)
+p.Export.Total = int64(len(loves))
+p.Export.Complete()
+progress <- p
+results <- models.LovesResult{Items: loves}
 }
 
 func (b *SubsonicApiBackend) filterSongs(songs []*subsonic.Child, oldestTimestamp time.Time) models.LovesList {
 loves := make(models.LovesList, 0, len(songs))
 for _, song := range songs {
 love := SongAsLove(*song, b.client.User)
-if love.Created.Unix() > oldestTimestamp.Unix() {
+if love.Created.After(oldestTimestamp) {
 loves = append(loves, love)
 }
 }
@@ -96,15 +104,19 @@ func (b *SubsonicApiBackend) filterSongs(songs []*subsonic.Child, oldestTimestam
 }
 
 func SongAsLove(song subsonic.Child, username string) models.Love {
+recordingMBID := mbtypes.MBID(song.MusicBrainzID)
 love := models.Love{
 UserName: username,
 Created: song.Starred,
+RecordingMBID: recordingMBID,
 Track: models.Track{
 TrackName: song.Title,
 ReleaseName: song.Album,
 ArtistNames: []string{song.Artist},
 TrackNumber: song.Track,
 DiscNumber: song.DiscNumber,
+RecordingMBID: recordingMBID,
+Tags: []string{},
 AdditionalInfo: map[string]any{
 "subsonic_id": song.ID,
 },
@@ -112,7 +124,13 @@ func SongAsLove(song subsonic.Child, username string) models.Love {
 },
 }
 
-if song.Genre != "" {
+if len(song.Genres) > 0 {
+genres := make([]string, 0, len(song.Genres))
+for _, genre := range song.Genres {
+genres = append(genres, genre.Name)
+}
+love.Track.Tags = genres
+} else if song.Genre != "" {
 love.Track.Tags = []string{song.Genre}
 }
 

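The genre handling introduced above prefers the multi-valued `Genres` field and only falls back to the legacy single `Genre` string. Pulled out as a standalone helper it reads roughly like this (the field names follow the hunk above; the helper itself is illustrative, not part of the repository):

```go
package example

import "github.com/supersonic-app/go-subsonic/subsonic"

// trackTags prefers the multi-valued Genres field of newer Subsonic
// servers and falls back to the legacy single Genre string, mirroring
// the hunk above.
func trackTags(song subsonic.Child) []string {
	if len(song.Genres) > 0 {
		tags := make([]string, 0, len(song.Genres))
		for _, genre := range song.Genres {
			tags = append(tags, genre.Name)
		}
		return tags
	}
	if song.Genre != "" {
		return []string{song.Genre}
	}
	return []string{}
}
```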
@@ -20,20 +20,21 @@ import (
 "testing"
 "time"
 
-go_subsonic "github.com/delucks/go-subsonic"
 "github.com/spf13/viper"
 "github.com/stretchr/testify/assert"
+go_subsonic "github.com/supersonic-app/go-subsonic/subsonic"
 "go.uploadedlobster.com/scotty/internal/backends/subsonic"
 "go.uploadedlobster.com/scotty/internal/config"
 )
 
-func TestFromConfig(t *testing.T) {
+func TestInitConfig(t *testing.T) {
 c := viper.New()
 c.Set("server-url", "https://subsonic.example.com")
 c.Set("token", "thetoken")
 service := config.NewServiceConfig("test", c)
-backend := (&subsonic.SubsonicApiBackend{}).FromConfig(&service)
-assert.IsType(t, &subsonic.SubsonicApiBackend{}, backend)
+backend := subsonic.SubsonicApiBackend{}
+err := backend.InitConfig(&service)
+assert.NoError(t, err)
 }
 
 func TestSongToLove(t *testing.T) {

@@ -43,20 +43,20 @@ func AuthenticationFlow(service config.ServiceConfig, backend auth.OAuth2Authent
 
 state := auth.RandomState()
 // Redirect user to consent page to ask for permission specified scopes.
-authUrl := strategy.AuthCodeURL(verifier, state)
+authURL := strategy.AuthCodeURL(verifier, state)
 
 // Start an HTTP server to listen for the response
 responseChan := make(chan auth.CodeResponse)
-auth.RunOauth2CallbackServer(*redirectURL, authUrl.Param, responseChan)
+auth.RunOauth2CallbackServer(*redirectURL, authURL.Param, responseChan)
 
 // Open the URL
-fmt.Println(i18n.Tr("Visit the URL for authorization: %v", authUrl.Url))
-err = browser.OpenURL(authUrl.Url)
+fmt.Println(i18n.Tr("Visit the URL for authorization: %v", authURL.URL))
+err = browser.OpenURL(authURL.URL)
 cobra.CheckErr(err)
 
 // Retrieve the code from the authentication callback
 code := <-responseChan
-if code.State != authUrl.State {
+if code.State != authURL.State {
 cobra.CompErrorln(i18n.Tr("Error: OAuth state mismatch"))
 os.Exit(1)
 }

@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -28,7 +28,19 @@ import (
 "go.uploadedlobster.com/scotty/internal/models"
 )
 
-func progressBar(wg *sync.WaitGroup, exportProgress chan models.Progress, importProgress chan models.Progress) *mpb.Progress {
+type progressBarUpdater struct {
+wg *sync.WaitGroup
+progress *mpb.Progress
+exportBar *mpb.Bar
+importBar *mpb.Bar
+updateChan chan models.TransferProgress
+lastExportUpdate time.Time
+totalItems int
+importedItems int
+}
+
+func setupProgressBars(updateChan chan models.TransferProgress) progressBarUpdater {
+wg := &sync.WaitGroup{}
 p := mpb.New(
 mpb.WithWaitGroup(wg),
 mpb.WithOutput(color.Output),
@@ -36,16 +48,81 @@ func progressBar(wg *sync.WaitGroup, exportProgress chan models.Progress, import
 mpb.WithAutoRefresh(),
 )
 
-exportBar := setupProgressBar(p, i18n.Tr("exporting"))
-importBar := setupProgressBar(p, i18n.Tr("importing"))
-go updateProgressBar(exportBar, wg, exportProgress)
-go updateProgressBar(importBar, wg, importProgress)
-return p
+u := progressBarUpdater{
+wg: wg,
+progress: p,
+exportBar: initExportProgressBar(p, i18n.Tr("exporting")),
+importBar: initImportProgressBar(p, i18n.Tr("importing")),
+updateChan: updateChan,
 }
 
-func setupProgressBar(p *mpb.Progress, name string) *mpb.Bar {
+go u.update()
+return u
+}
+
+func (u *progressBarUpdater) close() {
+close(u.updateChan)
+u.progress.Wait()
+}
+
+func (u *progressBarUpdater) update() {
+u.wg.Add(1)
+defer u.wg.Done()
+u.lastExportUpdate = time.Now()
+for progress := range u.updateChan {
+if progress.Export != nil {
+u.updateExportProgress(progress.Export)
+}
+
+if progress.Import != nil {
+if int64(u.totalItems) > progress.Import.Total {
+progress.Import.Total = int64(u.totalItems)
+}
+u.updateImportProgress(progress.Import)
+}
+}
+}
+
+func (u *progressBarUpdater) updateExportProgress(progress *models.Progress) {
+bar := u.exportBar
+u.totalItems = progress.TotalItems
+
+if progress.Aborted {
+bar.Abort(false)
+return
+}
+
+oldIterTime := u.lastExportUpdate
+u.lastExportUpdate = time.Now()
+elapsedTime := u.lastExportUpdate.Sub(oldIterTime)
+bar.EwmaSetCurrent(progress.Elapsed, elapsedTime)
+bar.SetTotal(progress.Total, progress.Completed)
+}
+
+func (u *progressBarUpdater) updateImportProgress(progress *models.Progress) {
+bar := u.importBar
+
+if progress.Aborted {
+bar.Abort(false)
+return
+}
+
+bar.SetCurrent(progress.Elapsed)
+bar.SetTotal(progress.Total, progress.Completed)
+}
+
+func initExportProgressBar(p *mpb.Progress, name string) *mpb.Bar {
+return initProgressBar(p, name,
+decor.EwmaETA(decor.ET_STYLE_GO, 0, decor.WC{C: decor.DSyncWidth}))
+}
+
+func initImportProgressBar(p *mpb.Progress, name string) *mpb.Bar {
+return initProgressBar(p, name, decor.Counters(0, "%d / %d"))
+}
+
+func initProgressBar(p *mpb.Progress, name string, progressDecorator decor.Decorator) *mpb.Bar {
 green := color.New(color.FgGreen).SprintFunc()
+red := color.New(color.FgHiRed, color.Bold).SprintFunc()
 return p.New(0,
 mpb.BarStyle(),
 mpb.PrependDecorators(
@@ -58,23 +135,13 @@ func setupProgressBar(p *mpb.Progress, name string) *mpb.Bar {
 ),
 mpb.AppendDecorators(
 decor.OnComplete(
-decor.EwmaETA(decor.ET_STYLE_GO, 0, decor.WC{C: decor.DSyncWidth}),
+decor.OnAbort(
+progressDecorator,
+red(i18n.Tr("aborted")),
+),
 i18n.Tr("done"),
 ),
-// decor.OnComplete(decor.Percentage(decor.WC{W: 5, C: decor.DSyncWidthR}), "done"),
 decor.Name(" "),
 ),
 )
 }
 
-func updateProgressBar(bar *mpb.Bar, wg *sync.WaitGroup, progressChan chan models.Progress) {
-wg.Add(1)
-defer wg.Done()
-lastIterTime := time.Now()
-for progress := range progressChan {
-oldIterTime := lastIterTime
-lastIterTime = time.Now()
-bar.EwmaSetCurrent(progress.Elapsed, lastIterTime.Sub(oldIterTime))
-bar.SetTotal(progress.Total, progress.Completed)
-}
-}

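The new `progressBarUpdater` consumes a single channel of `models.TransferProgress` and dispatches on whichever side is set. A stripped-down sketch of the same consumption pattern without mpb, using only types shown in this diff (`fmt` output stands in for the bars):

```go
package example

import (
	"fmt"
	"sync"

	"go.uploadedlobster.com/scotty/internal/models"
)

// consume mirrors progressBarUpdater.update(): one channel carries both
// export and import progress, and the receiver dispatches on which
// pointer is set.
func consume(wg *sync.WaitGroup, updates chan models.TransferProgress) {
	defer wg.Done()
	for p := range updates {
		if p.Export != nil {
			fmt.Printf("export: %d/%d aborted=%v\n", p.Export.Elapsed, p.Export.Total, p.Export.Aborted)
		}
		if p.Import != nil {
			fmt.Printf("import: %d/%d done=%v\n", p.Import.Elapsed, p.Import.Total, p.Import.Completed)
		}
	}
}

func main() {
	updates := make(chan models.TransferProgress)
	wg := &sync.WaitGroup{}
	wg.Add(1)
	go consume(wg, updates)

	p := models.TransferProgress{Export: &models.Progress{Total: 3}}
	p.Export.Complete() // sets Elapsed = Total and Completed = true
	updates <- p

	// Closing the channel ends the consumer loop, like progressBarUpdater.close().
	close(updates)
	wg.Wait()
}
```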
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@@ -88,7 +88,7 @@ func (c *TransferCmd[E, I, R]) resolveBackends(source string, target string) err
 }
 
 func (c *TransferCmd[E, I, R]) Transfer(exp backends.ExportProcessor[R], imp backends.ImportProcessor[R]) error {
-fmt.Println(i18n.Tr("Transferring %s from %s to %s...", c.entity, c.sourceName, c.targetName))
+fmt.Println(i18n.Tr("Transferring %s from %s to %s…", c.entity, c.sourceName, c.targetName))
 
 // Authenticate backends, if needed
 config := viper.GetViper()
@@ -110,36 +110,34 @@ func (c *TransferCmd[E, I, R]) Transfer(exp backends.ExportProcessor[R], imp bac
 printTimestamp("From timestamp: %v (%v)", timestamp)
 
 // Prepare progress bars
-exportProgress := make(chan models.Progress)
-importProgress := make(chan models.Progress)
-var wg sync.WaitGroup
-progress := progressBar(&wg, exportProgress, importProgress)
+progressChan := make(chan models.TransferProgress)
+progress := setupProgressBars(progressChan)
+wg := &sync.WaitGroup{}
 
 // Export from source
 exportChan := make(chan R, 1000)
-go exp.Process(timestamp, exportChan, exportProgress)
+go exp.Process(wg, timestamp, exportChan, progressChan)
 
 // Import into target
 resultChan := make(chan models.ImportResult)
-go imp.Process(exportChan, resultChan, importProgress)
+go imp.Process(wg, exportChan, resultChan, progressChan)
 result := <-resultChan
-if result.LastTimestamp.Unix() < timestamp.Unix() {
-result.LastTimestamp = timestamp
-}
 wg.Wait()
-progress.Wait()
+progress.close()
 
+// Update timestamp
+err = c.updateTimestamp(&result, timestamp)
+if err != nil {
+return err
+}
+
+fmt.Println(i18n.Tr("Imported %v of %v %s into %v.",
+result.ImportCount, result.TotalCount, c.entity, c.targetName))
 if result.Error != nil {
 printTimestamp("Import failed, last reported timestamp was %v (%s)", result.LastTimestamp)
 return result.Error
 }
-fmt.Println(i18n.Tr("Imported %v of %v %s into %v.",
-result.ImportCount, result.TotalCount, c.entity, c.targetName))
 
-// Update timestamp
-err = c.updateTimestamp(result, timestamp)
-if err != nil {
-return err
-}
 
 // Print errors
 if len(result.ImportLog) > 0 {
@@ -179,8 +177,8 @@ func (c *TransferCmd[E, I, R]) timestamp() (time.Time, error) {
 return time.Time{}, errors.New(i18n.Tr("invalid timestamp string \"%v\"", flagValue))
 }
 
-func (c *TransferCmd[E, I, R]) updateTimestamp(result models.ImportResult, oldTimestamp time.Time) error {
-if result.LastTimestamp.Unix() < oldTimestamp.Unix() {
+func (c *TransferCmd[E, I, R]) updateTimestamp(result *models.ImportResult, oldTimestamp time.Time) error {
+if oldTimestamp.After(result.LastTimestamp) {
 result.LastTimestamp = oldTimestamp
 }
 printTimestamp("Latest timestamp: %v (%v)", result.LastTimestamp)

@@ -16,11 +16,10 @@ Scotty. If not, see <https://www.gnu.org/licenses/>.
 package i18n
 
 import (
-"log"
-
 "github.com/Xuanwo/go-locale"
 _ "go.uploadedlobster.com/scotty/internal/translations"
 
+"golang.org/x/text/language"
 "golang.org/x/text/message"
 )
 
@@ -29,7 +28,7 @@ var localizer Localizer
 func init() {
 tag, err := locale.Detect()
 if err != nil {
-log.Fatal(err)
+tag = language.English
 }
 localizer = New(tag)
 }

@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
 
 This file is part of Scotty.
 
@@ -30,7 +30,7 @@ type Backend interface {
 Name() string
 
 // Initialize the backend from a config.
-FromConfig(config *config.ServiceConfig) Backend
+InitConfig(config *config.ServiceConfig) error
 
 // Return configuration options
 Options() []BackendOption
@@ -55,7 +55,7 @@ type ListensExport interface {
 // Returns a list of all listens newer then oldestTimestamp.
 // The returned list of listens is supposed to be ordered by the
 // Listen.ListenedAt timestamp, with the oldest entry first.
-ExportListens(oldestTimestamp time.Time, results chan ListensResult, progress chan Progress)
+ExportListens(oldestTimestamp time.Time, results chan ListensResult, progress chan TransferProgress)
 }
 
 // Must be implemented by services supporting the import of listens.
@@ -63,7 +63,7 @@ type ListensImport interface {
 ImportBackend
 
 // Imports the given list of listens.
-ImportListens(export ListensResult, importResult ImportResult, progress chan Progress) (ImportResult, error)
+ImportListens(export ListensResult, importResult ImportResult, progress chan TransferProgress) (ImportResult, error)
 }
 
 // Must be implemented by services supporting the export of loves.
@@ -73,7 +73,7 @@ type LovesExport interface {
 // Returns a list of all loves newer then oldestTimestamp.
 // The returned list of listens is supposed to be ordered by the
 // Love.Created timestamp, with the oldest entry first.
-ExportLoves(oldestTimestamp time.Time, results chan LovesResult, progress chan Progress)
+ExportLoves(oldestTimestamp time.Time, results chan LovesResult, progress chan TransferProgress)
 }
 
 // Must be implemented by services supporting the import of loves.
@@ -81,5 +81,5 @@ type LovesImport interface {
 ImportBackend
 
 // Imports the given list of loves.
-ImportLoves(export LovesResult, importResult ImportResult, progress chan Progress) (ImportResult, error)
+ImportLoves(export LovesResult, importResult ImportResult, progress chan TransferProgress) (ImportResult, error)
 }

@ -1,5 +1,5 @@
|
||||||
/*
|
/*
|
||||||
Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
|
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
@ -24,9 +24,10 @@ package models
|
||||||
import (
|
import (
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"go.uploadedlobster.com/mbtypes"
|
||||||
)
|
)
|
||||||
|
|
||||||
type MBID string
|
|
||||||
type Entity string
|
type Entity string
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
@ -43,12 +44,12 @@ type Track struct {
|
||||||
TrackNumber int
|
TrackNumber int
|
||||||
DiscNumber int
 	DiscNumber int
 	Duration time.Duration
-	ISRC string
-	RecordingMbid MBID
-	ReleaseMbid MBID
-	ReleaseGroupMbid MBID
-	ArtistMbids []MBID
-	WorkMbids []MBID
+	ISRC mbtypes.ISRC
+	RecordingMBID mbtypes.MBID
+	ReleaseMBID mbtypes.MBID
+	ReleaseGroupMBID mbtypes.MBID
+	ArtistMBIDs []mbtypes.MBID
+	WorkMBIDs []mbtypes.MBID
 	Tags []string
 	AdditionalInfo AdditionalInfo
 }
@@ -62,20 +63,20 @@ func (t *Track) FillAdditionalInfo() {
 	if t.AdditionalInfo == nil {
 		t.AdditionalInfo = make(AdditionalInfo, 5)
 	}
-	if t.RecordingMbid != "" {
-		t.AdditionalInfo["recording_mbid"] = t.RecordingMbid
+	if t.RecordingMBID != "" {
+		t.AdditionalInfo["recording_mbid"] = t.RecordingMBID
 	}
-	if t.ReleaseGroupMbid != "" {
-		t.AdditionalInfo["release_group_mbid"] = t.ReleaseGroupMbid
+	if t.ReleaseGroupMBID != "" {
+		t.AdditionalInfo["release_group_mbid"] = t.ReleaseGroupMBID
 	}
-	if t.ReleaseMbid != "" {
-		t.AdditionalInfo["release_mbid"] = t.ReleaseMbid
+	if t.ReleaseMBID != "" {
+		t.AdditionalInfo["release_mbid"] = t.ReleaseMBID
 	}
-	if len(t.ArtistMbids) > 0 {
-		t.AdditionalInfo["artist_mbids"] = t.ArtistMbids
+	if len(t.ArtistMBIDs) > 0 {
+		t.AdditionalInfo["artist_mbids"] = t.ArtistMBIDs
 	}
-	if len(t.WorkMbids) > 0 {
-		t.AdditionalInfo["work_mbids"] = t.WorkMbids
+	if len(t.WorkMBIDs) > 0 {
+		t.AdditionalInfo["work_mbids"] = t.WorkMBIDs
 	}
 	if t.ISRC != "" {
 		t.AdditionalInfo["isrc"] = t.ISRC
@@ -110,8 +111,8 @@ type Love struct {
 	Track
 	Created time.Time
 	UserName string
-	RecordingMbid MBID
-	RecordingMsid MBID
+	RecordingMBID mbtypes.MBID
+	RecordingMSID mbtypes.MBID
 }
 
 type ListensList []Listen
@@ -120,7 +121,7 @@ type ListensList []Listen
 func (l ListensList) NewerThan(t time.Time) ListensList {
 	result := make(ListensList, 0, len(l))
 	for _, item := range l {
-		if item.ListenedAt.Unix() > t.Unix() {
+		if item.ListenedAt.After(t) {
 			result = append(result, item)
 		}
 	}
@@ -132,7 +133,7 @@ func (l ListensList) Len() int {
 }
 
 func (l ListensList) Less(i, j int) bool {
-	return l[i].ListenedAt.Unix() < l[j].ListenedAt.Unix()
+	return l[j].ListenedAt.After(l[i].ListenedAt)
 }
 
 func (l ListensList) Swap(i, j int) {
@@ -146,7 +147,7 @@ func (l LovesList) Len() int {
 }
 
 func (l LovesList) Less(i, j int) bool {
-	return l[i].Created.Unix() < l[j].Created.Unix()
+	return l[j].Created.After(l[i].Created)
 }
 
 func (l LovesList) Swap(i, j int) {
@@ -189,7 +190,7 @@ type ImportResult struct {
 
 // Sets LastTimestamp to newTime, if newTime is newer than LastTimestamp
 func (i *ImportResult) UpdateTimestamp(newTime time.Time) {
-	if newTime.Unix() > i.LastTimestamp.Unix() {
+	if newTime.After(i.LastTimestamp) {
 		i.LastTimestamp = newTime
 	}
 }
@@ -208,10 +209,25 @@ func (i *ImportResult) Log(t LogEntryType, msg string) {
 	})
 }
 
+type TransferProgress struct {
+	Export *Progress
+	Import *Progress
+}
+
+func (p TransferProgress) FromImportResult(result ImportResult, completed bool) TransferProgress {
+	importProgress := Progress{
+		Completed: completed,
+	}.FromImportResult(result)
+	p.Import = &importProgress
+	return p
+}
+
 type Progress struct {
+	TotalItems int
 	Total int64
 	Elapsed int64
 	Completed bool
+	Aborted bool
 }
 
 func (p Progress) FromImportResult(result ImportResult) Progress {
@@ -220,8 +236,11 @@ func (p Progress) FromImportResult(result ImportResult) Progress {
 	return p
 }
 
-func (p Progress) Complete() Progress {
+func (p *Progress) Complete() {
 	p.Elapsed = p.Total
 	p.Completed = true
-	return p
+}
+
+func (p *Progress) Abort() {
+	p.Aborted = true
 }
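
The hunks above replace Unix()-based timestamp comparisons with time.Time.After and keep ListensList/LovesList as time-ordered sort.Interface implementations. A minimal, self-contained sketch of how the reworked Less behaves; the types here are reduced stand-ins inferred from the hunks, not the project's actual file:

	package main

	import (
		"fmt"
		"sort"
		"time"
	)

	// Reduced stand-ins for the types touched in the diff above (sketch only).
	type Listen struct {
		ListenedAt time.Time
		TrackName  string
	}

	type ListensList []Listen

	func (l ListensList) Len() int      { return len(l) }
	func (l ListensList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }

	// As in the new version: element i sorts before j when j was listened to later.
	func (l ListensList) Less(i, j int) bool {
		return l[j].ListenedAt.After(l[i].ListenedAt)
	}

	func main() {
		now := time.Now()
		listens := ListensList{
			{ListenedAt: now, TrackName: "newer"},
			{ListenedAt: now.Add(-time.Hour), TrackName: "older"},
		}
		sort.Sort(listens)
		fmt.Println(listens[0].TrackName, listens[1].TrackName) // older newer
	}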

@@ -28,6 +28,7 @@ import (
 
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+	"go.uploadedlobster.com/mbtypes"
 	"go.uploadedlobster.com/scotty/internal/models"
 )
 
@@ -44,25 +45,25 @@ func TestTrackArtistName(t *testing.T) {
 
 func TestTrackFillAdditionalInfo(t *testing.T) {
 	track := models.Track{
-		RecordingMbid: models.MBID("c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"),
-		ReleaseGroupMbid: models.MBID("80aca1ee-aa51-41be-9f75-024710d92ff4"),
-		ReleaseMbid: models.MBID("aa1ea1ac-7ec4-4542-a494-105afbfe547d"),
-		ArtistMbids: []models.MBID{"24412926-c7bd-48e8-afad-8a285b42e131"},
-		WorkMbids: []models.MBID{"c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"},
+		RecordingMBID: mbtypes.MBID("c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"),
+		ReleaseGroupMBID: mbtypes.MBID("80aca1ee-aa51-41be-9f75-024710d92ff4"),
+		ReleaseMBID: mbtypes.MBID("aa1ea1ac-7ec4-4542-a494-105afbfe547d"),
+		ArtistMBIDs: []mbtypes.MBID{"24412926-c7bd-48e8-afad-8a285b42e131"},
+		WorkMBIDs: []mbtypes.MBID{"c0a1fc94-5f04-4a5f-bc09-e5de0c49cd12"},
 		TrackNumber: 5,
 		DiscNumber: 1,
 		Duration: time.Duration(413787 * time.Millisecond),
-		ISRC: "DES561620801",
+		ISRC: mbtypes.ISRC("DES561620801"),
 		Tags: []string{"rock", "psychedelic rock"},
 	}
 	track.FillAdditionalInfo()
 	i := track.AdditionalInfo
 	assert := assert.New(t)
-	assert.Equal(track.RecordingMbid, i["recording_mbid"])
-	assert.Equal(track.ReleaseGroupMbid, i["release_group_mbid"])
-	assert.Equal(track.ReleaseMbid, i["release_mbid"])
-	assert.Equal(track.ArtistMbids, i["artist_mbids"])
-	assert.Equal(track.WorkMbids, i["work_mbids"])
+	assert.Equal(track.RecordingMBID, i["recording_mbid"])
+	assert.Equal(track.ReleaseGroupMBID, i["release_group_mbid"])
+	assert.Equal(track.ReleaseMBID, i["release_mbid"])
+	assert.Equal(track.ArtistMBIDs, i["artist_mbids"])
+	assert.Equal(track.WorkMBIDs, i["work_mbids"])
 	assert.Equal(track.TrackNumber, i["tracknumber"])
 	assert.Equal(track.DiscNumber, i["discnumber"])
 	assert.Equal(track.Duration.Milliseconds(), i["duration_ms"])

@@ -63,7 +63,7 @@ func NormalizeTitle(s string) string {
 // Compare two tracks for similarity.
 func CompareTracks(t1 models.Track, t2 models.Track) float64 {
 	// Identical recording MBID always compares 100%
-	if t1.RecordingMbid == t2.RecordingMbid && t1.RecordingMbid != "" {
+	if t1.RecordingMBID == t2.RecordingMBID && t1.RecordingMBID != "" {
 		return 1.0
 	}
 
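
The guard above short-circuits CompareTracks to full similarity when both tracks carry the same non-empty recording MBID. A hedged, standalone sketch of just that check; MBID is modelled here as a plain string type, mirroring how mbtypes.MBID is used in the hunk:

	package main

	import "fmt"

	// MBID stands in for mbtypes.MBID (an underlying string type) in this sketch.
	type MBID string

	// sameRecording mirrors the changed condition: equal and non-empty MBIDs
	// are treated as the same recording, so similarity is 1.0 immediately.
	func sameRecording(a, b MBID) bool {
		return a == b && a != ""
	}

	func main() {
		id := MBID("2886d15c-09b0-43c6-af56-932f70dde164")
		fmt.Println(sameRecording(id, id)) // true
		fmt.Println(sameRecording("", "")) // false: empty MBIDs never match
	}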

@@ -20,6 +20,7 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
+	"go.uploadedlobster.com/mbtypes"
 	"go.uploadedlobster.com/scotty/internal/models"
 	"go.uploadedlobster.com/scotty/internal/similarity"
 )
@@ -74,13 +75,13 @@ func TestCompareTracksSameMBID(t *testing.T) {
 	t1 := models.Track{
 		ArtistNames: []string{"Paradise Lost"},
 		TrackName: "Forever After",
-		RecordingMbid: models.MBID("2886d15c-09b0-43c6-af56-932f70dde164"),
+		RecordingMBID: mbtypes.MBID("2886d15c-09b0-43c6-af56-932f70dde164"),
 	}
 	t2 := models.Track{
 		ArtistNames: []string{"Paradise Lost"},
 		TrackName: "Forever Failure (radio edit)",
 		ReleaseName: "Draconian Times",
-		RecordingMbid: models.MBID("2886d15c-09b0-43c6-af56-932f70dde164"),
+		RecordingMBID: mbtypes.MBID("2886d15c-09b0-43c6-af56-932f70dde164"),
 	}
 	assert.Equal(t, 1.0, similarity.CompareTracks(t1, t2))
 }

@@ -42,60 +42,61 @@ var messageKeyToIndex = map[string]int{
 	"\tbackend: %v": 11,
 	"\texport: %s": 0,
 	"\timport: %s\n": 1,
-	"%v: %v": 52,
+	"%v: %v": 49,
 	"Aborted": 8,
 	"Access token": 19,
-	"Access token received, you can use %v now.\n": 28,
+	"Access token received, you can use %v now.\n": 34,
 	"Append to file": 21,
-	"Backend": 36,
-	"Check for duplicate listens on import (slower)": 45,
+	"Backend": 43,
+	"Check for duplicate listens on import (slower)": 24,
 	"Client ID": 15,
 	"Client secret": 16,
 	"Delete the service configuration \"%v\"?": 7,
-	"Directory path": 47,
-	"Disable auto correction of submitted listens": 24,
-	"Error: OAuth state mismatch": 27,
+	"Directory path": 29,
+	"Disable auto correction of submitted listens": 26,
+	"Error: OAuth state mismatch": 33,
 	"Failed reading config: %v": 2,
 	"File path": 20,
-	"From timestamp: %v (%v)": 38,
-	"Ignore listens in incognito mode": 48,
-	"Ignore skipped listens": 49,
-	"Ignored duplicate listen %v: \"%v\" by %v (%v)": 46,
-	"Import failed, last reported timestamp was %v (%s)": 39,
-	"Import log:": 51,
-	"Imported %v of %v %s into %v.": 40,
-	"Include skipped listens": 25,
-	"Latest timestamp: %v (%v)": 41,
-	"Minimum playback duration for skipped tracks (seconds)": 50,
-	"No": 33,
+	"From timestamp: %v (%v)": 45,
+	"Ignore listens in incognito mode": 30,
+	"Ignore skipped listens": 27,
+	"Ignored duplicate listen %v: \"%v\" by %v (%v)": 25,
+	"Import failed, last reported timestamp was %v (%s)": 47,
+	"Import log:": 48,
+	"Imported %v of %v %s into %v.": 46,
+	"Latest timestamp: %v (%v)": 51,
+	"Minimum playback duration for skipped tracks (seconds)": 31,
+	"No": 40,
 	"Playlist title": 22,
 	"Saved service %v using backend %v": 5,
 	"Server URL": 17,
-	"Service": 35,
+	"Service": 42,
 	"Service \"%v\" deleted\n": 9,
 	"Service name": 3,
+	"Specify a time zone for the listen timestamps": 28,
 	"The backend %v requires authentication. Authenticate now?": 6,
 	"Token received, you can close this window now.": 12,
-	"Transferring %s from %s to %s...": 37,
+	"Transferring %s from %s to %s…": 44,
 	"Unique playlist identifier": 23,
 	"Updated service %v using backend %v\n": 10,
 	"User name": 18,
-	"Visit the URL for authorization: %v": 26,
-	"Yes": 32,
+	"Visit the URL for authorization: %v": 32,
+	"Yes": 39,
 	"a service with this name already exists": 4,
+	"aborted": 37,
 	"backend %s does not implement %s": 13,
-	"done": 31,
-	"exporting": 29,
-	"importing": 30,
-	"invalid timestamp string \"%v\"": 53,
-	"key must only consist of A-Za-z0-9_-": 43,
-	"no configuration file defined, cannot write config": 42,
-	"no existing service configurations": 34,
-	"no service configuration \"%v\"": 44,
+	"done": 38,
+	"exporting": 35,
+	"importing": 36,
+	"invalid timestamp string \"%v\"": 50,
+	"key must only consist of A-Za-z0-9_-": 53,
+	"no configuration file defined, cannot write config": 52,
+	"no existing service configurations": 41,
+	"no service configuration \"%v\"": 54,
 	"unknown backend \"%s\"": 14,
 }
 
-var deIndex = []uint32{ // 55 elements
+var deIndex = []uint32{ // 56 elements
 	// Entry 0 - 1F
 	0x00000000, 0x00000013, 0x00000027, 0x00000052,
 	0x0000005e, 0x0000008d, 0x000000bd, 0x00000104,
@@ -103,18 +104,18 @@ var deIndex = []uint32{ // 55 elements
 	0x000001ac, 0x000001e7, 0x00000213, 0x00000233,
 	0x0000023d, 0x0000024b, 0x00000256, 0x00000263,
 	0x00000271, 0x0000027b, 0x0000028e, 0x000002a1,
-	0x000002b8, 0x000002ec, 0x0000030d, 0x00000333,
-	0x0000035d, 0x0000039d, 0x000003a8, 0x000003b3,
+	0x000002b8, 0x000002ed, 0x00000328, 0x0000035c,
+	0x0000037e, 0x000003a4, 0x000003b4, 0x000003da,
 	// Entry 20 - 3F
-	0x000003ba, 0x000003bd, 0x000003c2, 0x000003eb,
-	0x000003f3, 0x000003fb, 0x00000424, 0x00000442,
-	0x0000047f, 0x000004aa, 0x000004cd, 0x0000051e,
-	0x00000555, 0x0000057c, 0x0000057c, 0x0000057c,
-	0x0000057c, 0x0000057c, 0x0000057c, 0x0000057c,
-	0x0000057c, 0x0000057c, 0x0000057c,
-} // Size: 244 bytes
+	0x00000418, 0x00000443, 0x0000046d, 0x000004ad,
+	0x000004b8, 0x000004c3, 0x000004cf, 0x000004d6,
+	0x000004d9, 0x000004de, 0x00000507, 0x0000050f,
+	0x00000517, 0x00000540, 0x0000055e, 0x00000589,
+	0x000005c6, 0x000005d1, 0x000005de, 0x00000602,
+	0x00000625, 0x00000676, 0x000006ad, 0x000006d4,
+} // Size: 248 bytes
 
-const deData string = "" + // Size: 1404 bytes
+const deData string = "" + // Size: 1748 bytes
 	"\x04\x01\x09\x00\x0e\x02Export: %[1]s\x04\x01\x09\x01\x0a\x0e\x02Import:" +
 	" %[1]s\x02Fehler beim Lesen der Konfiguration: %[1]v\x02Servicename\x02e" +
 	"in Service mit diesem Namen existiert bereits\x02Service %[1]v mit dem B" +
@@ -123,23 +124,28 @@ const deData string = "" + // Size: 1404 bytes
 	"\x02Abgebrochen\x04\x00\x01\x0a\x1e\x02Service „%[1]v“ gelöscht\x04\x00" +
 	"\x01\x0a1\x02Service %[1]v mit dem Backend %[2]v aktualisiert\x04\x01" +
 	"\x09\x00\x0f\x02Backend: %[1]v\x02Token erhalten, das Fenster kann jetzt" +
-	" geschlossen werden.\x02das backend %[1]s implementiert %[2]s nicht\x02u" +
+	" geschlossen werden.\x02das Backend %[1]s implementiert %[2]s nicht\x02u" +
 	"nbekanntes Backend „%[1]s“\x02Client-ID\x02Client-Secret\x02Server-URL" +
 	"\x02Benutzername\x02Zugriffstoken\x02Dateipfad\x02An Datei anhängen\x02T" +
-	"itel der Playlist\x02Eindeutige Playlist-ID\x02Autokorrektur für übermit" +
-	"telte Titel deaktivieren\x02Übersprungene Titel einbeziehen\x02URL für A" +
-	"utorisierung öffnen: %[1]v\x02Fehler: OAuth-State stimmt nicht überein" +
-	"\x04\x00\x01\x0a;\x02Zugriffstoken erhalten, %[1]v kann jetzt verwendet " +
-	"werden.\x02exportiere\x02importiere\x02fertig\x02Ja\x02Nein\x02keine bes" +
-	"tehenden Servicekonfigurationen\x02Service\x02Backend\x02Übertrage %[1]s" +
-	" von %[2]s nach %[3]s...\x02Ab Zeitstempel: %[1]v (%[2]v)\x02Import fehl" +
-	"geschlagen, letzter Zeitstempel war %[1]v (%[2]s)\x02%[1]v von %[2]v %[3" +
-	"]s in %[4]v importiert.\x02Letzter Zeitstempel: %[1]v (%[2]v)\x02keine K" +
-	"onfigurationsdatei definiert, Konfiguration kann nicht geschrieben werde" +
-	"n\x02Schlüssel darf nur die Zeichen A-Za-z0-9_- beinhalten\x02keine Serv" +
-	"icekonfiguration „%[1]v“"
+	"itel der Playlist\x02Eindeutige Playlist-ID\x02Beim Import auf Listen-Du" +
+	"plikate prüfen (langsamer)\x02Listen-Duplikat ignoriert %[1]v: \x22%[2]v" +
+	"\x22 von %[3]v (%[4]v)\x02Autokorrektur für übermittelte Titel deaktivie" +
+	"ren\x02Übersprungene Listens ignorieren\x02Zeitzone für den Abspiel-Zeit" +
+	"stempel\x02Verzeichnispfad\x02Listens im Inkognito-Modus ignorieren\x02M" +
+	"inimale Wiedergabedauer für übersprungene Titel (Sekunden)\x02Zur Anmeld" +
+	"ung folgende URL aufrufen: %[1]v\x02Fehler: OAuth-State stimmt nicht übe" +
+	"rein\x04\x00\x01\x0a;\x02Zugriffstoken erhalten, %[1]v kann jetzt verwen" +
+	"det werden.\x02exportiere\x02importiere\x02abgebrochen\x02fertig\x02Ja" +
+	"\x02Nein\x02keine bestehenden Servicekonfigurationen\x02Service\x02Backe" +
+	"nd\x02Übertrage %[1]s von %[2]s nach %[3]s…\x02Ab Zeitstempel: %[1]v (%[" +
+	"2]v)\x02%[1]v von %[2]v %[3]s in %[4]v importiert.\x02Import fehlgeschla" +
+	"gen, letzter Zeitstempel war %[1]v (%[2]s)\x02Importlog:\x02%[1]v: %[2]v" +
+	"\x02ungültiger Zeitstempel „%[1]v“\x02Letzter Zeitstempel: %[1]v (%[2]v)" +
+	"\x02keine Konfigurationsdatei definiert, Konfiguration kann nicht geschr" +
+	"ieben werden\x02Schlüssel darf nur die Zeichen A-Za-z0-9_- beinhalten" +
+	"\x02keine Servicekonfiguration „%[1]v“"
 
-var enIndex = []uint32{ // 55 elements
+var enIndex = []uint32{ // 56 elements
 	// Entry 0 - 1F
 	0x00000000, 0x00000013, 0x00000027, 0x00000044,
 	0x00000051, 0x00000079, 0x000000a1, 0x000000de,
@@ -147,18 +153,18 @@ var enIndex = []uint32{ // 55 elements
 	0x00000170, 0x0000019f, 0x000001c6, 0x000001de,
 	0x000001e8, 0x000001f6, 0x00000201, 0x0000020b,
 	0x00000218, 0x00000222, 0x00000231, 0x00000240,
-	0x0000025b, 0x00000288, 0x000002a0, 0x000002c7,
-	0x000002e3, 0x00000316, 0x00000320, 0x0000032a,
+	0x0000025b, 0x0000028a, 0x000002c3, 0x000002f0,
+	0x00000307, 0x00000335, 0x00000344, 0x00000365,
 	// Entry 20 - 3F
-	0x0000032f, 0x00000333, 0x00000336, 0x00000359,
-	0x00000361, 0x00000369, 0x00000393, 0x000003b1,
-	0x000003ea, 0x00000414, 0x00000434, 0x00000467,
-	0x0000048c, 0x000004ad, 0x000004dc, 0x00000515,
-	0x00000524, 0x00000545, 0x0000055c, 0x00000593,
-	0x0000059f, 0x000005ac, 0x000005cd,
-} // Size: 244 bytes
+	0x0000039c, 0x000003c3, 0x000003df, 0x00000412,
+	0x0000041c, 0x00000426, 0x0000042e, 0x00000433,
+	0x00000437, 0x0000043a, 0x0000045d, 0x00000465,
+	0x0000046d, 0x00000497, 0x000004b5, 0x000004df,
+	0x00000518, 0x00000524, 0x00000531, 0x00000552,
+	0x00000572, 0x000005a5, 0x000005ca, 0x000005eb,
+} // Size: 248 bytes
 
-const enData string = "" + // Size: 1485 bytes
+const enData string = "" + // Size: 1515 bytes
 	"\x04\x01\x09\x00\x0e\x02export: %[1]s\x04\x01\x09\x01\x0a\x0e\x02import:" +
 	" %[1]s\x02Failed reading config: %[1]v\x02Service name\x02a service with" +
 	" this name already exists\x02Saved service %[1]v using backend %[2]v\x02" +
@@ -169,20 +175,21 @@ const enData string = "" + // Size: 1485 bytes
 	"eceived, you can close this window now.\x02backend %[1]s does not implem" +
 	"ent %[2]s\x02unknown backend \x22%[1]s\x22\x02Client ID\x02Client secret" +
 	"\x02Server URL\x02User name\x02Access token\x02File path\x02Append to fi" +
-	"le\x02Playlist title\x02Unique playlist identifier\x02Disable auto corre" +
-	"ction of submitted listens\x02Include skipped listens\x02Visit the URL f" +
-	"or authorization: %[1]v\x02Error: OAuth state mismatch\x04\x00\x01\x0a." +
-	"\x02Access token received, you can use %[1]v now.\x02exporting\x02import" +
-	"ing\x02done\x02Yes\x02No\x02no existing service configurations\x02Servic" +
-	"e\x02Backend\x02Transferring %[1]s from %[2]s to %[3]s...\x02From timest" +
-	"amp: %[1]v (%[2]v)\x02Import failed, last reported timestamp was %[1]v (" +
-	"%[2]s)\x02Imported %[1]v of %[2]v %[3]s into %[4]v.\x02Latest timestamp:" +
-	" %[1]v (%[2]v)\x02no configuration file defined, cannot write config\x02" +
-	"key must only consist of A-Za-z0-9_-\x02no service configuration \x22%[1" +
-	"]v\x22\x02Check for duplicate listens on import (slower)\x02Ignored dupl" +
-	"icate listen %[1]v: \x22%[2]v\x22 by %[3]v (%[4]v)\x02Directory path\x02" +
-	"Ignore listens in incognito mode\x02Ignore skipped listens\x02Minimum pl" +
-	"ayback duration for skipped tracks (seconds)\x02Import log:\x02%[1]v: %[" +
-	"2]v\x02invalid timestamp string \x22%[1]v\x22"
+	"le\x02Playlist title\x02Unique playlist identifier\x02Check for duplicat" +
+	"e listens on import (slower)\x02Ignored duplicate listen %[1]v: \x22%[2]" +
+	"v\x22 by %[3]v (%[4]v)\x02Disable auto correction of submitted listens" +
+	"\x02Ignore skipped listens\x02Specify a time zone for the listen timesta" +
+	"mps\x02Directory path\x02Ignore listens in incognito mode\x02Minimum pla" +
+	"yback duration for skipped tracks (seconds)\x02Visit the URL for authori" +
+	"zation: %[1]v\x02Error: OAuth state mismatch\x04\x00\x01\x0a.\x02Access " +
+	"token received, you can use %[1]v now.\x02exporting\x02importing\x02abor" +
+	"ted\x02done\x02Yes\x02No\x02no existing service configurations\x02Servic" +
+	"e\x02Backend\x02Transferring %[1]s from %[2]s to %[3]s…\x02From timestam" +
+	"p: %[1]v (%[2]v)\x02Imported %[1]v of %[2]v %[3]s into %[4]v.\x02Import " +
+	"failed, last reported timestamp was %[1]v (%[2]s)\x02Import log:\x02%[1]" +
+	"v: %[2]v\x02invalid timestamp string \x22%[1]v\x22\x02Latest timestamp: " +
+	"%[1]v (%[2]v)\x02no configuration file defined, cannot write config\x02k" +
+	"ey must only consist of A-Za-z0-9_-\x02no service configuration \x22%[1]" +
+	"v\x22"
 
-// Total table size 3377 bytes (3KiB); checksum: 6715024
+// Total table size 3759 bytes (3KiB); checksum: 7B4CF967

@@ -175,7 +175,7 @@
   {
     "id": "backend {Backend} does not implement {InterfaceName}",
    "message": "backend {Backend} does not implement {InterfaceName}",
-    "translation": "das backend {Backend} implementiert {InterfaceName} nicht",
+    "translation": "das Backend {Backend} implementiert {InterfaceName} nicht",
     "placeholders": [
       {
         "id": "Backend",
@@ -261,9 +261,9 @@
     "translation": "Beim Import auf Listen-Duplikate prüfen (langsamer)"
   },
   {
-    "id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
-    "message": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
-    "translation": "Listen-Duplikat ignoriert {ListenedAt}: „{TrackName}“ von {ArtistName} ({RecordingMbid})",
+    "id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
+    "message": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
+    "translation": "Listen-Duplikat ignoriert {ListenedAt}: \"{TrackName}\" von {ArtistName} ({RecordingMBID})",
     "placeholders": [
       {
         "id": "ListenedAt",
@@ -290,12 +290,12 @@
         "expr": "l.ArtistName()"
       },
       {
-        "id": "RecordingMbid",
+        "id": "RecordingMBID",
         "string": "%[4]v",
-        "type": "go.uploadedlobster.com/scotty/internal/models.MBID",
+        "type": "go.uploadedlobster.com/mbtypes.MBID",
         "underlyingType": "string",
         "argNum": 4,
-        "expr": "l.RecordingMbid"
+        "expr": "l.RecordingMBID"
       }
     ]
   },
@@ -305,9 +305,14 @@
     "translation": "Autokorrektur für übermittelte Titel deaktivieren"
   },
   {
-    "id": "Include skipped listens",
-    "message": "Include skipped listens",
-    "translation": "Übersprungene Titel einbeziehen"
+    "id": "Ignore skipped listens",
+    "message": "Ignore skipped listens",
+    "translation": "Übersprungene Listens ignorieren"
+  },
+  {
+    "id": "Specify a time zone for the listen timestamps",
+    "message": "Specify a time zone for the listen timestamps",
+    "translation": "Zeitzone für den Abspiel-Zeitstempel"
   },
   {
     "id": "Directory path",
@@ -319,28 +324,23 @@
     "message": "Ignore listens in incognito mode",
     "translation": "Listens im Inkognito-Modus ignorieren"
   },
-  {
-    "id": "Ignore skipped listens",
-    "message": "Ignore skipped listens",
-    "translation": "Übersprungene Listens ignorieren"
-  },
   {
     "id": "Minimum playback duration for skipped tracks (seconds)",
     "message": "Minimum playback duration for skipped tracks (seconds)",
     "translation": "Minimale Wiedergabedauer für übersprungene Titel (Sekunden)"
   },
   {
-    "id": "Visit the URL for authorization: {Url}",
-    "message": "Visit the URL for authorization: {Url}",
-    "translation": "URL für Autorisierung öffnen: {Url}",
+    "id": "Visit the URL for authorization: {URL}",
+    "message": "Visit the URL for authorization: {URL}",
+    "translation": "Zur Anmeldung folgende URL aufrufen: {URL}",
     "placeholders": [
       {
-        "id": "Url",
+        "id": "URL",
         "string": "%[1]v",
         "type": "string",
         "underlyingType": "string",
         "argNum": 1,
-        "expr": "authUrl.Url"
+        "expr": "authURL.URL"
       }
     ]
   },
@@ -368,21 +368,23 @@
     "id": "exporting",
     "message": "exporting",
     "translatorComment": "Copied from source.",
-    "fuzzy": true,
     "translation": "exportiere"
   },
   {
     "id": "importing",
     "message": "importing",
     "translatorComment": "Copied from source.",
-    "fuzzy": true,
     "translation": "importiere"
   },
+  {
+    "id": "aborted",
+    "message": "aborted",
+    "translation": "abgebrochen"
+  },
   {
     "id": "done",
     "message": "done",
     "translatorComment": "Copied from source.",
-    "fuzzy": true,
     "translation": "fertig"
   },
   {
@@ -411,9 +413,9 @@
     "translation": "Backend"
   },
   {
-    "id": "Transferring {Entity} from {SourceName} to {TargetName}...",
-    "message": "Transferring {Entity} from {SourceName} to {TargetName}...",
-    "translation": "Übertrage {Entity} von {SourceName} nach {TargetName}...",
+    "id": "Transferring {Entity} from {SourceName} to {TargetName}…",
+    "message": "Transferring {Entity} from {SourceName} to {TargetName}…",
+    "translation": "Übertrage {Entity} von {SourceName} nach {TargetName}…",
     "placeholders": [
       {
         "id": "Entity",
@@ -462,27 +464,6 @@
       }
     ]
   },
-  {
-    "id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
-    "message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
-    "translation": "Import fehlgeschlagen, letzter Zeitstempel war {Arg_1} ({Arg_2})",
-    "placeholders": [
-      {
-        "id": "Arg_1",
-        "string": "%[1]v",
-        "type": "",
-        "underlyingType": "interface{}",
-        "argNum": 1
-      },
-      {
-        "id": "Arg_2",
-        "string": "%[2]s",
-        "type": "",
-        "underlyingType": "string",
-        "argNum": 2
-      }
-    ]
-  },
   {
     "id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
     "message": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
@@ -522,6 +503,27 @@
       }
     ]
   },
+  {
+    "id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
+    "message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
+    "translation": "Import fehlgeschlagen, letzter Zeitstempel war {Arg_1} ({Arg_2})",
+    "placeholders": [
+      {
+        "id": "Arg_1",
+        "string": "%[1]v",
+        "type": "",
+        "underlyingType": "interface{}",
+        "argNum": 1
+      },
+      {
+        "id": "Arg_2",
+        "string": "%[2]s",
+        "type": "",
+        "underlyingType": "string",
+        "argNum": 2
+      }
+    ]
+  },
   {
     "id": "Import log:",
     "message": "Import log:",

@@ -175,7 +175,7 @@
   {
     "id": "backend {Backend} does not implement {InterfaceName}",
     "message": "backend {Backend} does not implement {InterfaceName}",
-    "translation": "das backend {Backend} implementiert {InterfaceName} nicht",
+    "translation": "das Backend {Backend} implementiert {InterfaceName} nicht",
     "placeholders": [
       {
         "id": "Backend",
@@ -258,12 +258,12 @@
   {
     "id": "Check for duplicate listens on import (slower)",
     "message": "Check for duplicate listens on import (slower)",
-    "translation": ""
+    "translation": "Beim Import auf Listen-Duplikate prüfen (langsamer)"
   },
   {
-    "id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
-    "message": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
-    "translation": "",
+    "id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
+    "message": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
+    "translation": "Listen-Duplikat ignoriert {ListenedAt}: \"{TrackName}\" von {ArtistName} ({RecordingMBID})",
     "placeholders": [
       {
         "id": "ListenedAt",
@@ -290,12 +290,12 @@
         "expr": "l.ArtistName()"
      },
       {
-        "id": "RecordingMbid",
+        "id": "RecordingMBID",
         "string": "%[4]v",
-        "type": "go.uploadedlobster.com/scotty/internal/models.MBID",
+        "type": "go.uploadedlobster.com/mbtypes.MBID",
         "underlyingType": "string",
         "argNum": 4,
-        "expr": "l.RecordingMbid"
+        "expr": "l.RecordingMBID"
       }
     ]
   },
@@ -305,42 +305,42 @@
     "translation": "Autokorrektur für übermittelte Titel deaktivieren"
   },
   {
-    "id": "Include skipped listens",
-    "message": "Include skipped listens",
-    "translation": "Übersprungene Titel einbeziehen"
+    "id": "Ignore skipped listens",
+    "message": "Ignore skipped listens",
+    "translation": "Übersprungene Listens ignorieren"
+  },
+  {
+    "id": "Specify a time zone for the listen timestamps",
+    "message": "Specify a time zone for the listen timestamps",
+    "translation": "Zeitzone für den Abspiel-Zeitstempel"
   },
   {
     "id": "Directory path",
     "message": "Directory path",
-    "translation": ""
+    "translation": "Verzeichnispfad"
   },
   {
     "id": "Ignore listens in incognito mode",
     "message": "Ignore listens in incognito mode",
-    "translation": ""
-  },
-  {
-    "id": "Ignore skipped listens",
-    "message": "Ignore skipped listens",
-    "translation": ""
+    "translation": "Listens im Inkognito-Modus ignorieren"
   },
   {
     "id": "Minimum playback duration for skipped tracks (seconds)",
     "message": "Minimum playback duration for skipped tracks (seconds)",
-    "translation": ""
+    "translation": "Minimale Wiedergabedauer für übersprungene Titel (Sekunden)"
   },
   {
-    "id": "Visit the URL for authorization: {Url}",
-    "message": "Visit the URL for authorization: {Url}",
-    "translation": "URL für Autorisierung öffnen: {Url}",
+    "id": "Visit the URL for authorization: {URL}",
+    "message": "Visit the URL for authorization: {URL}",
+    "translation": "Zur Anmeldung folgende URL aufrufen: {URL}",
     "placeholders": [
       {
-        "id": "Url",
+        "id": "URL",
         "string": "%[1]v",
         "type": "string",
         "underlyingType": "string",
         "argNum": 1,
-        "expr": "authUrl.Url"
+        "expr": "authURL.URL"
       }
     ]
   },
@@ -368,22 +368,24 @@
     "id": "exporting",
     "message": "exporting",
     "translation": "exportiere",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "importing",
     "message": "importing",
     "translation": "importiere",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
+  },
+  {
+    "id": "aborted",
+    "message": "aborted",
+    "translation": "abgebrochen"
   },
   {
     "id": "done",
     "message": "done",
     "translation": "fertig",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Yes",
@@ -411,9 +413,9 @@
     "translation": "Backend"
   },
   {
-    "id": "Transferring {Entity} from {SourceName} to {TargetName}...",
-    "message": "Transferring {Entity} from {SourceName} to {TargetName}...",
-    "translation": "Übertrage {Entity} von {SourceName} nach {TargetName}...",
+    "id": "Transferring {Entity} from {SourceName} to {TargetName}…",
+    "message": "Transferring {Entity} from {SourceName} to {TargetName}…",
+    "translation": "Übertrage {Entity} von {SourceName} nach {TargetName}…",
     "placeholders": [
       {
         "id": "Entity",
@@ -462,27 +464,6 @@
       }
     ]
   },
-  {
-    "id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
-    "message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
-    "translation": "Import fehlgeschlagen, letzter Zeitstempel war {Arg_1} ({Arg_2})",
-    "placeholders": [
-      {
-        "id": "Arg_1",
-        "string": "%[1]v",
-        "type": "",
-        "underlyingType": "interface{}",
-        "argNum": 1
-      },
-      {
-        "id": "Arg_2",
-        "string": "%[2]s",
-        "type": "",
-        "underlyingType": "string",
-        "argNum": 2
-      }
-    ]
-  },
   {
     "id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
     "message": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
@@ -522,15 +503,36 @@
       }
     ]
   },
+  {
+    "id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
+    "message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
+    "translation": "Import fehlgeschlagen, letzter Zeitstempel war {Arg_1} ({Arg_2})",
+    "placeholders": [
+      {
+        "id": "Arg_1",
+        "string": "%[1]v",
+        "type": "",
+        "underlyingType": "interface{}",
+        "argNum": 1
+      },
+      {
+        "id": "Arg_2",
+        "string": "%[2]s",
+        "type": "",
+        "underlyingType": "string",
+        "argNum": 2
+      }
+    ]
+  },
   {
     "id": "Import log:",
     "message": "Import log:",
-    "translation": ""
+    "translation": "Importlog:"
   },
   {
     "id": "{Type}: {Message}",
     "message": "{Type}: {Message}",
-    "translation": "",
+    "translation": "{Type}: {Message}",
     "placeholders": [
       {
         "id": "Type",
@@ -553,7 +555,7 @@
   {
     "id": "invalid timestamp string \"{FlagValue}\"",
     "message": "invalid timestamp string \"{FlagValue}\"",
-    "translation": "",
+    "translation": "ungültiger Zeitstempel „{FlagValue}“",
     "placeholders": [
       {
         "id": "FlagValue",

@@ -15,8 +15,7 @@
         "argNum": 1,
         "expr": "strings.Join(info.ExportCapabilities, \", \")"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "import: {ImportCapabilities__}",
@@ -32,8 +31,7 @@
         "argNum": 1,
         "expr": "strings.Join(info.ImportCapabilities, \", \")"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Failed reading config: {Err}",
@@ -49,22 +47,19 @@
         "argNum": 1,
         "expr": "err"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Service name",
     "message": "Service name",
     "translation": "Service name",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "a service with this name already exists",
     "message": "a service with this name already exists",
     "translation": "a service with this name already exists",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Saved service {Name} using backend {Backend}",
@@ -88,8 +83,7 @@
         "argNum": 2,
         "expr": "service.Backend"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "The backend {Backend} requires authentication. Authenticate now?",
@@ -105,8 +99,7 @@
         "argNum": 1,
         "expr": "service.Backend"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Delete the service configuration \"{Service}\"?",
@@ -122,15 +115,13 @@
         "argNum": 1,
         "expr": "service"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Aborted",
     "message": "Aborted",
     "translation": "Aborted",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Service \"{Name}\" deleted",
@@ -146,8 +137,7 @@
         "argNum": 1,
         "expr": "service.Name"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Updated service {Name} using backend {Backend}",
@@ -171,8 +161,7 @@
         "argNum": 2,
         "expr": "service.Backend"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "backend: {Backend}",
@@ -188,15 +177,13 @@
         "argNum": 1,
         "expr": "s.Backend"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Token received, you can close this window now.",
     "message": "Token received, you can close this window now.",
     "translation": "Token received, you can close this window now.",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "backend {Backend} does not implement {InterfaceName}",
@@ -220,8 +207,7 @@
         "argNum": 2,
         "expr": "interfaceName"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "unknown backend \"{BackendName}\"",
@@ -237,83 +223,72 @@
         "argNum": 1,
         "expr": "backendName"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Client ID",
     "message": "Client ID",
     "translation": "Client ID",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Client secret",
     "message": "Client secret",
     "translation": "Client secret",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Server URL",
     "message": "Server URL",
     "translation": "Server URL",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "User name",
     "message": "User name",
     "translation": "User name",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Access token",
     "message": "Access token",
     "translation": "Access token",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "File path",
     "message": "File path",
     "translation": "File path",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Append to file",
     "message": "Append to file",
     "translation": "Append to file",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
  },
   {
     "id": "Playlist title",
     "message": "Playlist title",
     "translation": "Playlist title",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Unique playlist identifier",
     "message": "Unique playlist identifier",
     "translation": "Unique playlist identifier",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Check for duplicate listens on import (slower)",
     "message": "Check for duplicate listens on import (slower)",
     "translation": "Check for duplicate listens on import (slower)",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
-    "id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
-    "message": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
-    "translation": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
+    "id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
+    "message": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
+    "translation": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
     "translatorComment": "Copied from source.",
     "placeholders": [
       {
@@ -341,81 +316,72 @@
         "expr": "l.ArtistName()"
       },
       {
-        "id": "RecordingMbid",
+        "id": "RecordingMBID",
         "string": "%[4]v",
-        "type": "go.uploadedlobster.com/scotty/internal/models.MBID",
+        "type": "go.uploadedlobster.com/mbtypes.MBID",
         "underlyingType": "string",
         "argNum": 4,
-        "expr": "l.RecordingMbid"
+        "expr": "l.RecordingMBID"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Disable auto correction of submitted listens",
     "message": "Disable auto correction of submitted listens",
     "translation": "Disable auto correction of submitted listens",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
-  },
-  {
-    "id": "Include skipped listens",
-    "message": "Include skipped listens",
-    "translation": "Include skipped listens",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
-  },
-  {
-    "id": "Directory path",
-    "message": "Directory path",
-    "translation": "Directory path",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
-  },
-  {
-    "id": "Ignore listens in incognito mode",
-    "message": "Ignore listens in incognito mode",
-    "translation": "Ignore listens in incognito mode",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Ignore skipped listens",
     "message": "Ignore skipped listens",
     "translation": "Ignore skipped listens",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
+  },
+  {
+    "id": "Specify a time zone for the listen timestamps",
+    "message": "Specify a time zone for the listen timestamps",
+    "translation": "Specify a time zone for the listen timestamps",
+    "translatorComment": "Copied from source."
+  },
+  {
+    "id": "Directory path",
+    "message": "Directory path",
+    "translation": "Directory path",
+    "translatorComment": "Copied from source."
+  },
+  {
+    "id": "Ignore listens in incognito mode",
+    "message": "Ignore listens in incognito mode",
+    "translation": "Ignore listens in incognito mode",
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Minimum playback duration for skipped tracks (seconds)",
     "message": "Minimum playback duration for skipped tracks (seconds)",
     "translation": "Minimum playback duration for skipped tracks (seconds)",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
-    "id": "Visit the URL for authorization: {Url}",
-    "message": "Visit the URL for authorization: {Url}",
-    "translation": "Visit the URL for authorization: {Url}",
+    "id": "Visit the URL for authorization: {URL}",
+    "message": "Visit the URL for authorization: {URL}",
+    "translation": "Visit the URL for authorization: {URL}",
     "translatorComment": "Copied from source.",
     "placeholders": [
       {
-        "id": "Url",
+        "id": "URL",
         "string": "%[1]v",
         "type": "string",
         "underlyingType": "string",
         "argNum": 1,
-        "expr": "authUrl.Url"
+        "expr": "authURL.URL"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Error: OAuth state mismatch",
     "message": "Error: OAuth state mismatch",
     "translation": "Error: OAuth state mismatch",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Access token received, you can use {Name} now.",
@@ -431,69 +397,60 @@
         "argNum": 1,
         "expr": "service.Name"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "exporting",
     "message": "exporting",
     "translation": "exporting",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "importing",
     "message": "importing",
     "translation": "importing",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "done",
     "message": "done",
     "translation": "done",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Yes",
     "message": "Yes",
     "translation": "Yes",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "No",
     "message": "No",
     "translation": "No",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "no existing service configurations",
     "message": "no existing service configurations",
     "translation": "no existing service configurations",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Service",
     "message": "Service",
     "translation": "Service",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "Backend",
     "message": "Backend",
     "translation": "Backend",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
-    "id": "Transferring {Entity} from {SourceName} to {TargetName}...",
-    "message": "Transferring {Entity} from {SourceName} to {TargetName}...",
-    "translation": "Transferring {Entity} from {SourceName} to {TargetName}...",
+    "id": "Transferring {Entity} from {SourceName} to {TargetName}…",
+    "message": "Transferring {Entity} from {SourceName} to {TargetName}…",
+    "translation": "Transferring {Entity} from {SourceName} to {TargetName}…",
     "translatorComment": "Copied from source.",
     "placeholders": [
       {
@@ -520,8 +477,7 @@
         "argNum": 3,
         "expr": "c.targetName"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "From timestamp: {Arg_1} ({Arg_2})",
@@ -543,8 +499,7 @@
         "underlyingType": "interface{}",
         "argNum": 2
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
@@ -566,8 +521,7 @@
         "underlyingType": "string",
         "argNum": 2
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
@@ -607,15 +561,13 @@
         "argNum": 4,
         "expr": "c.targetName"
       }
-    ],
-    "fuzzy": true
+    ]
   },
   {
     "id": "Import log:",
     "message": "Import log:",
     "translation": "Import log:",
-    "translatorComment": "Copied from source.",
-    "fuzzy": true
+    "translatorComment": "Copied from source."
   },
   {
     "id": "{Type}: {Message}",
@@ -639,8 +591,7 @@
@ -639,8 +591,7 @@
|
||||||
"argNum": 2,
|
"argNum": 2,
|
||||||
"expr": "entry.Message"
|
"expr": "entry.Message"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "invalid timestamp string \"{FlagValue}\"",
|
"id": "invalid timestamp string \"{FlagValue}\"",
|
||||||
|
@ -656,8 +607,7 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "flagValue"
|
"expr": "flagValue"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Latest timestamp: {Arg_1} ({Arg_2})",
|
"id": "Latest timestamp: {Arg_1} ({Arg_2})",
|
||||||
|
@ -679,22 +629,19 @@
|
||||||
"underlyingType": "interface{}",
|
"underlyingType": "interface{}",
|
||||||
"argNum": 2
|
"argNum": 2
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "no configuration file defined, cannot write config",
|
"id": "no configuration file defined, cannot write config",
|
||||||
"message": "no configuration file defined, cannot write config",
|
"message": "no configuration file defined, cannot write config",
|
||||||
"translation": "no configuration file defined, cannot write config",
|
"translation": "no configuration file defined, cannot write config",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "key must only consist of A-Za-z0-9_-",
|
"id": "key must only consist of A-Za-z0-9_-",
|
||||||
"message": "key must only consist of A-Za-z0-9_-",
|
"message": "key must only consist of A-Za-z0-9_-",
|
||||||
"translation": "key must only consist of A-Za-z0-9_-",
|
"translation": "key must only consist of A-Za-z0-9_-",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "no service configuration \"{Name}\"",
|
"id": "no service configuration \"{Name}\"",
|
||||||
|
@ -710,8 +657,7 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "name"
|
"expr": "name"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
|
@ -15,8 +15,7 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "strings.Join(info.ExportCapabilities, \", \")"
|
"expr": "strings.Join(info.ExportCapabilities, \", \")"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "import: {ImportCapabilities__}",
|
"id": "import: {ImportCapabilities__}",
|
||||||
|
@ -32,8 +31,7 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "strings.Join(info.ImportCapabilities, \", \")"
|
"expr": "strings.Join(info.ImportCapabilities, \", \")"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Failed reading config: {Err}",
|
"id": "Failed reading config: {Err}",
|
||||||
|
@ -49,22 +47,19 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "err"
|
"expr": "err"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Service name",
|
"id": "Service name",
|
||||||
"message": "Service name",
|
"message": "Service name",
|
||||||
"translation": "Service name",
|
"translation": "Service name",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "a service with this name already exists",
|
"id": "a service with this name already exists",
|
||||||
"message": "a service with this name already exists",
|
"message": "a service with this name already exists",
|
||||||
"translation": "a service with this name already exists",
|
"translation": "a service with this name already exists",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Saved service {Name} using backend {Backend}",
|
"id": "Saved service {Name} using backend {Backend}",
|
||||||
|
@ -88,8 +83,7 @@
|
||||||
"argNum": 2,
|
"argNum": 2,
|
||||||
"expr": "service.Backend"
|
"expr": "service.Backend"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "The backend {Backend} requires authentication. Authenticate now?",
|
"id": "The backend {Backend} requires authentication. Authenticate now?",
|
||||||
|
@ -105,8 +99,7 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "service.Backend"
|
"expr": "service.Backend"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Delete the service configuration \"{Service}\"?",
|
"id": "Delete the service configuration \"{Service}\"?",
|
||||||
|
@ -122,15 +115,13 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "service"
|
"expr": "service"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Aborted",
|
"id": "Aborted",
|
||||||
"message": "Aborted",
|
"message": "Aborted",
|
||||||
"translation": "Aborted",
|
"translation": "Aborted",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Service \"{Name}\" deleted",
|
"id": "Service \"{Name}\" deleted",
|
||||||
|
@ -146,8 +137,7 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "service.Name"
|
"expr": "service.Name"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Updated service {Name} using backend {Backend}",
|
"id": "Updated service {Name} using backend {Backend}",
|
||||||
|
@ -171,8 +161,7 @@
|
||||||
"argNum": 2,
|
"argNum": 2,
|
||||||
"expr": "service.Backend"
|
"expr": "service.Backend"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "backend: {Backend}",
|
"id": "backend: {Backend}",
|
||||||
|
@ -188,15 +177,13 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "s.Backend"
|
"expr": "s.Backend"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Token received, you can close this window now.",
|
"id": "Token received, you can close this window now.",
|
||||||
"message": "Token received, you can close this window now.",
|
"message": "Token received, you can close this window now.",
|
||||||
"translation": "Token received, you can close this window now.",
|
"translation": "Token received, you can close this window now.",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "backend {Backend} does not implement {InterfaceName}",
|
"id": "backend {Backend} does not implement {InterfaceName}",
|
||||||
|
@ -220,8 +207,7 @@
|
||||||
"argNum": 2,
|
"argNum": 2,
|
||||||
"expr": "interfaceName"
|
"expr": "interfaceName"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "unknown backend \"{BackendName}\"",
|
"id": "unknown backend \"{BackendName}\"",
|
||||||
|
@ -237,83 +223,72 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "backendName"
|
"expr": "backendName"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Client ID",
|
"id": "Client ID",
|
||||||
"message": "Client ID",
|
"message": "Client ID",
|
||||||
"translation": "Client ID",
|
"translation": "Client ID",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Client secret",
|
"id": "Client secret",
|
||||||
"message": "Client secret",
|
"message": "Client secret",
|
||||||
"translation": "Client secret",
|
"translation": "Client secret",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Server URL",
|
"id": "Server URL",
|
||||||
"message": "Server URL",
|
"message": "Server URL",
|
||||||
"translation": "Server URL",
|
"translation": "Server URL",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "User name",
|
"id": "User name",
|
||||||
"message": "User name",
|
"message": "User name",
|
||||||
"translation": "User name",
|
"translation": "User name",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Access token",
|
"id": "Access token",
|
||||||
"message": "Access token",
|
"message": "Access token",
|
||||||
"translation": "Access token",
|
"translation": "Access token",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "File path",
|
"id": "File path",
|
||||||
"message": "File path",
|
"message": "File path",
|
||||||
"translation": "File path",
|
"translation": "File path",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Append to file",
|
"id": "Append to file",
|
||||||
"message": "Append to file",
|
"message": "Append to file",
|
||||||
"translation": "Append to file",
|
"translation": "Append to file",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Playlist title",
|
"id": "Playlist title",
|
||||||
"message": "Playlist title",
|
"message": "Playlist title",
|
||||||
"translation": "Playlist title",
|
"translation": "Playlist title",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Unique playlist identifier",
|
"id": "Unique playlist identifier",
|
||||||
"message": "Unique playlist identifier",
|
"message": "Unique playlist identifier",
|
||||||
"translation": "Unique playlist identifier",
|
"translation": "Unique playlist identifier",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Check for duplicate listens on import (slower)",
|
"id": "Check for duplicate listens on import (slower)",
|
||||||
"message": "Check for duplicate listens on import (slower)",
|
"message": "Check for duplicate listens on import (slower)",
|
||||||
"translation": "Check for duplicate listens on import (slower)",
|
"translation": "Check for duplicate listens on import (slower)",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
|
"id": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
|
||||||
"message": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
|
"message": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
|
||||||
"translation": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMbid})",
|
"translation": "Ignored duplicate listen {ListenedAt}: \"{TrackName}\" by {ArtistName} ({RecordingMBID})",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source.",
|
||||||
"placeholders": [
|
"placeholders": [
|
||||||
{
|
{
|
||||||
|
@ -341,81 +316,72 @@
|
||||||
"expr": "l.ArtistName()"
|
"expr": "l.ArtistName()"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "RecordingMbid",
|
"id": "RecordingMBID",
|
||||||
"string": "%[4]v",
|
"string": "%[4]v",
|
||||||
"type": "go.uploadedlobster.com/scotty/internal/models.MBID",
|
"type": "go.uploadedlobster.com/mbtypes.MBID",
|
||||||
"underlyingType": "string",
|
"underlyingType": "string",
|
||||||
"argNum": 4,
|
"argNum": 4,
|
||||||
"expr": "l.RecordingMbid"
|
"expr": "l.RecordingMBID"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Disable auto correction of submitted listens",
|
"id": "Disable auto correction of submitted listens",
|
||||||
"message": "Disable auto correction of submitted listens",
|
"message": "Disable auto correction of submitted listens",
|
||||||
"translation": "Disable auto correction of submitted listens",
|
"translation": "Disable auto correction of submitted listens",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "Include skipped listens",
|
|
||||||
"message": "Include skipped listens",
|
|
||||||
"translation": "Include skipped listens",
|
|
||||||
"translatorComment": "Copied from source.",
|
|
||||||
"fuzzy": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "Directory path",
|
|
||||||
"message": "Directory path",
|
|
||||||
"translation": "Directory path",
|
|
||||||
"translatorComment": "Copied from source.",
|
|
||||||
"fuzzy": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "Ignore listens in incognito mode",
|
|
||||||
"message": "Ignore listens in incognito mode",
|
|
||||||
"translation": "Ignore listens in incognito mode",
|
|
||||||
"translatorComment": "Copied from source.",
|
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Ignore skipped listens",
|
"id": "Ignore skipped listens",
|
||||||
"message": "Ignore skipped listens",
|
"message": "Ignore skipped listens",
|
||||||
"translation": "Ignore skipped listens",
|
"translation": "Ignore skipped listens",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
},
|
||||||
|
{
|
||||||
|
"id": "Specify a time zone for the listen timestamps",
|
||||||
|
"message": "Specify a time zone for the listen timestamps",
|
||||||
|
"translation": "Specify a time zone for the listen timestamps",
|
||||||
|
"translatorComment": "Copied from source."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "Directory path",
|
||||||
|
"message": "Directory path",
|
||||||
|
"translation": "Directory path",
|
||||||
|
"translatorComment": "Copied from source."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "Ignore listens in incognito mode",
|
||||||
|
"message": "Ignore listens in incognito mode",
|
||||||
|
"translation": "Ignore listens in incognito mode",
|
||||||
|
"translatorComment": "Copied from source."
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Minimum playback duration for skipped tracks (seconds)",
|
"id": "Minimum playback duration for skipped tracks (seconds)",
|
||||||
"message": "Minimum playback duration for skipped tracks (seconds)",
|
"message": "Minimum playback duration for skipped tracks (seconds)",
|
||||||
"translation": "Minimum playback duration for skipped tracks (seconds)",
|
"translation": "Minimum playback duration for skipped tracks (seconds)",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Visit the URL for authorization: {Url}",
|
"id": "Visit the URL for authorization: {URL}",
|
||||||
"message": "Visit the URL for authorization: {Url}",
|
"message": "Visit the URL for authorization: {URL}",
|
||||||
"translation": "Visit the URL for authorization: {Url}",
|
"translation": "Visit the URL for authorization: {URL}",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source.",
|
||||||
"placeholders": [
|
"placeholders": [
|
||||||
{
|
{
|
||||||
"id": "Url",
|
"id": "URL",
|
||||||
"string": "%[1]v",
|
"string": "%[1]v",
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"underlyingType": "string",
|
"underlyingType": "string",
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "authUrl.Url"
|
"expr": "authURL.URL"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Error: OAuth state mismatch",
|
"id": "Error: OAuth state mismatch",
|
||||||
"message": "Error: OAuth state mismatch",
|
"message": "Error: OAuth state mismatch",
|
||||||
"translation": "Error: OAuth state mismatch",
|
"translation": "Error: OAuth state mismatch",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Access token received, you can use {Name} now.",
|
"id": "Access token received, you can use {Name} now.",
|
||||||
|
@ -431,20 +397,24 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "service.Name"
|
"expr": "service.Name"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "exporting",
|
"id": "exporting",
|
||||||
"message": "exporting",
|
"message": "exporting",
|
||||||
"translation": "exporting",
|
"translation": "exporting",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "importing",
|
"id": "importing",
|
||||||
"message": "importing",
|
"message": "importing",
|
||||||
"translation": "importing",
|
"translation": "importing",
|
||||||
|
"translatorComment": "Copied from source."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "aborted",
|
||||||
|
"message": "aborted",
|
||||||
|
"translation": "aborted",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source.",
|
||||||
"fuzzy": true
|
"fuzzy": true
|
||||||
},
|
},
|
||||||
|
@ -452,48 +422,42 @@
|
||||||
"id": "done",
|
"id": "done",
|
||||||
"message": "done",
|
"message": "done",
|
||||||
"translation": "done",
|
"translation": "done",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Yes",
|
"id": "Yes",
|
||||||
"message": "Yes",
|
"message": "Yes",
|
||||||
"translation": "Yes",
|
"translation": "Yes",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "No",
|
"id": "No",
|
||||||
"message": "No",
|
"message": "No",
|
||||||
"translation": "No",
|
"translation": "No",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "no existing service configurations",
|
"id": "no existing service configurations",
|
||||||
"message": "no existing service configurations",
|
"message": "no existing service configurations",
|
||||||
"translation": "no existing service configurations",
|
"translation": "no existing service configurations",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Service",
|
"id": "Service",
|
||||||
"message": "Service",
|
"message": "Service",
|
||||||
"translation": "Service",
|
"translation": "Service",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Backend",
|
"id": "Backend",
|
||||||
"message": "Backend",
|
"message": "Backend",
|
||||||
"translation": "Backend",
|
"translation": "Backend",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Transferring {Entity} from {SourceName} to {TargetName}...",
|
"id": "Transferring {Entity} from {SourceName} to {TargetName}…",
|
||||||
"message": "Transferring {Entity} from {SourceName} to {TargetName}...",
|
"message": "Transferring {Entity} from {SourceName} to {TargetName}…",
|
||||||
"translation": "Transferring {Entity} from {SourceName} to {TargetName}...",
|
"translation": "Transferring {Entity} from {SourceName} to {TargetName}…",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source.",
|
||||||
"placeholders": [
|
"placeholders": [
|
||||||
{
|
{
|
||||||
|
@ -520,8 +484,7 @@
|
||||||
"argNum": 3,
|
"argNum": 3,
|
||||||
"expr": "c.targetName"
|
"expr": "c.targetName"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "From timestamp: {Arg_1} ({Arg_2})",
|
"id": "From timestamp: {Arg_1} ({Arg_2})",
|
||||||
|
@ -543,31 +506,7 @@
|
||||||
"underlyingType": "interface{}",
|
"underlyingType": "interface{}",
|
||||||
"argNum": 2
|
"argNum": 2
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
|
|
||||||
"message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
|
|
||||||
"translation": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
|
|
||||||
"translatorComment": "Copied from source.",
|
|
||||||
"placeholders": [
|
|
||||||
{
|
|
||||||
"id": "Arg_1",
|
|
||||||
"string": "%[1]v",
|
|
||||||
"type": "",
|
|
||||||
"underlyingType": "interface{}",
|
|
||||||
"argNum": 1
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "Arg_2",
|
|
||||||
"string": "%[2]s",
|
|
||||||
"type": "",
|
|
||||||
"underlyingType": "string",
|
|
||||||
"argNum": 2
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
|
"id": "Imported {ImportCount} of {TotalCount} {Entity} into {TargetName}.",
|
||||||
|
@ -607,15 +546,35 @@
|
||||||
"argNum": 4,
|
"argNum": 4,
|
||||||
"expr": "c.targetName"
|
"expr": "c.targetName"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
},
|
||||||
|
{
|
||||||
|
"id": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
|
||||||
|
"message": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
|
||||||
|
"translation": "Import failed, last reported timestamp was {Arg_1} ({Arg_2})",
|
||||||
|
"translatorComment": "Copied from source.",
|
||||||
|
"placeholders": [
|
||||||
|
{
|
||||||
|
"id": "Arg_1",
|
||||||
|
"string": "%[1]v",
|
||||||
|
"type": "",
|
||||||
|
"underlyingType": "interface{}",
|
||||||
|
"argNum": 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "Arg_2",
|
||||||
|
"string": "%[2]s",
|
||||||
|
"type": "",
|
||||||
|
"underlyingType": "string",
|
||||||
|
"argNum": 2
|
||||||
|
}
|
||||||
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Import log:",
|
"id": "Import log:",
|
||||||
"message": "Import log:",
|
"message": "Import log:",
|
||||||
"translation": "Import log:",
|
"translation": "Import log:",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "{Type}: {Message}",
|
"id": "{Type}: {Message}",
|
||||||
|
@ -639,8 +598,7 @@
|
||||||
"argNum": 2,
|
"argNum": 2,
|
||||||
"expr": "entry.Message"
|
"expr": "entry.Message"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "invalid timestamp string \"{FlagValue}\"",
|
"id": "invalid timestamp string \"{FlagValue}\"",
|
||||||
|
@ -656,8 +614,7 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "flagValue"
|
"expr": "flagValue"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "Latest timestamp: {Arg_1} ({Arg_2})",
|
"id": "Latest timestamp: {Arg_1} ({Arg_2})",
|
||||||
|
@ -679,22 +636,19 @@
|
||||||
"underlyingType": "interface{}",
|
"underlyingType": "interface{}",
|
||||||
"argNum": 2
|
"argNum": 2
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "no configuration file defined, cannot write config",
|
"id": "no configuration file defined, cannot write config",
|
||||||
"message": "no configuration file defined, cannot write config",
|
"message": "no configuration file defined, cannot write config",
|
||||||
"translation": "no configuration file defined, cannot write config",
|
"translation": "no configuration file defined, cannot write config",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "key must only consist of A-Za-z0-9_-",
|
"id": "key must only consist of A-Za-z0-9_-",
|
||||||
"message": "key must only consist of A-Za-z0-9_-",
|
"message": "key must only consist of A-Za-z0-9_-",
|
||||||
"translation": "key must only consist of A-Za-z0-9_-",
|
"translation": "key must only consist of A-Za-z0-9_-",
|
||||||
"translatorComment": "Copied from source.",
|
"translatorComment": "Copied from source."
|
||||||
"fuzzy": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "no service configuration \"{Name}\"",
|
"id": "no service configuration \"{Name}\"",
|
||||||
|
@ -710,8 +664,7 @@
|
||||||
"argNum": 1,
|
"argNum": 1,
|
||||||
"expr": "name"
|
"expr": "name"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"fuzzy": true
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
|
@ -6,4 +6,4 @@ package are published under the conditions of CC0 1.0 Universal (CC0 1.0)
|
||||||
|
|
||||||
package translations
|
package translations
|
||||||
|
|
||||||
//go:generate gotext -srclang=en update -out=catalog.go -lang=en,de go.uploadedlobster.com/scotty
|
//go:generate go tool gotext -srclang=en update -out=catalog.go -lang=en,de go.uploadedlobster.com/scotty
|
||||||
|
|
|
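As a usage note on the directive above (a sketch, not code from the repository: the package path
internal/translations and the Go 1.24 tool directive in go.mod are assumptions), invoking gotext
through `go tool` means the catalog is regenerated with the gotext version declared in go.mod
rather than a separately installed binary:

// Package translations bundles the generated gotext message catalogs.
//
// With the directive below in place, running
//
//	go generate ./internal/translations
//
// (or `go generate ./...` from the repository root) regenerates catalog.go
// and the per-locale JSON catalogs via `go tool gotext`.
package translations

//go:generate go tool gotext -srclang=en update -out=catalog.go -lang=en,de go.uploadedlobster.com/scotty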
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023-2024 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Scotty is free software: you can redistribute it and/or modify it under the
 terms of the GNU General Public License as published by the Free Software
@@ -17,7 +17,8 @@ package version

 const (
 	AppName    = "scotty"
-	AppVersion = "0.4.1"
+	AppVersion = "0.5.2"
+	AppURL     = "https://git.sr.ht/~phw/scotty/"
 )

 func UserAgent() string {
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -22,13 +22,34 @@ THE SOFTWARE.

 package jspf

-import "time"
+import (
+	"encoding/json"
+	"fmt"
+	"time"
+)
+
+// Represents a JSPF extension
+type Extension any
+
+// A map of JSPF extensions
+type ExtensionMap map[string]Extension
+
+// Parses the extension with the given ID and unmarshals it into "v".
+// If the extensions is not found or the data cannot be unmarshalled,
+// an error is returned.
+func (e ExtensionMap) Get(id string, v any) error {
+	ext, ok := e[id]
+	if !ok {
+		return fmt.Errorf("extension %q not found", id)
+	}
+	return unmarshalExtension(ext, v)
+}

 const (
 	// The identifier for the MusicBrainz / ListenBrainz JSPF playlist extension
-	MusicBrainzPlaylistExtensionId = "https://musicbrainz.org/doc/jspf#playlist"
+	MusicBrainzPlaylistExtensionID = "https://musicbrainz.org/doc/jspf#playlist"
 	// The identifier for the MusicBrainz / ListenBrainz JSPF track extension
-	MusicBrainzTrackExtensionId = "https://musicbrainz.org/doc/jspf#track"
+	MusicBrainzTrackExtensionID = "https://musicbrainz.org/doc/jspf#track"
 )

 // MusicBrainz / ListenBrainz JSPF track extension
@@ -83,3 +104,11 @@ type MusicBrainzTrackExtension struct {
 	// this document.
 	AdditionalMetadata map[string]any `json:"additional_metadata,omitempty"`
 }
+
+func unmarshalExtension(ext Extension, v any) error {
+	asJson, err := json.Marshal(ext)
+	if err != nil {
+		return err
+	}
+	return json.Unmarshal(asJson, v)
+}
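For readers following the JSPF changes: the new ExtensionMap.Get turns reading a typed extension
into a one-liner. A minimal sketch of how it can be used with the MusicBrainz track extension
defined above (the track literal is illustrative, not code from the repository):

package main

import (
	"fmt"
	"time"

	"go.uploadedlobster.com/scotty/pkg/jspf"
)

func main() {
	track := jspf.Track{
		Title: "Oweynagat",
		Extension: jspf.ExtensionMap{
			jspf.MusicBrainzTrackExtensionID: jspf.MusicBrainzTrackExtension{
				AddedAt: time.Date(2023, 11, 24, 7, 47, 50, 0, time.UTC),
				AddedBy: "scotty",
			},
		},
	}

	// Get round-trips the stored extension value through JSON and
	// unmarshals it into the typed struct passed by pointer.
	var ext jspf.MusicBrainzTrackExtension
	if err := track.Extension.Get(jspf.MusicBrainzTrackExtensionID, &ext); err != nil {
		fmt.Println("extension not found:", err)
		return
	}
	fmt.Println(ext.AddedBy) // scotty
}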
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -26,6 +26,7 @@ import (
 	"bytes"
 	"fmt"
 	"log"
+	"testing"
 	"time"

 	"go.uploadedlobster.com/scotty/pkg/jspf"
@@ -38,8 +39,8 @@ func ExampleMusicBrainzTrackExtension() {
 		Tracks: []jspf.Track{
 			{
 				Title: "Oweynagat",
-				Extension: map[string]any{
-					jspf.MusicBrainzTrackExtensionId: jspf.MusicBrainzTrackExtension{
+				Extension: jspf.ExtensionMap{
+					jspf.MusicBrainzTrackExtensionID: jspf.MusicBrainzTrackExtension{
 						AddedAt: time.Date(2023, 11, 24, 07, 47, 50, 0, time.UTC),
 						AddedBy: "scotty",
 					},
@@ -72,3 +73,29 @@ func ExampleMusicBrainzTrackExtension() {
 	// }
 	// }
 }
+
+func TestExtensionMapGet(t *testing.T) {
+	ext := jspf.ExtensionMap{
+		jspf.MusicBrainzTrackExtensionID: jspf.MusicBrainzTrackExtension{
+			AddedAt: time.Date(2023, 11, 24, 07, 47, 50, 0, time.UTC),
+			AddedBy: "scotty",
+		},
+	}
+	var trackExt jspf.MusicBrainzTrackExtension
+	err := ext.Get(jspf.MusicBrainzTrackExtensionID, &trackExt)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if trackExt.AddedBy != "scotty" {
+		t.Fatalf("expected 'scotty', got '%s'", trackExt.AddedBy)
+	}
+}
+
+func TestExtensionMapGetNotFound(t *testing.T) {
+	ext := jspf.ExtensionMap{}
+	var trackExt jspf.MusicBrainzTrackExtension
+	err := ext.Get(jspf.MusicBrainzTrackExtensionID, &trackExt)
+	if err == nil {
+		t.Fatal("expected ExtensionMap.Get to return an error")
+	}
+}
@@ -1,5 +1,5 @@
 /*
-Copyright © 2023 Philipp Wolfer <phw@uploadedlobster.com>
+Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -44,7 +44,7 @@ type Playlist struct {
 	Attribution []Attribution  `json:"attribution,omitempty"`
 	Links       []Link         `json:"link,omitempty"`
 	Meta        []Meta         `json:"meta,omitempty"`
-	Extension   map[string]any `json:"extension,omitempty"`
+	Extension   ExtensionMap   `json:"extension,omitempty"`
 	Tracks      []Track        `json:"track"`
 }

@@ -57,10 +57,10 @@ type Track struct {
 	Info       string         `json:"info,omitempty"`
 	Album      string         `json:"album,omitempty"`
 	TrackNum   int            `json:"trackNum,omitempty"`
-	Duration   int            `json:"duration,omitempty"`
+	Duration   int64          `json:"duration,omitempty"`
 	Links      []Link         `json:"link,omitempty"`
 	Meta       []Meta         `json:"meta,omitempty"`
-	Extension  map[string]any `json:"extension,omitempty"`
+	Extension  ExtensionMap   `json:"extension,omitempty"`
 }

 type Attribution map[string]string
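A quick illustration of the updated playlist types (a sketch under assumptions: the extension
payload shown here is made up, and the milliseconds interpretation of the JSPF duration field is
the likely reason for widening Duration to int64, not something stated in this diff):

package main

import (
	"encoding/json"
	"fmt"

	"go.uploadedlobster.com/scotty/pkg/jspf"
)

func main() {
	playlist := jspf.Playlist{
		Extension: jspf.ExtensionMap{
			// Any JSON-marshallable value can be stored as an extension.
			jspf.MusicBrainzPlaylistExtensionID: map[string]any{"public": true},
		},
		Tracks: []jspf.Track{
			{
				Title:    "Reign",
				Album:    "Home Economics",
				TrackNum: 1,
				Duration: 271000, // duration in milliseconds, hence the int64 field
			},
		},
	}

	out, err := json.MarshalIndent(playlist, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}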
@@ -13,6 +13,7 @@ You should have received a copy of the GNU General Public License along with
 Scotty. If not, see <https://www.gnu.org/licenses/>.
 */

+// Helper functions to set up rate limiting with resty.
 package ratelimit

 import (
pkg/scrobblerlog/parser.go (new file, 286 lines)

@@ -0,0 +1,286 @@
/*
Copyright © 2023-2025 Philipp Wolfer <phw@uploadedlobster.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

// Package to parse and write .scrobbler.log files as written by Rockbox.
//
// The parser supports reading version 1.1 and 1.0 of the scrobbler log file
// format. The latter is only supported if encoded in UTF-8.
//
// When written it always writes version 1.1 of the scrobbler log file format,
// which includes the MusicBrainz recording ID as the last field of each row.
//
// See
//   - https://www.rockbox.org/wiki/LastFMLog
//   - https://git.rockbox.org/cgit/rockbox.git/tree/apps/plugins/lastfm_scrobbler.c
//   - https://web.archive.org/web/20110110053056/http://www.audioscrobbler.net/wiki/Portable_Player_Logging
package scrobblerlog

import (
	"bufio"
	"encoding/csv"
	"fmt"
	"io"
	"strconv"
	"strings"
	"time"

	"go.uploadedlobster.com/mbtypes"
)

// TZInfo is the timezone information in the header of the scrobbler log file.
// It can be "UTC" or "UNKNOWN", if the device writing the scrobbler log file
// knows the time, but not the timezone.
type TZInfo string

const (
	TimezoneUnknown TZInfo = "UNKNOWN"
	TimezoneUTC     TZInfo = "UTC"
)

// L if listened at least 50% or S if skipped
type Rating string

const (
	RatingListened Rating = "L"
	RatingSkipped  Rating = "S"
)

// A single entry of a track in the scrobbler log file.
type Record struct {
	ArtistName             string
	AlbumName              string
	TrackName              string
	TrackNumber            int
	Duration               time.Duration
	Rating                 Rating
	Timestamp              time.Time
	MusicBrainzRecordingID mbtypes.MBID
}

// Represents a scrobbler log file.
type ScrobblerLog struct {
	TZ      TZInfo
	Client  string
	Records []Record
	// Timezone to be used for timestamps in the log file,
	// if TZ is set to [TimezoneUnknown].
	FallbackTimezone *time.Location
}

// Parses a scrobbler log file from the given reader.
//
// The reader must provide a valid scrobbler log file with a valid header.
// This function implicitly calls [ScrobblerLog.ReadHeader].
func (l *ScrobblerLog) Parse(data io.Reader, ignoreSkipped bool) error {
	l.Records = make([]Record, 0)

	reader := bufio.NewReader(data)
	err := l.ReadHeader(reader)
	if err != nil {
		return err
	}

	tsvReader := csv.NewReader(reader)
	tsvReader.Comma = '\t'
	// Row length is often flexible
	tsvReader.FieldsPerRecord = -1

	for {
		// A row is:
		// artistName releaseName trackName trackNumber duration rating timestamp recordingMBID
		row, err := tsvReader.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			return err
		}

		// fmt.Printf("row: %v\n", row)

		// We consider only the last field (recording MBID) optional
		// This was added in the 1.1 file format.
		if len(row) < 7 {
			line, _ := tsvReader.FieldPos(0)
			return fmt.Errorf("invalid record in scrobblerlog line %v", line)
		}

		record, err := l.rowToRecord(row)
		if err != nil {
			return err
		}

		if ignoreSkipped && record.Rating == RatingSkipped {
			continue
		}

		l.Records = append(l.Records, record)
	}

	return nil
}

// Append writes the given records to the writer.
//
// The writer should be for an existing scrobbler log file or
// [ScrobblerLog.WriteHeader] should be called before this function.
// Returns the last timestamp of the records written.
func (l *ScrobblerLog) Append(data io.Writer, records []Record) (lastTimestamp time.Time, err error) {
	tsvWriter := csv.NewWriter(data)
	tsvWriter.Comma = '\t'

	for _, record := range records {
		if record.Timestamp.After(lastTimestamp) {
			lastTimestamp = record.Timestamp
		}

		// A row is:
		// artistName releaseName trackName trackNumber duration rating timestamp recordingMBID
		err = tsvWriter.Write([]string{
			record.ArtistName,
			record.AlbumName,
			record.TrackName,
			strconv.Itoa(record.TrackNumber),
			strconv.Itoa(int(record.Duration.Seconds())),
			string(record.Rating),
			strconv.FormatInt(record.Timestamp.Unix(), 10),
			string(record.MusicBrainzRecordingID),
		})
	}

	tsvWriter.Flush()
	return
}

// Parses just the header of a scrobbler log file from the given reader.
//
// This function sets [ScrobblerLog.TZ] and [ScrobblerLog.Client].
func (l *ScrobblerLog) ReadHeader(reader *bufio.Reader) error {
	// Skip header
	for i := 0; i < 3; i++ {
		line, _, err := reader.ReadLine()
		if err != nil {
			return err
		}

		if len(line) == 0 || line[0] != '#' {
			err = fmt.Errorf("unexpected header (line %v)", i)
		} else {
			text := string(line)
			if i == 0 && !strings.HasPrefix(text, "#AUDIOSCROBBLER/1") {
				err = fmt.Errorf("not a scrobbler log file")
			}

			// The timezone can be set to "UTC" or "UNKNOWN", if the device writing
			// the log knows the time, but not the timezone.
			timezone, found := strings.CutPrefix(text, "#TZ/")
			if found {
				l.TZ = TZInfo(timezone)
				continue
			}

			client, found := strings.CutPrefix(text, "#CLIENT/")
			if found {
				l.Client = client
				continue
			}
		}

		if err != nil {
			return err
		}
	}
	return nil
}

// Writes the header of a scrobbler log file to the given writer.
func (l *ScrobblerLog) WriteHeader(writer io.Writer) error {
	headers := []string{
		"#AUDIOSCROBBLER/1.1\n",
		"#TZ/" + string(l.TZ) + "\n",
		"#CLIENT/" + l.Client + "\n",
	}
	for _, line := range headers {
		_, err := writer.Write([]byte(line))
		if err != nil {
			return err
		}
	}
	return nil
}

func (l ScrobblerLog) rowToRecord(row []string) (Record, error) {
	var record Record
	trackNumber, err := strconv.Atoi(row[3])
	if err != nil {
		return record, err
	}

	duration, err := strconv.Atoi(row[4])
	if err != nil {
		return record, err
	}

	timestamp, err := strconv.ParseInt(row[6], 10, 64)
	if err != nil {
		return record, err
	}

	var timezone *time.Location = nil
	if l.TZ == TimezoneUnknown {
		timezone = l.FallbackTimezone
	}

	record = Record{
		ArtistName:  row[0],
		AlbumName:   row[1],
		TrackName:   row[2],
		TrackNumber: trackNumber,
		Duration:    time.Duration(duration) * time.Second,
		Rating:      Rating(row[5]),
		Timestamp:   timeFromLocalTimestamp(timestamp, timezone),
	}

	if len(row) > 7 {
		record.MusicBrainzRecordingID = mbtypes.MBID(row[7])
	}

	return record, nil
}

// Convert a Unix timestamp to a [time.Time] object, but treat the timestamp
// as being in the given location's timezone instead of UTC.
// If location is nil, the timestamp is returned assumed to already be in UTC
// and returned unchanged.
func timeFromLocalTimestamp(timestamp int64, location *time.Location) time.Time {
	t := time.Unix(timestamp, 0)

	// The time is now in UTC. Get the offset to the requested timezone
	// and shift the time accordingly.
	if location != nil {
		_, offset := t.In(location).Zone()
		if offset != 0 {
			t = t.Add(-time.Duration(offset) * time.Second)
		}
	}

	return t
}
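A minimal usage sketch for the new package (the log file path is made up; the calls are the ones
defined in parser.go above):

package main

import (
	"fmt"
	"os"
	"time"

	"go.uploadedlobster.com/scotty/pkg/scrobblerlog"
)

func main() {
	f, err := os.Open(".scrobbler.log") // hypothetical path
	if err != nil {
		panic(err)
	}
	defer f.Close()

	log := scrobblerlog.ScrobblerLog{
		// Only consulted when the file header says "#TZ/UNKNOWN".
		FallbackTimezone: time.FixedZone("UTC+2", 2*60*60),
	}
	// Second argument: drop records rated "S" (skipped).
	if err := log.Parse(f, true); err != nil {
		panic(err)
	}

	fmt.Println("client:", log.Client)
	for _, r := range log.Records {
		fmt.Printf("%s - %s (%s)\n", r.ArtistName, r.TrackName, r.Timestamp.UTC())
	}
}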
@@ -30,8 +30,8 @@ import (

 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
-	"go.uploadedlobster.com/scotty/internal/backends/scrobblerlog"
-	"go.uploadedlobster.com/scotty/internal/models"
+	"go.uploadedlobster.com/mbtypes"
+	"go.uploadedlobster.com/scotty/pkg/scrobblerlog"
 )

 var testScrobblerLog = `#AUDIOSCROBBLER/1.1
@@ -47,67 +47,84 @@ Teeth Agency You Don't Have To Live In Pain Wolfs Jam 2 107 L 1260359404 1262bea
 func TestParser(t *testing.T) {
 	assert := assert.New(t)
 	data := bytes.NewBufferString(testScrobblerLog)
-	result, err := scrobblerlog.Parse(data, true)
+	result := scrobblerlog.ScrobblerLog{}
+	err := result.Parse(data, false)
 	require.NoError(t, err)
-	assert.Equal("UNKNOWN", result.Timezone)
+	assert.Equal(scrobblerlog.TimezoneUnknown, result.TZ)
 	assert.Equal("Rockbox sansaclipplus $Revision$", result.Client)
-	assert.Len(result.Listens, 5)
-	listen1 := result.Listens[0]
-	assert.Equal("Özcan Deniz", listen1.ArtistName())
-	assert.Equal("Ses ve Ayrilik", listen1.ReleaseName)
-	assert.Equal("Sevdanin rengi (sipacik) byMrTurkey", listen1.TrackName)
-	assert.Equal(5, listen1.TrackNumber)
-	assert.Equal(time.Duration(306*time.Second), listen1.Duration)
-	assert.Equal("L", listen1.AdditionalInfo["rockbox_rating"])
-	assert.Equal(time.Unix(1260342084, 0), listen1.ListenedAt)
-	assert.Equal(models.MBID(""), listen1.RecordingMbid)
-	listen4 := result.Listens[3]
-	assert.Equal("S", listen4.AdditionalInfo["rockbox_rating"])
-	assert.Equal(models.MBID("385ba9e9-626d-4750-a607-58e541dca78e"), listen4.RecordingMbid)
+	assert.Len(result.Records, 5)
+	record1 := result.Records[0]
+	assert.Equal("Özcan Deniz", record1.ArtistName)
+	assert.Equal("Ses ve Ayrilik", record1.AlbumName)
+	assert.Equal("Sevdanin rengi (sipacik) byMrTurkey", record1.TrackName)
+	assert.Equal(5, record1.TrackNumber)
+	assert.Equal(time.Duration(306*time.Second), record1.Duration)
+	assert.Equal(scrobblerlog.RatingListened, record1.Rating)
+	assert.Equal(time.Unix(1260342084, 0), record1.Timestamp)
+	assert.Equal(mbtypes.MBID(""), record1.MusicBrainzRecordingID)
+	record4 := result.Records[3]
+	assert.Equal(scrobblerlog.RatingSkipped, record4.Rating)
+	assert.Equal(mbtypes.MBID("385ba9e9-626d-4750-a607-58e541dca78e"),
+		record4.MusicBrainzRecordingID)
 }

-func TestParserExcludeSkipped(t *testing.T) {
+func TestParserIgnoreSkipped(t *testing.T) {
 	assert := assert.New(t)
 	data := bytes.NewBufferString(testScrobblerLog)
-	result, err := scrobblerlog.Parse(data, false)
+	result := scrobblerlog.ScrobblerLog{}
+	err := result.Parse(data, true)
 	require.NoError(t, err)
-	assert.Len(result.Listens, 4)
-	listen4 := result.Listens[3]
-	assert.Equal("L", listen4.AdditionalInfo["rockbox_rating"])
-	assert.Equal(models.MBID("1262beaf-19f8-4534-b9ed-7eef9ca8e83f"), listen4.RecordingMbid)
+	assert.Len(result.Records, 4)
+	record4 := result.Records[3]
+	assert.Equal(scrobblerlog.RatingListened, record4.Rating)
+	assert.Equal(mbtypes.MBID("1262beaf-19f8-4534-b9ed-7eef9ca8e83f"),
+		record4.MusicBrainzRecordingID)
 }

-func TestWrite(t *testing.T) {
+func TestParserFallbackTimezone(t *testing.T) {
+	assert := assert.New(t)
+	data := bytes.NewBufferString(testScrobblerLog)
+	result := scrobblerlog.ScrobblerLog{
+		FallbackTimezone: time.FixedZone("UTC+2", 7200),
+	}
+	err := result.Parse(data, false)
+	require.NoError(t, err)
+	record1 := result.Records[0]
+	assert.Equal(
+		time.Unix(1260342084, 0).Add(-2*time.Hour),
+		record1.Timestamp,
+	)
+}
+
+func TestAppend(t *testing.T) {
 	assert := assert.New(t)
 	data := make([]byte, 0, 10)
 	buffer := bytes.NewBuffer(data)
 	log := scrobblerlog.ScrobblerLog{
-		Timezone: "Unknown",
+		TZ:     scrobblerlog.TimezoneUnknown,
 		Client: "Rockbox foo $Revision$",
-		Listens: []models.Listen{
-			{
-				ListenedAt: time.Unix(1699572072, 0),
-				Track: models.Track{
-					ArtistNames: []string{"Prinzhorn Dance School"},
-					ReleaseName: "Home Economics",
-					TrackName:   "Reign",
-					TrackNumber: 1,
-					Duration:    271 * time.Second,
-					RecordingMbid: models.MBID("b59cf4e7-caee-4019-a844-79d2c58d4dff"),
-					AdditionalInfo: models.AdditionalInfo{"rockbox_rating": "L"},
-				},
-			},
+	}
+	records := []scrobblerlog.Record{
+		{
+			ArtistName:             "Prinzhorn Dance School",
+			AlbumName:              "Home Economics",
+			TrackName:              "Reign",
+			TrackNumber:            1,
+			Duration:               271 * time.Second,
+			Rating:                 scrobblerlog.RatingListened,
+			Timestamp:              time.Unix(1699572072, 0),
+			MusicBrainzRecordingID: mbtypes.MBID("b59cf4e7-caee-4019-a844-79d2c58d4dff"),
 		},
 	}
-	err := scrobblerlog.WriteHeader(buffer, &log)
+	err := log.WriteHeader(buffer)
 	require.NoError(t, err)
-	lastTimestamp, err := scrobblerlog.Write(buffer, log.Listens)
+	lastTimestamp, err := log.Append(buffer, records)
 	require.NoError(t, err)
 	result := buffer.String()
 	lines := strings.Split(result, "\n")
 	assert.Equal(5, len(lines))
 	assert.Equal("#AUDIOSCROBBLER/1.1", lines[0])
-	assert.Equal("#TZ/Unknown", lines[1])
+	assert.Equal("#TZ/UNKNOWN", lines[1])
 	assert.Equal("#CLIENT/Rockbox foo $Revision$", lines[2])
 	assert.Equal(
 		"Prinzhorn Dance School\tHome Economics\tReign\t1\t271\tL\t1699572072\tb59cf4e7-caee-4019-a844-79d2c58d4dff",
@@ -120,9 +137,9 @@ func TestReadHeader(t *testing.T) {
 	data := bytes.NewBufferString(testScrobblerLog)
 	reader := bufio.NewReader(data)
 	log := scrobblerlog.ScrobblerLog{}
-	err := scrobblerlog.ReadHeader(reader, &log)
+	err := log.ReadHeader(reader)
 	assert.NoError(t, err)
-	assert.Equal(t, log.Timezone, "UNKNOWN")
+	assert.Equal(t, log.TZ, scrobblerlog.TimezoneUnknown)
 	assert.Equal(t, log.Client, "Rockbox sansaclipplus $Revision$")
-	assert.Empty(t, log.Listens)
+	assert.Empty(t, log.Records)
 }
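And the write path, mirroring the TestAppend test above (again a sketch; the output file name is
made up, the API is the one shown in parser.go):

package main

import (
	"os"
	"time"

	"go.uploadedlobster.com/mbtypes"
	"go.uploadedlobster.com/scotty/pkg/scrobblerlog"
)

func main() {
	out, err := os.Create("scrobbler-export.log") // hypothetical path
	if err != nil {
		panic(err)
	}
	defer out.Close()

	log := scrobblerlog.ScrobblerLog{
		TZ:     scrobblerlog.TimezoneUTC,
		Client: "scotty example",
	}
	if err := log.WriteHeader(out); err != nil {
		panic(err)
	}

	records := []scrobblerlog.Record{
		{
			ArtistName:             "Prinzhorn Dance School",
			AlbumName:              "Home Economics",
			TrackName:              "Reign",
			TrackNumber:            1,
			Duration:               271 * time.Second,
			Rating:                 scrobblerlog.RatingListened,
			Timestamp:              time.Unix(1699572072, 0),
			MusicBrainzRecordingID: mbtypes.MBID("b59cf4e7-caee-4019-a844-79d2c58d4dff"),
		},
	}
	// Append returns the newest timestamp it wrote, which callers can persist
	// to resume a later export from that point.
	lastTimestamp, err := log.Append(out, records)
	if err != nil {
		panic(err)
	}
	_ = lastTimestamp
}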