Update to new Kirika, fixed cuetools source, add replaygain, initial metadata handler
This commit is contained in:
parent
3eb36def70
commit
5d1739d4be
|
@ -155,6 +155,8 @@ module media {
|
||||||
property cddb1 -> int32;
|
property cddb1 -> int32;
|
||||||
property discid -> str;
|
property discid -> str;
|
||||||
property tocid -> str;
|
property tocid -> str;
|
||||||
|
property replayGain -> float64;
|
||||||
|
property replayPeak -> float64;
|
||||||
required multi link recordings -> Recording {
|
required multi link recordings -> Recording {
|
||||||
property number -> int32;
|
property number -> int32;
|
||||||
}
|
}
|
||||||
|
@ -171,6 +173,8 @@ module media {
|
||||||
property cuetools_crc32 -> int32;
|
property cuetools_crc32 -> int32;
|
||||||
property accurip_v1 -> int32;
|
property accurip_v1 -> int32;
|
||||||
property accurip_v2 -> int32;
|
property accurip_v2 -> int32;
|
||||||
|
property replayGain -> float64;
|
||||||
|
property replayPeak -> float64;
|
||||||
|
|
||||||
required property length -> duration;
|
required property length -> duration;
|
||||||
required property lossless -> bool;
|
required property lossless -> bool;
|
||||||
|
|
12
dbschema/migrations/00002.edgeql
Normal file
12
dbschema/migrations/00002.edgeql
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
CREATE MIGRATION m1c5m2jbnf4ngpwdwpgcf4ocsfoipunwi76olkef6emlzhn43xo2la
|
||||||
|
ONTO m1cfqatfhjy63l6shobgylrpgg7yhilkvjcuy5kucknb7odov7d6oq
|
||||||
|
{
|
||||||
|
ALTER TYPE media::Recording {
|
||||||
|
CREATE PROPERTY replayGain -> std::float64;
|
||||||
|
CREATE PROPERTY replayPeak -> std::float64;
|
||||||
|
};
|
||||||
|
ALTER TYPE media::RecordingGroup {
|
||||||
|
CREATE PROPERTY replayGain -> std::float64;
|
||||||
|
CREATE PROPERTY replayPeak -> std::float64;
|
||||||
|
};
|
||||||
|
};
|
14
go.mod
14
go.mod
|
@ -4,7 +4,8 @@ go 1.18
|
||||||
|
|
||||||
require (
|
require (
|
||||||
facette.io/natsort v0.0.0-20181210072756-2cd4dd1e2dcb
|
facette.io/natsort v0.0.0-20181210072756-2cd4dd1e2dcb
|
||||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220216151616-63d8894466c0
|
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220308142904-fa72f717703c
|
||||||
|
git.gammaspectra.live/S.O.N.G/Kirika v0.0.0-20220308124222-54bb437c0b50
|
||||||
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153
|
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153
|
||||||
git.gammaspectra.live/S.O.N.G/wikitext-parser v0.0.0-20220220212802-e21f1e249ca9
|
git.gammaspectra.live/S.O.N.G/wikitext-parser v0.0.0-20220220212802-e21f1e249ca9
|
||||||
github.com/dgraph-io/badger/v3 v3.2103.2
|
github.com/dgraph-io/badger/v3 v3.2103.2
|
||||||
|
@ -20,13 +21,16 @@ require (
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
git.gammaspectra.live/S.O.N.G/go-pus v0.0.0-20220130003320-c9b07c6bec7a // indirect
|
git.gammaspectra.live/S.O.N.G/go-ebur128 v0.0.0-20220308113719-afad5c6e5c28 // indirect
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-fdkaac v0.0.0-20220228131722-e9cb84c52f48 // indirect
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-pus v0.0.0-20220227175608-6cc027f24dba // indirect
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-tta v0.2.1-0.20220226150007-096de1072bd6 // indirect
|
||||||
git.gammaspectra.live/S.O.N.G/goborator v0.0.0-20220201143845-faddd6ec920b // indirect
|
git.gammaspectra.live/S.O.N.G/goborator v0.0.0-20220201143845-faddd6ec920b // indirect
|
||||||
|
git.gammaspectra.live/S.O.N.G/goflac v0.0.0-20220305093419-2fd5e3285566 // indirect
|
||||||
github.com/andybalholm/brotli v1.0.4 // indirect
|
github.com/andybalholm/brotli v1.0.4 // indirect
|
||||||
github.com/cespare/xxhash v1.1.0 // indirect
|
github.com/cespare/xxhash v1.1.0 // indirect
|
||||||
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
||||||
github.com/cheekybits/genny v1.0.0 // indirect
|
github.com/cheekybits/genny v1.0.0 // indirect
|
||||||
github.com/cocoonlife/goflac v0.0.0-20170210142907-50ea06ed5a9d // indirect
|
|
||||||
github.com/dgraph-io/ristretto v0.1.0 // indirect
|
github.com/dgraph-io/ristretto v0.1.0 // indirect
|
||||||
github.com/dgrr/http2 v0.3.4 // indirect
|
github.com/dgrr/http2 v0.3.4 // indirect
|
||||||
github.com/dh1tw/gosamplerate v0.1.2 // indirect
|
github.com/dh1tw/gosamplerate v0.1.2 // indirect
|
||||||
|
@ -40,7 +44,10 @@ require (
|
||||||
github.com/golang/snappy v0.0.4 // indirect
|
github.com/golang/snappy v0.0.4 // indirect
|
||||||
github.com/google/flatbuffers v2.0.5+incompatible // indirect
|
github.com/google/flatbuffers v2.0.5+incompatible // indirect
|
||||||
github.com/ikawaha/kagome-dict v1.0.4 // indirect
|
github.com/ikawaha/kagome-dict v1.0.4 // indirect
|
||||||
|
github.com/jfreymuth/oggvorbis v1.0.3 // indirect
|
||||||
|
github.com/jfreymuth/vorbis v1.0.2 // indirect
|
||||||
github.com/klauspost/compress v1.14.2 // indirect
|
github.com/klauspost/compress v1.14.2 // indirect
|
||||||
|
github.com/klauspost/cpuid v1.3.1 // indirect
|
||||||
github.com/klauspost/cpuid/v2 v2.0.10 // indirect
|
github.com/klauspost/cpuid/v2 v2.0.10 // indirect
|
||||||
github.com/kvark128/minimp3 v0.0.0-20211109174940-101188771a65 // indirect
|
github.com/kvark128/minimp3 v0.0.0-20211109174940-101188771a65 // indirect
|
||||||
github.com/lucas-clemente/quic-go v0.25.0 // indirect
|
github.com/lucas-clemente/quic-go v0.25.0 // indirect
|
||||||
|
@ -62,6 +69,7 @@ require (
|
||||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||||
github.com/valyala/fasthttp v1.33.0 // indirect
|
github.com/valyala/fasthttp v1.33.0 // indirect
|
||||||
github.com/valyala/fastrand v1.1.0 // indirect
|
github.com/valyala/fastrand v1.1.0 // indirect
|
||||||
|
github.com/viert/go-lame v0.0.0-20201108052322-bb552596b11d // indirect
|
||||||
go.opencensus.io v0.23.0 // indirect
|
go.opencensus.io v0.23.0 // indirect
|
||||||
golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838 // indirect
|
golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838 // indirect
|
||||||
golang.org/x/mod v0.5.1 // indirect
|
golang.org/x/mod v0.5.1 // indirect
|
||||||
|
|
28
go.sum
28
go.sum
|
@ -9,14 +9,24 @@ dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D
|
||||||
facette.io/natsort v0.0.0-20181210072756-2cd4dd1e2dcb h1:1pSweJFeR3Pqx7uoelppkzeegfUBXL6I2FFAbfXw570=
|
facette.io/natsort v0.0.0-20181210072756-2cd4dd1e2dcb h1:1pSweJFeR3Pqx7uoelppkzeegfUBXL6I2FFAbfXw570=
|
||||||
facette.io/natsort v0.0.0-20181210072756-2cd4dd1e2dcb/go.mod h1:npRYmtaITVom7rcSo+pRURltHSG2r4TQM1cdqJ2dUB0=
|
facette.io/natsort v0.0.0-20181210072756-2cd4dd1e2dcb/go.mod h1:npRYmtaITVom7rcSo+pRURltHSG2r4TQM1cdqJ2dUB0=
|
||||||
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220216151616-63d8894466c0 h1:MdhCDoFatXYEyweos0PnvWaOOPhw0xs6Y448lqBRa5s=
|
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220308142904-fa72f717703c h1:7WUYTVqVVR22SFKVqJM69SsQGvo3QXVEWuKoUXmZbQs=
|
||||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220216151616-63d8894466c0/go.mod h1:/NY+4FrfPnEXNCmF16085cSGWZ89YS+Glpg4cTJhamg=
|
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220308142904-fa72f717703c/go.mod h1:rJu74T30Co9pxd/vPnSu4uwnHOOfAuQVi0LMt0Yf0co=
|
||||||
|
git.gammaspectra.live/S.O.N.G/Kirika v0.0.0-20220308124222-54bb437c0b50 h1:mncq7NhkVifcjIuNZEKWQ3QtuZNopnP4MWQhAaqLeVM=
|
||||||
|
git.gammaspectra.live/S.O.N.G/Kirika v0.0.0-20220308124222-54bb437c0b50/go.mod h1:S3VhlpN5phBm/HfYqxh9Ik7ZsWj2EAO4+ZgAwX8wAk0=
|
||||||
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153 h1:RMDA05IEOytScNSiE2ms98x/CVMHSlA+eVBC0VCq4po=
|
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153 h1:RMDA05IEOytScNSiE2ms98x/CVMHSlA+eVBC0VCq4po=
|
||||||
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153/go.mod h1:z6KcP5RPhMxDJaVU48sBhiYRCJ6ZJBbx1iIhkUrrhfY=
|
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153/go.mod h1:z6KcP5RPhMxDJaVU48sBhiYRCJ6ZJBbx1iIhkUrrhfY=
|
||||||
git.gammaspectra.live/S.O.N.G/go-pus v0.0.0-20220130003320-c9b07c6bec7a h1:LxrTp9gf4w5KnFHRPFLXYfoxC58GCSEmZrHI6Ogtrm0=
|
git.gammaspectra.live/S.O.N.G/go-ebur128 v0.0.0-20220308113719-afad5c6e5c28 h1:7YLU2eyGBX8juV445KlBxW71NjFAzbRvfotZBUP16Bs=
|
||||||
git.gammaspectra.live/S.O.N.G/go-pus v0.0.0-20220130003320-c9b07c6bec7a/go.mod h1:vkoHSHVM9p6vAUmXAik0gvaLcIfiQYrD6bQqVpOulUk=
|
git.gammaspectra.live/S.O.N.G/go-ebur128 v0.0.0-20220308113719-afad5c6e5c28/go.mod h1:5H4eVW9uknpn8REFr+C3ejhvXdncgm/pbGqKGC43gFY=
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-fdkaac v0.0.0-20220228131722-e9cb84c52f48 h1:MaKiBfXQl0keyfdCi1PxGOKRTiWhIs8PqCal5GhKDi0=
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-fdkaac v0.0.0-20220228131722-e9cb84c52f48/go.mod h1:pkWt//S9hLVEQaJDPu/cHHPk8vPpo/0+zHy0me4LIP4=
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-pus v0.0.0-20220227175608-6cc027f24dba h1:JEaxCVgdr3XXAuDCPAx7ttLFZaaHzTEzG+oRnVUtUKU=
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-pus v0.0.0-20220227175608-6cc027f24dba/go.mod h1:vkoHSHVM9p6vAUmXAik0gvaLcIfiQYrD6bQqVpOulUk=
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-tta v0.2.1-0.20220226150007-096de1072bd6 h1:ITVVisbHPnUclp3PBkCbXFeBhOCBcOjPdgjJ9wRH3TI=
|
||||||
|
git.gammaspectra.live/S.O.N.G/go-tta v0.2.1-0.20220226150007-096de1072bd6/go.mod h1:cobkT8u8vq/+ngLy+feKS2M2ZT2HoCec5riA/0Cex3Q=
|
||||||
git.gammaspectra.live/S.O.N.G/goborator v0.0.0-20220201143845-faddd6ec920b h1:h7+SZUINAMVCY5h3E5UFT64GLaI+tJ3V758e9inPyeA=
|
git.gammaspectra.live/S.O.N.G/goborator v0.0.0-20220201143845-faddd6ec920b h1:h7+SZUINAMVCY5h3E5UFT64GLaI+tJ3V758e9inPyeA=
|
||||||
git.gammaspectra.live/S.O.N.G/goborator v0.0.0-20220201143845-faddd6ec920b/go.mod h1:ySjuueqe5HUqvf7lWS51Cy5UP2tgJWsezOv8UIm2arA=
|
git.gammaspectra.live/S.O.N.G/goborator v0.0.0-20220201143845-faddd6ec920b/go.mod h1:ySjuueqe5HUqvf7lWS51Cy5UP2tgJWsezOv8UIm2arA=
|
||||||
|
git.gammaspectra.live/S.O.N.G/goflac v0.0.0-20220305093419-2fd5e3285566 h1:nhnwjyaAydpSU3UADA9BRJmwpmJ8UlffxvBDuHC1T+8=
|
||||||
|
git.gammaspectra.live/S.O.N.G/goflac v0.0.0-20220305093419-2fd5e3285566/go.mod h1:/po1QgOh3xynbvi4sxdY6Iw8m5WPJfGGmry2boZD8fs=
|
||||||
git.gammaspectra.live/S.O.N.G/wikitext-parser v0.0.0-20220220212802-e21f1e249ca9 h1:lIiSlBlge43zULALq20yrd4Ern1XejSRaIwWQlsc4uM=
|
git.gammaspectra.live/S.O.N.G/wikitext-parser v0.0.0-20220220212802-e21f1e249ca9 h1:lIiSlBlge43zULALq20yrd4Ern1XejSRaIwWQlsc4uM=
|
||||||
git.gammaspectra.live/S.O.N.G/wikitext-parser v0.0.0-20220220212802-e21f1e249ca9/go.mod h1:WRXSVczbEaJc+qb8f8C9ZLi4naQl32HS0WK/eccO9Hk=
|
git.gammaspectra.live/S.O.N.G/wikitext-parser v0.0.0-20220220212802-e21f1e249ca9/go.mod h1:WRXSVczbEaJc+qb8f8C9ZLi4naQl32HS0WK/eccO9Hk=
|
||||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
|
@ -40,8 +50,6 @@ github.com/cheekybits/genny v1.0.0 h1:uGGa4nei+j20rOSeDeP5Of12XVm7TGUd4dJA9RDitf
|
||||||
github.com/cheekybits/genny v1.0.0/go.mod h1:+tQajlRqAUrPI7DOSpB0XAqZYtQakVtB7wXkRAgjxjQ=
|
github.com/cheekybits/genny v1.0.0/go.mod h1:+tQajlRqAUrPI7DOSpB0XAqZYtQakVtB7wXkRAgjxjQ=
|
||||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||||
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
||||||
github.com/cocoonlife/goflac v0.0.0-20170210142907-50ea06ed5a9d h1:utj98F6D5jVv2tHYMsYzM6Z5sG71/W12Ivkd/SnFiN0=
|
|
||||||
github.com/cocoonlife/goflac v0.0.0-20170210142907-50ea06ed5a9d/go.mod h1:swNVb00X8NOH/qeHuqnqiyfecAnWlThLX+NbH8r6yHw=
|
|
||||||
github.com/cocoonlife/testify v0.0.0-20160218172820-792cc1faeb64 h1:LjPYdzoFSAJ5Tr/ElL8kzTJghXgpnOjJVbgd1UvZB1o=
|
github.com/cocoonlife/testify v0.0.0-20160218172820-792cc1faeb64 h1:LjPYdzoFSAJ5Tr/ElL8kzTJghXgpnOjJVbgd1UvZB1o=
|
||||||
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||||
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
|
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
|
||||||
|
@ -151,6 +159,10 @@ github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANyt
|
||||||
github.com/ipfs/go-cid v0.1.0 h1:YN33LQulcRHjfom/i25yoOZR4Telp1Hr/2RU3d0PnC0=
|
github.com/ipfs/go-cid v0.1.0 h1:YN33LQulcRHjfom/i25yoOZR4Telp1Hr/2RU3d0PnC0=
|
||||||
github.com/ipfs/go-cid v0.1.0/go.mod h1:rH5/Xv83Rfy8Rw6xG+id3DYAMUVmem1MowoKwdXmN2o=
|
github.com/ipfs/go-cid v0.1.0/go.mod h1:rH5/Xv83Rfy8Rw6xG+id3DYAMUVmem1MowoKwdXmN2o=
|
||||||
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
|
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
|
||||||
|
github.com/jfreymuth/oggvorbis v1.0.3 h1:MLNGGyhOMiVcvea9Dp5+gbs2SAwqwQbtrWnonYa0M0Y=
|
||||||
|
github.com/jfreymuth/oggvorbis v1.0.3/go.mod h1:1U4pqWmghcoVsCJJ4fRBKv9peUJMBHixthRlBeD6uII=
|
||||||
|
github.com/jfreymuth/vorbis v1.0.2 h1:m1xH6+ZI4thH927pgKD8JOH4eaGRm18rEE9/0WKjvNE=
|
||||||
|
github.com/jfreymuth/vorbis v1.0.2/go.mod h1:DoftRo4AznKnShRl1GxiTFCseHr4zR9BN3TWXyuzrqQ=
|
||||||
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
||||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||||
|
@ -160,6 +172,8 @@ github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8
|
||||||
github.com/klauspost/compress v1.14.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
github.com/klauspost/compress v1.14.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||||
github.com/klauspost/compress v1.14.2 h1:S0OHlFk/Gbon/yauFJ4FfJJF5V0fc5HbBTJazi28pRw=
|
github.com/klauspost/compress v1.14.2 h1:S0OHlFk/Gbon/yauFJ4FfJJF5V0fc5HbBTJazi28pRw=
|
||||||
github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||||
|
github.com/klauspost/cpuid v1.3.1 h1:5JNjFYYQrZeKRJ0734q51WCEEn2huer72Dc7K+R/b6s=
|
||||||
|
github.com/klauspost/cpuid v1.3.1/go.mod h1:bYW4mA6ZgKPob1/Dlai2LviZJO7KGI3uoWLd42rAQw4=
|
||||||
github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||||
github.com/klauspost/cpuid/v2 v2.0.10 h1:fv5GKR+e2UgD+gcxQECVT5rBwAmlFLl2mkKm7WK3ODY=
|
github.com/klauspost/cpuid/v2 v2.0.10 h1:fv5GKR+e2UgD+gcxQECVT5rBwAmlFLl2mkKm7WK3ODY=
|
||||||
|
@ -303,6 +317,8 @@ github.com/valyala/fastrand v1.1.0/go.mod h1:HWqCzkrkg6QXT8V2EXWvXCoow7vLwOFN002
|
||||||
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
|
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
|
||||||
github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49uaYMPRU=
|
github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49uaYMPRU=
|
||||||
github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM=
|
github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM=
|
||||||
|
github.com/viert/go-lame v0.0.0-20201108052322-bb552596b11d h1:LptdD7GTUZeklomtW5vZ1AHwBvDBUCZ2Ftpaz7uEI7g=
|
||||||
|
github.com/viert/go-lame v0.0.0-20201108052322-bb552596b11d/go.mod h1:EqTcYM7y4JlSfeTI47pmNu3EZQuCuLQefsQyg1Imlz8=
|
||||||
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
||||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
|
|
|
@ -1,19 +1,21 @@
|
||||||
package metadata
|
package handler
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/binary"
|
"encoding/binary"
|
||||||
"facette.io/natsort"
|
"facette.io/natsort"
|
||||||
"fmt"
|
"fmt"
|
||||||
"git.gammaspectra.live/S.O.N.G/Hibiki/panako"
|
"git.gammaspectra.live/S.O.N.G/Hibiki/panako"
|
||||||
"git.gammaspectra.live/S.O.N.G/Hibiki/utilities/audio"
|
|
||||||
"git.gammaspectra.live/S.O.N.G/Hibiki/utilities/audio/format/flac"
|
|
||||||
"git.gammaspectra.live/S.O.N.G/Hibiki/utilities/audio/format/mp3"
|
|
||||||
"git.gammaspectra.live/S.O.N.G/Hibiki/utilities/audio/format/opus"
|
|
||||||
"git.gammaspectra.live/S.O.N.G/Hibiki/utilities/specializedstore"
|
"git.gammaspectra.live/S.O.N.G/Hibiki/utilities/specializedstore"
|
||||||
|
"git.gammaspectra.live/S.O.N.G/Kirika/audio"
|
||||||
|
"git.gammaspectra.live/S.O.N.G/Kirika/audio/format"
|
||||||
|
"git.gammaspectra.live/S.O.N.G/Kirika/audio/format/guess"
|
||||||
|
"git.gammaspectra.live/S.O.N.G/Kirika/audio/replaygain"
|
||||||
|
"git.gammaspectra.live/S.O.N.G/Kirika/hasher"
|
||||||
|
"git.gammaspectra.live/S.O.N.G/METANOIA/metadata"
|
||||||
"git.gammaspectra.live/S.O.N.G/METANOIA/utilities"
|
"git.gammaspectra.live/S.O.N.G/METANOIA/utilities"
|
||||||
"github.com/dhowden/tag"
|
"github.com/dhowden/tag"
|
||||||
"github.com/oriser/regroup"
|
"github.com/oriser/regroup"
|
||||||
"golang.org/x/text/unicode/norm"
|
"io"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"log"
|
"log"
|
||||||
"os"
|
"os"
|
||||||
|
@ -29,10 +31,6 @@ import (
|
||||||
|
|
||||||
type fileEntryList []fileEntry
|
type fileEntryList []fileEntry
|
||||||
|
|
||||||
var flacFormat = flac.NewFormat()
|
|
||||||
var mp3Format = mp3.NewFormat()
|
|
||||||
var opusFormat = opus.NewFormat()
|
|
||||||
|
|
||||||
const separatorTrimSet = ",.-_()[]{}"
|
const separatorTrimSet = ",.-_()[]{}"
|
||||||
|
|
||||||
func isSeparator(b byte) bool {
|
func isSeparator(b byte) bool {
|
||||||
|
@ -56,25 +54,33 @@ type analyzeEntry struct {
|
||||||
channels int
|
channels int
|
||||||
samples int
|
samples int
|
||||||
}
|
}
|
||||||
|
replayGain struct {
|
||||||
|
albumGain float64
|
||||||
|
albumPeak float64
|
||||||
|
trackGain float64
|
||||||
|
trackPeak float64
|
||||||
|
}
|
||||||
fileMetadata tag.Metadata
|
fileMetadata tag.Metadata
|
||||||
panakoFingerprints []*panako.Fingerprint
|
panakoFingerprints []*panako.Fingerprint
|
||||||
hasherCrc32 *Hasher
|
hasherCrc32 *hasher.Hasher
|
||||||
hasherCueToolsCrc32 *Hasher
|
hasherCueToolsCrc32 *hasher.Hasher
|
||||||
hasherAccurateRipV1 *Hasher
|
hasherAccurateRipV1 *hasher.Hasher
|
||||||
hasherAccurateRipV2 *Hasher
|
hasherAccurateRipV2 *hasher.Hasher
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l fileEntryList) analyze(directory string, panakoInstance *panako.Instance) (entries []*analyzeEntry, fullCRC32 uint32, fullCTDBCRC32 uint32) {
|
func (l fileEntryList) analyze(directory string, panakoInstance *panako.Instance) (entries []*analyzeEntry, fullCRC32 uint32, fullCTDBCRC32 uint32) {
|
||||||
var waitGroups []*sync.WaitGroup
|
var waitGroups []*sync.WaitGroup
|
||||||
|
|
||||||
printStrategy := panakoInstance.GetStrategy(specializedstore.NewMemoryStore(), audio.RESAMPLER_QUALITY_LINEAR)
|
printStrategy := panakoInstance.GetStrategy(specializedstore.NewMemoryStore(), audio.Linear)
|
||||||
|
|
||||||
var joinedCTDBChannels []HasherChannel
|
var joinedCTDBChannels []format.AnalyzerChannel
|
||||||
var joinedChannels []HasherChannel
|
var joinedChannels []format.AnalyzerChannel
|
||||||
|
|
||||||
var preLastTotalSamplesWaitGroup sync.WaitGroup
|
var preLastTotalSamplesWaitGroup sync.WaitGroup
|
||||||
preLastTotalSamples := uint32(0)
|
preLastTotalSamples := uint32(0)
|
||||||
|
|
||||||
|
var replayGainSources []audio.Source
|
||||||
|
|
||||||
for trackIndex, e := range l {
|
for trackIndex, e := range l {
|
||||||
f, err := os.Open(path.Join(directory, e.Name))
|
f, err := os.Open(path.Join(directory, e.Name))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -83,27 +89,27 @@ func (l fileEntryList) analyze(directory string, panakoInstance *panako.Instance
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
var stream *audio.Stream
|
var source audio.Source
|
||||||
var analyzer HasherChannel
|
var analyzer format.AnalyzerChannel
|
||||||
|
|
||||||
meta, err := tag.ReadFrom(f)
|
meta, err := tag.ReadFrom(f)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Print(err)
|
log.Print(err)
|
||||||
err = nil
|
err = nil
|
||||||
}
|
}
|
||||||
f.Seek(0, 0)
|
f.Seek(0, io.SeekStart)
|
||||||
|
|
||||||
switch utilities.GetMimeTypeFromExtension(path.Ext(e.Name)) {
|
decoders, err := guess.GetDecoders(f, f.Name())
|
||||||
case "audio/flac":
|
if err != nil { //cannot decode
|
||||||
stream, analyzer, err = flacFormat.OpenAnalyzer(f, panakoInstance.BlockSize)
|
//TODO
|
||||||
|
log.Print(err)
|
||||||
|
f.Close()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
case "audio/mpeg;codecs=mp3":
|
if source, analyzer, err = guess.OpenAnalyzer(f, decoders); err != nil || source.Blocks == nil {
|
||||||
stream, err = mp3Format.Open(f, panakoInstance.BlockSize)
|
analyzer = nil
|
||||||
|
source, err = guess.Open(f, decoders)
|
||||||
case "audio/ogg":
|
|
||||||
fallthrough
|
|
||||||
case "audio/opus":
|
|
||||||
stream, err = opusFormat.Open(f, panakoInstance.BlockSize)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil { //cannot decode
|
if err != nil { //cannot decode
|
||||||
|
@ -113,7 +119,7 @@ func (l fileEntryList) analyze(directory string, panakoInstance *panako.Instance
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if stream == nil { //no known decoder
|
if source.Blocks == nil { //no known decoder
|
||||||
//TODO
|
//TODO
|
||||||
log.Print(fmt.Errorf("no known decoder for %s", f.Name()))
|
log.Print(fmt.Errorf("no known decoder for %s", f.Name()))
|
||||||
f.Close()
|
f.Close()
|
||||||
|
@ -126,51 +132,67 @@ func (l fileEntryList) analyze(directory string, panakoInstance *panako.Instance
|
||||||
fileMetadata: meta,
|
fileMetadata: meta,
|
||||||
}
|
}
|
||||||
|
|
||||||
var panakoWaitGroup sync.WaitGroup
|
sources := source.Split(3)
|
||||||
panakoWaitGroup.Add(1)
|
|
||||||
|
var sinkWaitGroup sync.WaitGroup
|
||||||
|
|
||||||
|
sinkWaitGroup.Add(1)
|
||||||
preLastTotalSamplesWaitGroup.Add(1)
|
preLastTotalSamplesWaitGroup.Add(1)
|
||||||
go func(add bool) {
|
go func(add bool) {
|
||||||
defer panakoWaitGroup.Done()
|
defer sinkWaitGroup.Done()
|
||||||
defer preLastTotalSamplesWaitGroup.Done()
|
defer preLastTotalSamplesWaitGroup.Done()
|
||||||
entry.panakoFingerprints = printStrategy.StreamToFingerprints(stream)
|
|
||||||
entry.audioMetadata.sampleRate = int(stream.GetSampleRate())
|
entry.audioMetadata.sampleRate = sources[0].SampleRate
|
||||||
entry.audioMetadata.channels = stream.GetChannels()
|
entry.audioMetadata.channels = sources[0].Channels
|
||||||
entry.audioMetadata.samples = stream.GetSamplesProcessed()
|
var samples int
|
||||||
|
for block := range sources[0].Blocks {
|
||||||
|
samples += len(block) / sources[0].Channels
|
||||||
|
}
|
||||||
|
entry.audioMetadata.samples = samples
|
||||||
if add {
|
if add {
|
||||||
atomic.AddUint32(&preLastTotalSamples, uint32(entry.audioMetadata.samples/entry.audioMetadata.channels))
|
atomic.AddUint32(&preLastTotalSamples, uint32(samples))
|
||||||
}
|
}
|
||||||
}(trackIndex < len(l)-1)
|
}(trackIndex < len(l)-1)
|
||||||
|
|
||||||
|
sinkWaitGroup.Add(1)
|
||||||
|
go func(add bool) {
|
||||||
|
defer sinkWaitGroup.Done()
|
||||||
|
entry.panakoFingerprints = printStrategy.BlockChannelToFingerprints(sources[1].Blocks)
|
||||||
|
}(trackIndex < len(l)-1)
|
||||||
|
|
||||||
|
replayGainSources = append(replayGainSources, sources[2])
|
||||||
|
|
||||||
//TODO: handle extra appended/prepended silence
|
//TODO: handle extra appended/prepended silence
|
||||||
|
|
||||||
if analyzer != nil {
|
if analyzer != nil {
|
||||||
if trackIndex == 0 {
|
if trackIndex == 0 {
|
||||||
channels := analyzer.Split(4)
|
channels := analyzer.Split(4)
|
||||||
joinedChannels = append(joinedChannels, channels[0])
|
joinedChannels = append(joinedChannels, channels[0])
|
||||||
ctChannels := channels[1].SkipStartSamples(Int16SamplesPerSector * 10).Split(2)
|
ctChannels := channels[1].SkipStartSamples(metadata.Int16SamplesPerSector * 10).Split(2)
|
||||||
joinedCTDBChannels = append(joinedCTDBChannels, ctChannels[0])
|
joinedCTDBChannels = append(joinedCTDBChannels, ctChannels[0])
|
||||||
entry.hasherCueToolsCrc32 = NewHasher(ctChannels[1], HashtypeCrc32)
|
entry.hasherCueToolsCrc32 = hasher.NewHasher(ctChannels[1], hasher.HashtypeCrc32)
|
||||||
arChannels := channels[2].SkipStartSamples(Int16SamplesPerSector*5 - 1).Split(2)
|
arChannels := channels[2].SkipStartSamples(metadata.Int16SamplesPerSector*5 - 1).Split(2)
|
||||||
entry.hasherAccurateRipV1 = NewHasher(arChannels[0], HashtypeAccurateRipV1Start)
|
entry.hasherAccurateRipV1 = hasher.NewHasher(arChannels[0], hasher.HashtypeAccurateRipV1Start)
|
||||||
entry.hasherAccurateRipV2 = NewHasher(arChannels[1], HashtypeAccurateRipV2Start)
|
entry.hasherAccurateRipV2 = hasher.NewHasher(arChannels[1], hasher.HashtypeAccurateRipV2Start)
|
||||||
entry.hasherCrc32 = NewHasher(channels[3], HashtypeCrc32)
|
entry.hasherCrc32 = hasher.NewHasher(channels[3], hasher.HashtypeCrc32)
|
||||||
} else if trackIndex == len(l)-1 {
|
} else if trackIndex == len(l)-1 {
|
||||||
|
|
||||||
channels := analyzer.Split(4)
|
channels := analyzer.Split(4)
|
||||||
joinedChannels = append(joinedChannels, channels[0])
|
joinedChannels = append(joinedChannels, channels[0])
|
||||||
ctChannels := channels[1].SkipEndSamplesMultiple(&preLastTotalSamplesWaitGroup, &preLastTotalSamples, Int16SamplesPerSector*10).Split(2)
|
ctChannels := channels[1].SkipEndSamplesMultiple(&preLastTotalSamplesWaitGroup, &preLastTotalSamples, metadata.Int16SamplesPerSector*10).Split(2)
|
||||||
joinedCTDBChannels = append(joinedCTDBChannels, ctChannels[0])
|
joinedCTDBChannels = append(joinedCTDBChannels, ctChannels[0])
|
||||||
entry.hasherCueToolsCrc32 = NewHasher(ctChannels[1], HashtypeCrc32)
|
entry.hasherCueToolsCrc32 = hasher.NewHasher(ctChannels[1], hasher.HashtypeCrc32)
|
||||||
arChannels := channels[2].SkipEndSamples(Int16SamplesPerSector * 5).Split(2)
|
arChannels := channels[2].SkipEndSamples(metadata.Int16SamplesPerSector * 5).Split(2)
|
||||||
entry.hasherAccurateRipV1 = NewHasher(arChannels[0], HashtypeAccurateRipV1)
|
entry.hasherAccurateRipV1 = hasher.NewHasher(arChannels[0], hasher.HashtypeAccurateRipV1)
|
||||||
entry.hasherAccurateRipV2 = NewHasher(arChannels[1], HashtypeAccurateRipV2)
|
entry.hasherAccurateRipV2 = hasher.NewHasher(arChannels[1], hasher.HashtypeAccurateRipV2)
|
||||||
entry.hasherCrc32 = NewHasher(channels[3], HashtypeCrc32)
|
entry.hasherCrc32 = hasher.NewHasher(channels[3], hasher.HashtypeCrc32)
|
||||||
} else {
|
} else {
|
||||||
channels := analyzer.Split(5)
|
channels := analyzer.Split(5)
|
||||||
joinedChannels = append(joinedChannels, channels[0])
|
joinedChannels = append(joinedChannels, channels[0])
|
||||||
joinedCTDBChannels = append(joinedCTDBChannels, channels[1])
|
joinedCTDBChannels = append(joinedCTDBChannels, channels[1])
|
||||||
entry.hasherCrc32 = NewHasher(channels[2], HashtypeCrc32)
|
entry.hasherCrc32 = hasher.NewHasher(channels[2], hasher.HashtypeCrc32)
|
||||||
entry.hasherAccurateRipV1 = NewHasher(channels[3], HashtypeAccurateRipV1)
|
entry.hasherAccurateRipV1 = hasher.NewHasher(channels[3], hasher.HashtypeAccurateRipV1)
|
||||||
entry.hasherAccurateRipV2 = NewHasher(channels[4], HashtypeAccurateRipV2)
|
entry.hasherAccurateRipV2 = hasher.NewHasher(channels[4], hasher.HashtypeAccurateRipV2)
|
||||||
}
|
}
|
||||||
|
|
||||||
waitGroups = append(waitGroups, entry.hasherCrc32.GetWaitGroup(), entry.hasherAccurateRipV1.GetWaitGroup(), entry.hasherAccurateRipV2.GetWaitGroup())
|
waitGroups = append(waitGroups, entry.hasherCrc32.GetWaitGroup(), entry.hasherAccurateRipV1.GetWaitGroup(), entry.hasherAccurateRipV2.GetWaitGroup())
|
||||||
|
@ -179,13 +201,32 @@ func (l fileEntryList) analyze(directory string, panakoInstance *panako.Instance
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
waitGroups = append(waitGroups, &panakoWaitGroup)
|
waitGroups = append(waitGroups, &sinkWaitGroup)
|
||||||
entries = append(entries, entry)
|
entries = append(entries, entry)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fullHasher := NewHasher(MergeHasherChannels(joinedChannels...), HashtypeCrc32)
|
var rgwg sync.WaitGroup
|
||||||
fullCTDBHasher := NewHasher(MergeHasherChannels(joinedCTDBChannels...), HashtypeCrc32)
|
rgwg.Add(1)
|
||||||
|
go func() {
|
||||||
|
defer rgwg.Done()
|
||||||
|
albumGain, albumPeak, trackGains, trackPeaks, err := replaygain.GetAlbumReplayGain(replayGainSources)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, e := range entries {
|
||||||
|
e.replayGain.albumGain = albumGain
|
||||||
|
e.replayGain.albumPeak = albumPeak
|
||||||
|
e.replayGain.trackGain = trackGains[i]
|
||||||
|
e.replayGain.trackPeak = trackPeaks[i]
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
waitGroups = append(waitGroups, &rgwg)
|
||||||
|
|
||||||
|
fullHasher := hasher.NewHasher(format.MergeHasherChannels(joinedChannels...), hasher.HashtypeCrc32)
|
||||||
|
fullCTDBHasher := hasher.NewHasher(format.MergeHasherChannels(joinedCTDBChannels...), hasher.HashtypeCrc32)
|
||||||
|
|
||||||
fullHasher.Wait()
|
fullHasher.Wait()
|
||||||
fullCTDBHasher.Wait()
|
fullCTDBHasher.Wait()
|
||||||
|
@ -213,17 +254,7 @@ func processAudioFiles(files []string) (result fileEntryList) {
|
||||||
result = make(fileEntryList, 0, len(files))
|
result = make(fileEntryList, 0, len(files))
|
||||||
|
|
||||||
for _, f := range files {
|
for _, f := range files {
|
||||||
normalized := norm.NFC.String(f)
|
normalized := utilities.NormalizeUnicode(f)
|
||||||
normalized = strings.ReplaceAll(normalized, "0", "0")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "1", "1")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "2", "2")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "3", "3")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "4", "4")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "5", "5")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "6", "6")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "7", "7")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "8", "8")
|
|
||||||
normalized = strings.ReplaceAll(normalized, "9", "9")
|
|
||||||
|
|
||||||
ext := strings.LastIndex(normalized, ".")
|
ext := strings.LastIndex(normalized, ".")
|
||||||
for k := 0; k < ext; k++ {
|
for k := 0; k < ext; k++ {
|
||||||
|
@ -270,13 +301,13 @@ func processAudioFiles(files []string) (result fileEntryList) {
|
||||||
}
|
}
|
||||||
|
|
||||||
type DiscHandlerResult struct {
|
type DiscHandlerResult struct {
|
||||||
TOC TOC
|
TOC metadata.TOC
|
||||||
CRC32 uint32
|
CRC32 uint32
|
||||||
CueToolsCRC32 uint32
|
CueToolsCRC32 uint32
|
||||||
Directory string
|
Directory string
|
||||||
Tracks []DiscHandlerTrack
|
Tracks []DiscHandlerTrack
|
||||||
CommonMetadata map[string]string
|
CommonMetadata map[string]string
|
||||||
Identifiers []Name
|
Identifiers []metadata.Name
|
||||||
Album string
|
Album string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -293,12 +324,13 @@ type DiscHandlerTrack struct {
|
||||||
}
|
}
|
||||||
FileMetadata struct {
|
FileMetadata struct {
|
||||||
DiscNumber int
|
DiscNumber int
|
||||||
Artists []Name
|
Artists []metadata.Name
|
||||||
Album string
|
Album string
|
||||||
Year int
|
Year int
|
||||||
TrackNumber int
|
TrackNumber int
|
||||||
Title string
|
Title string
|
||||||
OriginalTitle string
|
OriginalTitle string
|
||||||
|
Lyrics string
|
||||||
EmbeddedPicture []byte
|
EmbeddedPicture []byte
|
||||||
}
|
}
|
||||||
AudioMetadata struct {
|
AudioMetadata struct {
|
||||||
|
@ -349,7 +381,7 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
|
|
||||||
disc := &DiscHandlerResult{
|
disc := &DiscHandlerResult{
|
||||||
Directory: pathEntry,
|
Directory: pathEntry,
|
||||||
TOC: TOC{TocPregap},
|
TOC: metadata.TOC{metadata.TocPregap},
|
||||||
CommonMetadata: make(map[string]string),
|
CommonMetadata: make(map[string]string),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -372,11 +404,11 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
}
|
}
|
||||||
track.AudioMetadata.SampleRate = entry.audioMetadata.sampleRate
|
track.AudioMetadata.SampleRate = entry.audioMetadata.sampleRate
|
||||||
track.AudioMetadata.Channels = entry.audioMetadata.channels
|
track.AudioMetadata.Channels = entry.audioMetadata.channels
|
||||||
track.AudioMetadata.NumberOfFullSamples = entry.audioMetadata.samples / entry.audioMetadata.channels
|
track.AudioMetadata.NumberOfFullSamples = entry.audioMetadata.samples
|
||||||
track.AudioMetadata.Duration = time.Duration(float64(time.Second) * float64(track.AudioMetadata.NumberOfFullSamples) / float64(track.AudioMetadata.SampleRate))
|
track.AudioMetadata.Duration = time.Duration(float64(time.Second) * float64(track.AudioMetadata.NumberOfFullSamples) / float64(track.AudioMetadata.SampleRate))
|
||||||
track.Fingerprints.Panako = entry.panakoFingerprints
|
track.Fingerprints.Panako = entry.panakoFingerprints
|
||||||
|
|
||||||
disc.TOC = append(disc.TOC, disc.TOC[len(disc.TOC)-1]+track.AudioMetadata.NumberOfFullSamples/Int16SamplesPerSector)
|
disc.TOC = append(disc.TOC, disc.TOC[len(disc.TOC)-1]+track.AudioMetadata.NumberOfFullSamples/metadata.Int16SamplesPerSector)
|
||||||
|
|
||||||
if entry.hasherCrc32 != nil {
|
if entry.hasherCrc32 != nil {
|
||||||
track.Fingerprints.CRC32 = binary.BigEndian.Uint32(entry.hasherCrc32.GetResult())
|
track.Fingerprints.CRC32 = binary.BigEndian.Uint32(entry.hasherCrc32.GetResult())
|
||||||
|
@ -396,19 +428,19 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
track.FileMetadata.TrackNumber, _ = entry.fileMetadata.Track()
|
track.FileMetadata.TrackNumber, _ = entry.fileMetadata.Track()
|
||||||
track.FileMetadata.Year = entry.fileMetadata.Year()
|
track.FileMetadata.Year = entry.fileMetadata.Year()
|
||||||
if entry.fileMetadata.Artist() != "" {
|
if entry.fileMetadata.Artist() != "" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "artist",
|
Kind: "artist",
|
||||||
Name: entry.fileMetadata.Artist(),
|
Name: entry.fileMetadata.Artist(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if entry.fileMetadata.AlbumArtist() != "" {
|
if entry.fileMetadata.AlbumArtist() != "" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "albumartist",
|
Kind: "albumartist",
|
||||||
Name: entry.fileMetadata.AlbumArtist(),
|
Name: entry.fileMetadata.AlbumArtist(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if entry.fileMetadata.Composer() != "" {
|
if entry.fileMetadata.Composer() != "" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "composer",
|
Kind: "composer",
|
||||||
Name: entry.fileMetadata.Composer(),
|
Name: entry.fileMetadata.Composer(),
|
||||||
})
|
})
|
||||||
|
@ -426,43 +458,43 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
}
|
}
|
||||||
if ok && len(str) > 0 {
|
if ok && len(str) > 0 {
|
||||||
if k == "mastering" {
|
if k == "mastering" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "mastering",
|
Kind: "mastering",
|
||||||
Name: str,
|
Name: str,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if k == "lyricist" {
|
if k == "lyricist" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "lyrics",
|
Kind: "lyrics",
|
||||||
Name: str,
|
Name: str,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if k == "guitar" {
|
if k == "guitar" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "performer, guitar",
|
Kind: "performer, guitar",
|
||||||
Name: str,
|
Name: str,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if k == "arrange" {
|
if k == "arrange" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "arranger",
|
Kind: "arranger",
|
||||||
Name: str,
|
Name: str,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if k == "vocal" {
|
if k == "vocal" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "vocals",
|
Kind: "vocals",
|
||||||
Name: str,
|
Name: str,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if k == "chorus" {
|
if k == "chorus" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "vocals",
|
Kind: "vocals",
|
||||||
Name: str,
|
Name: str,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if k == "performer" {
|
if k == "performer" {
|
||||||
track.FileMetadata.Artists = append(track.FileMetadata.Artists, Name{
|
track.FileMetadata.Artists = append(track.FileMetadata.Artists, metadata.Name{
|
||||||
Kind: "performer",
|
Kind: "performer",
|
||||||
Name: str,
|
Name: str,
|
||||||
})
|
})
|
||||||
|
@ -470,6 +502,9 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
if k == "originaltitle" {
|
if k == "originaltitle" {
|
||||||
track.FileMetadata.OriginalTitle = str
|
track.FileMetadata.OriginalTitle = str
|
||||||
}
|
}
|
||||||
|
if k == "unsyncedlyrics" {
|
||||||
|
track.FileMetadata.Lyrics = str
|
||||||
|
}
|
||||||
|
|
||||||
value, exists := disc.CommonMetadata[k]
|
value, exists := disc.CommonMetadata[k]
|
||||||
if !exists {
|
if !exists {
|
||||||
|
@ -503,17 +538,36 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
catno, ok := disc.CommonMetadata["catalogid"]
|
if catno, ok := disc.CommonMetadata["catalogid"]; ok {
|
||||||
if ok {
|
for _, n := range strings.Split(catno, ";") {
|
||||||
disc.Identifiers = append(disc.Identifiers, Name{
|
disc.Identifiers = append(disc.Identifiers, metadata.Name{
|
||||||
Kind: "catalog",
|
Kind: "catalog",
|
||||||
Name: catno,
|
Name: n,
|
||||||
})
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if catno, ok := disc.CommonMetadata["catalognumber"]; ok {
|
||||||
|
for _, n := range strings.Split(catno, ";") {
|
||||||
|
disc.Identifiers = append(disc.Identifiers, metadata.Name{
|
||||||
|
Kind: "catalog",
|
||||||
|
Name: n,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if catno, ok := disc.CommonMetadata["labelno"]; ok {
|
||||||
|
for _, n := range strings.Split(catno, ";") {
|
||||||
|
disc.Identifiers = append(disc.Identifiers, metadata.Name{
|
||||||
|
Kind: "catalog",
|
||||||
|
Name: n,
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
album, ok := disc.CommonMetadata["TALB"] //ID3v2
|
album, ok := disc.CommonMetadata["TALB"] //ID3v2
|
||||||
if ok {
|
if ok {
|
||||||
disc.Identifiers = append(disc.Identifiers, Name{
|
disc.Identifiers = append(disc.Identifiers, metadata.Name{
|
||||||
Kind: "album",
|
Kind: "album",
|
||||||
Name: album,
|
Name: album,
|
||||||
})
|
})
|
||||||
|
@ -522,7 +576,7 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
|
|
||||||
album, ok = disc.CommonMetadata["album"]
|
album, ok = disc.CommonMetadata["album"]
|
||||||
if ok {
|
if ok {
|
||||||
disc.Identifiers = append(disc.Identifiers, Name{
|
disc.Identifiers = append(disc.Identifiers, metadata.Name{
|
||||||
Kind: "album",
|
Kind: "album",
|
||||||
Name: album,
|
Name: album,
|
||||||
})
|
})
|
||||||
|
@ -531,7 +585,7 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
|
|
||||||
discid, ok := disc.CommonMetadata["discid"]
|
discid, ok := disc.CommonMetadata["discid"]
|
||||||
if ok {
|
if ok {
|
||||||
disc.Identifiers = append(disc.Identifiers, Name{
|
disc.Identifiers = append(disc.Identifiers, metadata.Name{
|
||||||
Kind: "discid",
|
Kind: "discid",
|
||||||
Name: discid,
|
Name: discid,
|
||||||
})
|
})
|
||||||
|
@ -539,37 +593,37 @@ func HandleDiscEntry(panakoInstance *panako.Instance, pathEntry string) *DiscHan
|
||||||
|
|
||||||
cdtoc, ok := disc.CommonMetadata["cdtoc"]
|
cdtoc, ok := disc.CommonMetadata["cdtoc"]
|
||||||
if ok {
|
if ok {
|
||||||
toc := TOC{}
|
toc := metadata.TOC{}
|
||||||
for _, v := range strings.Split(cdtoc, "+")[1:] {
|
for _, v := range strings.Split(cdtoc, "+")[1:] {
|
||||||
number, err := strconv.ParseInt(v, 16, 0)
|
number, err := strconv.ParseInt(v, 16, 0)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
toc = TOC{}
|
toc = metadata.TOC{}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
toc = append(toc, int(number))
|
toc = append(toc, int(number))
|
||||||
}
|
}
|
||||||
if len(toc) > 0 {
|
if len(toc) > 0 {
|
||||||
toc = append(TOC{toc[len(toc)-1]}, toc[0:len(toc)-1]...)
|
toc = append(metadata.TOC{toc[len(toc)-1]}, toc[0:len(toc)-1]...)
|
||||||
disc.Identifiers = append(disc.Identifiers, Name{
|
disc.Identifiers = append(disc.Identifiers, metadata.Name{
|
||||||
Kind: "toc",
|
Kind: "toc",
|
||||||
Name: toc.String(),
|
Name: toc.String(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
catalogRE := regroup.MustCompile(`(?i)[\[\(\{](?P<catno>(?:[a-z]{2,}-?[0-9][a-z0-9\-~~]*)|(?:[0-9 ]{5,}))[\}\)\]]`)
|
catalogRE := regroup.MustCompile(`(?i)[\[\(\{](?P<catno>(?:[a-z]{2,}-?[0-9][a-z0-9\-~]*)|(?:[0-9 ]{5,}))[\}\)\]]`)
|
||||||
m := &struct {
|
m := &struct {
|
||||||
CatalogNumber string `regroup:"catno"`
|
CatalogNumber string `regroup:"catno"`
|
||||||
}{}
|
}{}
|
||||||
err = catalogRE.MatchToTarget(disc.Directory, m)
|
err = catalogRE.MatchToTarget(utilities.NormalizeUnicode(disc.Directory), m)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
disc.Identifiers = append(disc.Identifiers, Name{
|
disc.Identifiers = append(disc.Identifiers, metadata.Name{
|
||||||
Kind: "catalog",
|
Kind: "catalog",
|
||||||
Name: strings.ReplaceAll(m.CatalogNumber, "~", "~"),
|
Name: m.CatalogNumber,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
disc.TOC = append(TOC{disc.TOC[len(disc.TOC)-1]}, disc.TOC[0:len(disc.TOC)-1]...)
|
disc.TOC = append(metadata.TOC{disc.TOC[len(disc.TOC)-1]}, disc.TOC[0:len(disc.TOC)-1]...)
|
||||||
|
|
||||||
return disc
|
return disc
|
||||||
}
|
}
|
65
handler/metadata.go
Normal file
65
handler/metadata.go
Normal file
|
@ -0,0 +1,65 @@
|
||||||
|
package handler
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.gammaspectra.live/S.O.N.G/METANOIA/metadata"
|
||||||
|
accuraterip_com "git.gammaspectra.live/S.O.N.G/METANOIA/metadata/accuraterip.com"
|
||||||
|
cuetools_net "git.gammaspectra.live/S.O.N.G/METANOIA/metadata/cuetools.net"
|
||||||
|
en_touhouwiki_net "git.gammaspectra.live/S.O.N.G/METANOIA/metadata/en.touhouwiki.net"
|
||||||
|
musicbrainz_org "git.gammaspectra.live/S.O.N.G/METANOIA/metadata/musicbrainz.org"
|
||||||
|
thwiki_cc "git.gammaspectra.live/S.O.N.G/METANOIA/metadata/thwiki.cc"
|
||||||
|
vgmdb_net "git.gammaspectra.live/S.O.N.G/METANOIA/metadata/vgmdb.net"
|
||||||
|
)
|
||||||
|
|
||||||
|
var metadataSources = []metadata.SourceMetadata{
|
||||||
|
accuraterip_com.NewSource(),
|
||||||
|
cuetools_net.NewSource(),
|
||||||
|
musicbrainz_org.NewSource(),
|
||||||
|
vgmdb_net.NewSource(),
|
||||||
|
thwiki_cc.NewSource(),
|
||||||
|
en_touhouwiki_net.NewSource(),
|
||||||
|
}
|
||||||
|
|
||||||
|
func SearchMetadata(disc *DiscHandlerResult, fuzzy bool) []*metadata.Album {
|
||||||
|
|
||||||
|
var albums []*metadata.Album
|
||||||
|
|
||||||
|
for _, source := range metadataSources {
|
||||||
|
var foundAlbums []*metadata.Album
|
||||||
|
if tocSource, ok := source.(metadata.TOCSource); ok {
|
||||||
|
foundAlbums = tocSource.FindByTOC(disc.TOC)
|
||||||
|
}
|
||||||
|
if cddb1Source, ok := source.(metadata.CDDB1Source); (fuzzy || len(foundAlbums) == 0) && ok {
|
||||||
|
foundAlbums = cddb1Source.FindByCDDB1(disc.TOC.GetCDDB1())
|
||||||
|
if fuzzy || len(foundAlbums) == 0 {
|
||||||
|
for _, id := range disc.Identifiers {
|
||||||
|
if id.Kind == "cddb1" {
|
||||||
|
foundAlbums = append(foundAlbums, cddb1Source.FindByCDDB1(metadata.NewCDDB1FromString(id.Name))...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if catalogSource, ok := source.(metadata.CatalogSource); (fuzzy || len(foundAlbums) == 0) && ok {
|
||||||
|
var catalogNumbers []metadata.CatalogNumber
|
||||||
|
for _, id := range disc.Identifiers {
|
||||||
|
if id.Kind == "catalog" {
|
||||||
|
catalogNumbers = append(catalogNumbers, metadata.CatalogNumber(id.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//TODO: also search combined number (aka EX-1000~11 or EX-1234~6)
|
||||||
|
for _, catno := range catalogNumbers {
|
||||||
|
foundAlbums = append(foundAlbums, catalogSource.FindByCatalogNumber(catno)...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if nameSource, ok := source.(metadata.AlbumNameSource); (fuzzy || len(foundAlbums) == 0) && ok && len(disc.Album) > 0 {
|
||||||
|
nameSource.FindByAlbumNames([]metadata.Name{
|
||||||
|
{Kind: "original", Name: disc.Album},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
albums = append(albums, foundAlbums...)
|
||||||
|
}
|
||||||
|
|
||||||
|
return albums
|
||||||
|
|
||||||
|
}
|
|
@ -1,48 +1,103 @@
|
||||||
package metadata
|
package metadata
|
||||||
|
|
||||||
import "time"
|
import (
|
||||||
|
"git.gammaspectra.live/S.O.N.G/METANOIA/utilities"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
type Album struct {
|
type Album struct {
|
||||||
License License
|
License License
|
||||||
SourceUniqueIdentifier string
|
SourceUniqueIdentifier string
|
||||||
Name []Name
|
Name NameSlice
|
||||||
Roles []Role
|
Roles RoleSlice
|
||||||
Art []Name
|
Art NameSlice
|
||||||
Identifiers []Name
|
Identifiers NameSlice
|
||||||
Tags []Name
|
Tags NameSlice
|
||||||
Links []Link
|
Links []Link
|
||||||
Discs []Disc
|
Discs []Disc
|
||||||
ReleaseDate time.Time
|
ReleaseDate time.Time
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (n *Album) Normalize() {
|
||||||
|
n.Name.Normalize()
|
||||||
|
n.Identifiers.Normalize()
|
||||||
|
n.Roles.Normalize()
|
||||||
|
n.Art.Normalize()
|
||||||
|
n.Tags.Normalize()
|
||||||
|
for i := range n.Discs {
|
||||||
|
n.Discs[i].Normalize()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
type Disc struct {
|
type Disc struct {
|
||||||
//TODO: add name?
|
Name NameSlice
|
||||||
Name []Name
|
Identifiers NameSlice
|
||||||
Identifiers []Name
|
|
||||||
Links []Link
|
Links []Link
|
||||||
Tracks []Track
|
Tracks []Track
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (n *Disc) Normalize() {
|
||||||
|
n.Name.Normalize()
|
||||||
|
n.Identifiers.Normalize()
|
||||||
|
for i := range n.Tracks {
|
||||||
|
n.Tracks[i].Normalize()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
type Track struct {
|
type Track struct {
|
||||||
Name []Name
|
Name NameSlice
|
||||||
Roles []Role
|
Roles RoleSlice
|
||||||
Links []Link
|
Links []Link
|
||||||
Duration time.Duration
|
Duration time.Duration
|
||||||
Lyrics LyricGetter
|
Lyrics LyricGetter
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (n *Track) Normalize() {
|
||||||
|
n.Name.Normalize()
|
||||||
|
n.Roles.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
type RoleSlice []Role
|
||||||
|
|
||||||
|
func (s RoleSlice) Normalize() {
|
||||||
|
for i := range s {
|
||||||
|
s[i].Normalize()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
type Role struct {
|
type Role struct {
|
||||||
Kind string
|
Kind string
|
||||||
Name []Name
|
Name NameSlice
|
||||||
Group string
|
Group string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (n *Role) Normalize() {
|
||||||
|
n.Name.Normalize()
|
||||||
|
n.Group = utilities.NormalizeUnicode(n.Group)
|
||||||
|
}
|
||||||
|
|
||||||
type Link struct {
|
type Link struct {
|
||||||
Kind string
|
Kind string
|
||||||
Name []Name
|
Name NameSlice
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n *Link) Normalize() {
|
||||||
|
n.Name.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
type NameSlice []Name
|
||||||
|
|
||||||
|
func (s NameSlice) Normalize() {
|
||||||
|
for i := range s {
|
||||||
|
s[i].Normalize()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type Name struct {
|
type Name struct {
|
||||||
Kind string
|
Kind string
|
||||||
Name string
|
Name string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (n *Name) Normalize() {
|
||||||
|
n.Name = utilities.NormalizeUnicode(n.Name)
|
||||||
|
}
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
package metadata
|
package metadata
|
||||||
|
|
||||||
type CatalogNumber string
|
type CatalogNumber string
|
||||||
|
|
||||||
|
//TODO: split/merge functions (aka EX-1000~11 or EX-1234~6)
|
||||||
|
|
|
@ -171,48 +171,51 @@ func (s *Source) FindByTOC(toc metadata.TOC) (albums []*metadata.Album) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/*
|
|
||||||
if len(f.Metadata) > 0 {
|
|
||||||
for _, e := range f.Metadata {
|
|
||||||
if e.Source == "musicbrainz" {
|
|
||||||
album := f.GetMusicbrainzAlbums()
|
|
||||||
if album != nil {
|
|
||||||
if len(album.Discs) >= e.DiscNumber {
|
|
||||||
var identifiers []metadata.Name
|
|
||||||
for i := range f.Entries {
|
|
||||||
identifiers = append(identifiers, metadata.Name{
|
|
||||||
Kind: "toc",
|
|
||||||
Name: tocs[i].String(),
|
|
||||||
})
|
|
||||||
identifiers = append(identifiers, metadata.Name{
|
|
||||||
Kind: "cddb1",
|
|
||||||
Name: tocs[i].GetCDDB1().String(),
|
|
||||||
})
|
|
||||||
identifiers = append(identifiers, metadata.Name{
|
|
||||||
Kind: "tocid",
|
|
||||||
Name: string(tocs[i].GetTocID()),
|
|
||||||
})
|
|
||||||
identifiers = append(identifiers, metadata.Name{
|
|
||||||
Kind: "discid",
|
|
||||||
Name: string(tocs[i].GetDiscID()),
|
|
||||||
})
|
|
||||||
identifiers = append(identifiers, names[i]...)
|
|
||||||
}
|
|
||||||
|
|
||||||
for ti := range album.Discs[e.DiscNumber-1].Tracks {
|
if len(f.Metadata) > 0 {
|
||||||
album.Discs[e.DiscNumber-1].Tracks[ti].Links = append(album.Discs[e.DiscNumber-1].Tracks[ti].Links, trackCRC[ti]...)
|
for _, e := range f.Metadata {
|
||||||
}
|
if e.Source == "musicbrainz" {
|
||||||
album.Discs[e.DiscNumber-1].Identifiers = append(album.Discs[e.DiscNumber-1].Identifiers, identifiers...)
|
album := musicbrainz_org.NewSource().GetRelease(e.Id)
|
||||||
|
if album != nil {
|
||||||
|
if len(album.Discs) >= e.DiscNumber {
|
||||||
|
var identifiers []metadata.Name
|
||||||
|
for i := range f.Entries {
|
||||||
|
identifiers = append(identifiers, metadata.Name{
|
||||||
|
Kind: "toc",
|
||||||
|
Name: tocs[i].String(),
|
||||||
|
})
|
||||||
|
identifiers = append(identifiers, metadata.Name{
|
||||||
|
Kind: "cddb1",
|
||||||
|
Name: tocs[i].GetCDDB1().String(),
|
||||||
|
})
|
||||||
|
identifiers = append(identifiers, metadata.Name{
|
||||||
|
Kind: "tocid",
|
||||||
|
Name: string(tocs[i].GetTocID()),
|
||||||
|
})
|
||||||
|
identifiers = append(identifiers, metadata.Name{
|
||||||
|
Kind: "discid",
|
||||||
|
Name: string(tocs[i].GetDiscID()),
|
||||||
|
})
|
||||||
|
identifiers = append(identifiers, names[i]...)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
for ti := range album.Discs[e.DiscNumber-1].Tracks {
|
||||||
|
album.Discs[e.DiscNumber-1].Tracks[ti].Links = append(album.Discs[e.DiscNumber-1].Tracks[ti].Links, trackCRC[ti]...)
|
||||||
|
}
|
||||||
|
album.Discs[e.DiscNumber-1].Identifiers = append(album.Discs[e.DiscNumber-1].Identifiers, identifiers...)
|
||||||
|
|
||||||
albums = append(albums, album)
|
albums = append(albums, album)
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*/
|
if len(albums) == 0 {
|
||||||
|
//TODO fallback
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
|
@ -276,7 +276,7 @@ func (s *Source) GetSongLyrics(lyricsName string) (result []*metadata.TextLyrics
|
||||||
|
|
||||||
if len(l.Entries.Kanji) > 0 {
|
if len(l.Entries.Kanji) > 0 {
|
||||||
result = append(result, &metadata.TextLyrics{
|
result = append(result, &metadata.TextLyrics{
|
||||||
Language: "japanese",
|
Language: "original", //TODO: detect original language
|
||||||
Identifiers: identifiers,
|
Identifiers: identifiers,
|
||||||
Entries: l.Entries.Kanji,
|
Entries: l.Entries.Kanji,
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,82 +0,0 @@
|
||||||
package metadata
|
|
||||||
|
|
||||||
import (
|
|
||||||
"hash"
|
|
||||||
"unsafe"
|
|
||||||
)
|
|
||||||
|
|
||||||
type accurateRipDigestV1 struct {
|
|
||||||
crc uint32
|
|
||||||
pos uint32
|
|
||||||
offset uint32
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewAccurateRipV1(offset uint32) hash.Hash32 {
|
|
||||||
return &accurateRipDigestV1{0, offset + 1, offset}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV1) Size() int { return 4 }
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV1) BlockSize() int { return 1 }
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV1) Reset() { d.crc = 0; d.pos = d.offset + 1 }
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV1) Sum32() uint32 { return d.crc }
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV1) Sum(in []byte) []byte {
|
|
||||||
s := d.Sum32()
|
|
||||||
return append(in, byte(s>>24), byte(s>>16), byte(s>>8), byte(s))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV1) Write(p []byte) (n int, err error) {
|
|
||||||
numWords := uintptr(len(p)) * unsafe.Sizeof(p[0]) / unsafe.Sizeof(uint32(0))
|
|
||||||
words := unsafe.Slice((*uint32)(unsafe.Pointer(&p[0])), numWords)
|
|
||||||
|
|
||||||
for _, w := range words {
|
|
||||||
//this can wrap
|
|
||||||
d.crc += d.pos * w
|
|
||||||
d.pos++
|
|
||||||
}
|
|
||||||
|
|
||||||
return len(p), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type accurateRipDigestV2 struct {
|
|
||||||
crc uint32
|
|
||||||
multiplier uint32
|
|
||||||
offset uint32
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewAccurateRipV2(offset uint32) hash.Hash32 {
|
|
||||||
return &accurateRipDigestV2{0, offset + 1, offset}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV2) Size() int { return 4 }
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV2) BlockSize() int { return 1 }
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV2) Reset() { d.crc = 0; d.multiplier = d.offset + 1 }
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV2) Sum32() uint32 { return d.crc }
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV2) Sum(in []byte) []byte {
|
|
||||||
s := d.Sum32()
|
|
||||||
return append(in, byte(s>>24), byte(s>>16), byte(s>>8), byte(s))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *accurateRipDigestV2) Write(p []byte) (n int, err error) {
|
|
||||||
numWords := uintptr(len(p)) * unsafe.Sizeof(p[0]) / unsafe.Sizeof(uint32(0))
|
|
||||||
words := unsafe.Slice((*uint32)(unsafe.Pointer(&p[0])), numWords)
|
|
||||||
|
|
||||||
for _, w := range words {
|
|
||||||
crcNew := uint64(w) * uint64(d.multiplier)
|
|
||||||
LO := crcNew & 0xFFFFFFFF
|
|
||||||
HI := crcNew / 0x100000000
|
|
||||||
//this can wrap
|
|
||||||
d.crc += uint32(HI)
|
|
||||||
d.crc += uint32(LO)
|
|
||||||
d.multiplier++
|
|
||||||
}
|
|
||||||
|
|
||||||
return len(p), nil
|
|
||||||
}
|
|
|
@ -1,362 +0,0 @@
|
||||||
package metadata
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/binary"
|
|
||||||
"git.gammaspectra.live/S.O.N.G/Hibiki/utilities/audio/format"
|
|
||||||
"github.com/minio/sha256-simd"
|
|
||||||
"hash"
|
|
||||||
"hash/crc32"
|
|
||||||
"sync"
|
|
||||||
"sync/atomic"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
const chanBuf = 16
|
|
||||||
|
|
||||||
type HasherChannel chan *format.AnalyzerPacket
|
|
||||||
|
|
||||||
func (c HasherChannel) Split(n int) (channels []HasherChannel) {
|
|
||||||
channels = make([]HasherChannel, n)
|
|
||||||
for i := range channels {
|
|
||||||
channels[i] = make(HasherChannel, chanBuf)
|
|
||||||
}
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
defer func() {
|
|
||||||
for _, channel := range channels {
|
|
||||||
close(channel)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
for packet := range c {
|
|
||||||
for _, channel := range channels {
|
|
||||||
channel <- packet
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c HasherChannel) PrependGap(samples, sampleRate, channels, bitDepth int) (channel HasherChannel) {
|
|
||||||
return MergeHasherChannels(NewHasherAudioGap(samples, sampleRate, channels, bitDepth), c)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c HasherChannel) AppendGap(samples, sampleRate, channels, bitDepth int) (channel HasherChannel) {
|
|
||||||
return MergeHasherChannels(c, NewHasherAudioGap(samples, sampleRate, channels, bitDepth))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c HasherChannel) SkipStartSamples(samples int) (channel HasherChannel) {
|
|
||||||
channel = make(HasherChannel, chanBuf)
|
|
||||||
go func() {
|
|
||||||
defer close(channel)
|
|
||||||
|
|
||||||
for samples > 0 {
|
|
||||||
packet, ok := <-c
|
|
||||||
if !ok {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(packet.Samples)/packet.Channels > samples {
|
|
||||||
startIndex := samples * packet.Channels
|
|
||||||
channel <- &format.AnalyzerPacket{
|
|
||||||
Samples: packet.Samples[startIndex:],
|
|
||||||
Channels: packet.Channels,
|
|
||||||
SampleRate: packet.SampleRate,
|
|
||||||
BitDepth: packet.BitDepth,
|
|
||||||
}
|
|
||||||
samples = 0
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
samples -= len(packet.Samples) / packet.Channels
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for packet := range c {
|
|
||||||
channel <- packet
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c HasherChannel) SkipEndSamples(samples int) (channel HasherChannel) {
|
|
||||||
channel = make(HasherChannel, chanBuf)
|
|
||||||
go func() {
|
|
||||||
defer close(channel)
|
|
||||||
|
|
||||||
var buffer []*format.AnalyzerPacket
|
|
||||||
bufferSamples := 0
|
|
||||||
|
|
||||||
for packet := range c {
|
|
||||||
for len(buffer) > 0 && (bufferSamples-len(buffer[0].Samples)/buffer[0].Channels) > samples {
|
|
||||||
channel <- buffer[0]
|
|
||||||
bufferSamples -= len(buffer[0].Samples) / buffer[0].Channels
|
|
||||||
buffer = buffer[1:]
|
|
||||||
}
|
|
||||||
|
|
||||||
bufferSamples += len(packet.Samples) / packet.Channels
|
|
||||||
buffer = append(buffer, packet)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, packet := range buffer {
|
|
||||||
//TODO: check this
|
|
||||||
leftSamples := bufferSamples - len(packet.Samples)/packet.Channels
|
|
||||||
|
|
||||||
if leftSamples <= samples {
|
|
||||||
endIndex := len(packet.Samples) - (samples-leftSamples)*packet.Channels
|
|
||||||
channel <- &format.AnalyzerPacket{
|
|
||||||
Samples: packet.Samples[:endIndex],
|
|
||||||
Channels: packet.Channels,
|
|
||||||
SampleRate: packet.SampleRate,
|
|
||||||
BitDepth: packet.BitDepth,
|
|
||||||
}
|
|
||||||
samples = 0
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
channel <- packet
|
|
||||||
bufferSamples -= len(packet.Samples) / packet.Channels
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c HasherChannel) SkipEndSamplesMultiple(wg *sync.WaitGroup, offset *uint32, samples int) (channel HasherChannel) {
|
|
||||||
channel = make(HasherChannel, chanBuf)
|
|
||||||
go func() {
|
|
||||||
defer close(channel)
|
|
||||||
|
|
||||||
var buffer []*format.AnalyzerPacket
|
|
||||||
bufferSamples := 0
|
|
||||||
|
|
||||||
maxSamples := samples * 2
|
|
||||||
|
|
||||||
samplesRead := 0
|
|
||||||
for packet := range c {
|
|
||||||
for len(buffer) > 0 && (bufferSamples-len(buffer[0].Samples)/buffer[0].Channels) > maxSamples {
|
|
||||||
channel <- buffer[0]
|
|
||||||
samplesRead += len(buffer[0].Samples) / buffer[0].Channels
|
|
||||||
bufferSamples -= len(buffer[0].Samples) / buffer[0].Channels
|
|
||||||
buffer = buffer[1:]
|
|
||||||
}
|
|
||||||
|
|
||||||
bufferSamples += len(packet.Samples) / packet.Channels
|
|
||||||
buffer = append(buffer, packet)
|
|
||||||
}
|
|
||||||
|
|
||||||
wg.Wait()
|
|
||||||
totalSampleOffset := samplesRead + int(atomic.LoadUint32(offset))
|
|
||||||
|
|
||||||
if len(buffer) > 0 {
|
|
||||||
p := &format.AnalyzerPacket{
|
|
||||||
Channels: buffer[0].Channels,
|
|
||||||
SampleRate: buffer[0].SampleRate,
|
|
||||||
BitDepth: buffer[0].BitDepth,
|
|
||||||
}
|
|
||||||
for _, packet := range buffer {
|
|
||||||
p.Samples = append(p.Samples, packet.Samples...)
|
|
||||||
}
|
|
||||||
nsamples := samples + (((len(p.Samples) / p.Channels) + totalSampleOffset) % samples)
|
|
||||||
|
|
||||||
if len(p.Samples)/p.Channels > nsamples {
|
|
||||||
endIndex := len(p.Samples) - nsamples*p.Channels
|
|
||||||
channel <- &format.AnalyzerPacket{
|
|
||||||
Samples: p.Samples[:endIndex],
|
|
||||||
Channels: p.Channels,
|
|
||||||
SampleRate: p.SampleRate,
|
|
||||||
BitDepth: p.BitDepth,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewHasherAudioGap(samples, sampleRate, channels, bitDepth int) (channel HasherChannel) {
|
|
||||||
channel = make(HasherChannel, 1)
|
|
||||||
channel <- &format.AnalyzerPacket{
|
|
||||||
Samples: make([]int32, samples*channels),
|
|
||||||
Channels: channels,
|
|
||||||
SampleRate: sampleRate,
|
|
||||||
BitDepth: bitDepth,
|
|
||||||
}
|
|
||||||
close(channel)
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func MergeHasherChannels(channels ...HasherChannel) (channel HasherChannel) {
|
|
||||||
channel = make(HasherChannel, chanBuf)
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
defer close(channel)
|
|
||||||
|
|
||||||
for _, c := range channels {
|
|
||||||
for packet := range c {
|
|
||||||
channel <- packet
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// HashType identifies which checksum/hash algorithm a Hasher computes.
type HashType int
|
|
||||||
|
|
||||||
const (
|
|
||||||
HashtypeCrc32 = HashType(iota)
|
|
||||||
HashtypeSha256
|
|
||||||
HashtypeAccurateRipV1
|
|
||||||
HashtypeAccurateRipV1Start
|
|
||||||
HashtypeAccurateRipV2
|
|
||||||
HashtypeAccurateRipV2Start
|
|
||||||
)
|
|
||||||
|
|
||||||
// Hasher consumes AnalyzerPackets from a HasherChannel in a background
// goroutine (see startRoutine) and folds their raw PCM bytes into a single
// hash.Hash, while tracking stream statistics along the way.
type Hasher struct {
	hash    HashType      // which algorithm this Hasher computes
	hasher  hash.Hash     // underlying hash implementation selected in NewHasher
	result  []byte        // final digest, populated once the input channel is drained
	channel HasherChannel // input packet stream
	wg      sync.WaitGroup // tracks the hashing goroutine; released when the digest is ready

	samples    int     // total samples per channel seen so far
	duration   float64 // accumulated duration in seconds (fallback when sample rates are mixed)
	sampleRate int     // sample rate of the stream, or -1 if packets disagreed
	bitDepth   int     // bit depth of the stream, or -1 if packets disagreed
	channels   int     // channel count of the stream, or -1 if packets disagreed

	buffer [][]int32 // NOTE(review): unused in the code visible here — confirm before removing
}
|
|
||||||
|
|
||||||
func NewHasher(channel HasherChannel, hashType HashType) (h *Hasher) {
|
|
||||||
h = &Hasher{
|
|
||||||
hash: hashType,
|
|
||||||
channel: channel,
|
|
||||||
}
|
|
||||||
|
|
||||||
switch hashType {
|
|
||||||
case HashtypeCrc32:
|
|
||||||
h.hasher = crc32.NewIEEE()
|
|
||||||
case HashtypeSha256:
|
|
||||||
h.hasher = sha256.New()
|
|
||||||
case HashtypeAccurateRipV1:
|
|
||||||
h.hasher = NewAccurateRipV1(0)
|
|
||||||
case HashtypeAccurateRipV1Start:
|
|
||||||
h.hasher = NewAccurateRipV1(Int16SamplesPerSector*5 - 1)
|
|
||||||
case HashtypeAccurateRipV2:
|
|
||||||
h.hasher = NewAccurateRipV2(0)
|
|
||||||
case HashtypeAccurateRipV2Start:
|
|
||||||
h.hasher = NewAccurateRipV2(Int16SamplesPerSector*5 - 1)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
h.startRoutine()
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Hasher) startRoutine() {
|
|
||||||
h.wg.Add(1)
|
|
||||||
go func() {
|
|
||||||
defer h.wg.Done()
|
|
||||||
|
|
||||||
for packet := range h.channel {
|
|
||||||
h.handlePacket(packet)
|
|
||||||
}
|
|
||||||
|
|
||||||
h.result = h.hasher.Sum([]byte{})
|
|
||||||
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|
||||||
// handlePacket folds one packet into the hash and updates the running
// stream statistics. Samples are serialized to raw PCM bytes according to
// the packet's bit depth before being written to the hasher.
func (h *Hasher) handlePacket(packet *format.AnalyzerPacket) {
	// Samples per channel in this packet.
	samples := len(packet.Samples) / packet.Channels

	h.samples += samples

	// Track each stream property; a value of -1 marks packets that disagreed.
	if h.sampleRate == 0 {
		h.sampleRate = packet.SampleRate
	} else if h.sampleRate != packet.SampleRate {
		h.sampleRate = -1
	}
	if h.bitDepth == 0 {
		h.bitDepth = packet.BitDepth
	} else if h.bitDepth != packet.BitDepth {
		h.bitDepth = -1
	}
	if h.channels == 0 {
		h.channels = packet.Channels
	} else if h.channels != packet.Channels {
		h.channels = -1
	}

	// Accumulate duration per packet so it stays meaningful even when the
	// stream mixes sample rates (the sampleRate == -1 case above).
	h.duration += float64(samples) / float64(packet.SampleRate)

	// Serialize samples to bytes at the packet's native width.
	var buf []byte
	switch packet.BitDepth {
	case 8:
		// One byte per sample (low byte of the int32).
		buf = make([]byte, len(packet.Samples))
		for i := range packet.Samples {
			buf[i] = byte(packet.Samples[i])
		}
	case 16:
		// Little-endian 16-bit PCM.
		buf = make([]byte, len(packet.Samples)*2)
		for i := range packet.Samples {
			binary.LittleEndian.PutUint16(buf[i*2:], uint16(int16(packet.Samples[i])))
		}
	case 24:
		// NOTE(review): 24-bit samples are packed MSB-first (big-endian),
		// unlike the little-endian 16/32-bit branches — confirm this byte
		// order is intentional for the formats being hashed.
		buf = make([]byte, len(packet.Samples)*3)
		for i := range packet.Samples {
			buf[i*3] = byte((packet.Samples[i] >> 16) & 0xFF)
			buf[i*3+1] = byte((packet.Samples[i] >> 8) & 0xFF)
			buf[i*3+2] = byte(packet.Samples[i] & 0xFF)
		}
	default:
		// Any other depth: little-endian 32-bit words.
		buf = make([]byte, len(packet.Samples)*4)
		for i := range packet.Samples {
			binary.LittleEndian.PutUint32(buf[i*4:], uint32(packet.Samples[i]))
		}

	}

	// hash.Hash.Write never returns an error per the hash package contract.
	h.hasher.Write(buf)
}
|
|
||||||
|
|
||||||
// GetSampleCount returns the total number of samples per channel processed
// so far. The field is written by the hashing goroutine without locking, so
// call this only after Wait for a stable final value.
func (h *Hasher) GetSampleCount() int {
	return h.samples
}
|
|
||||||
|
|
||||||
// GetChannels returns the stream's channel count, or -1 if the processed
// packets disagreed on it. Read after Wait for a stable value.
func (h *Hasher) GetChannels() int {
	return h.channels
}
|
|
||||||
|
|
||||||
// GetSampleRate returns the stream's sample rate, or -1 if the processed
// packets disagreed on it. Read after Wait for a stable value.
func (h *Hasher) GetSampleRate() int {
	return h.sampleRate
}
|
|
||||||
|
|
||||||
// GetHashType returns the algorithm this Hasher was constructed with.
func (h *Hasher) GetHashType() HashType {
	return h.hash
}
|
|
||||||
|
|
||||||
// GetResult returns the final digest. It is only populated after the input
// channel has been fully drained — call Wait first, otherwise this may
// return nil.
func (h *Hasher) GetResult() []byte {
	return h.result
}
|
|
||||||
|
|
||||||
func (h *Hasher) GetDuration() time.Duration {
|
|
||||||
if h.sampleRate > 0 {
|
|
||||||
return time.Duration(float64(time.Second) * (float64(h.samples) / float64(h.sampleRate)))
|
|
||||||
}
|
|
||||||
|
|
||||||
//Fallback calculated duration
|
|
||||||
return time.Duration(float64(time.Second) * h.duration)
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetWaitGroup exposes the WaitGroup tracking the hashing goroutine, so
// callers can wait on several hashers at once.
func (h *Hasher) GetWaitGroup() *sync.WaitGroup {
	return &h.wg
}
|
|
||||||
|
|
||||||
// Wait blocks until the hashing goroutine has drained the input channel and
// stored the final digest (see GetResult).
func (h *Hasher) Wait() {
	h.wg.Wait()
}
|
|
|
@ -38,7 +38,7 @@ func (s *Source) GetLicense() metadata.License {
|
||||||
return metadata.License{
|
return metadata.License{
|
||||||
//Most core data is CC0
|
//Most core data is CC0
|
||||||
Code: metadata.CC_BY_NC_SA_30,
|
Code: metadata.CC_BY_NC_SA_30,
|
||||||
URL: baseURL + "/doc/About/Data_License",
|
URL: baseURL + "doc/About/Data_License",
|
||||||
Attribution: fmt.Sprintf("%s (%s)", s.GetName(), s.GetURL()),
|
Attribution: fmt.Sprintf("%s (%s)", s.GetName(), s.GetURL()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -46,8 +46,24 @@ func (s *Source) GetLicense() metadata.License {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Source) FindByTOC(toc metadata.TOC) []*metadata.Album {
|
func (s *Source) FindByTOC(toc metadata.TOC) (albums []*metadata.Album) {
|
||||||
return s.FindByCDDB1Group([]metadata.CDDB1{toc.GetCDDB1()})
|
foundAlbums := s.FindByCDDB1Group([]metadata.CDDB1{toc.GetCDDB1()})
|
||||||
|
for _, album := range foundAlbums {
|
||||||
|
if len(album.Discs) == 1 {
|
||||||
|
for i, track := range album.Discs[0].Tracks {
|
||||||
|
diff := track.Duration - toc.GetTrackDuration(i)
|
||||||
|
if diff < 0 {
|
||||||
|
diff = -diff
|
||||||
|
}
|
||||||
|
if track.Duration != 0 && diff > time.Second*6 { //match threshold
|
||||||
|
goto L
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
albums = append(albums, album)
|
||||||
|
L:
|
||||||
|
}
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Source) FindByCDDB1(cddb metadata.CDDB1) []*metadata.Album {
|
func (s *Source) FindByCDDB1(cddb metadata.CDDB1) []*metadata.Album {
|
||||||
|
@ -349,7 +365,7 @@ func (s *Source) GetSongLyrics(songName string) *metadata.LRCLyrics {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return metadata.NewLRCLyrics(string(body), "japanese", []metadata.Name{
|
return metadata.NewLRCLyrics(string(body), "original", []metadata.Name{ //TODO: detect original language
|
||||||
{Kind: "url", Name: baseURL + "歌词:" + wikitext_parser.NormalizeWikiTitle(songName)},
|
{Kind: "url", Name: baseURL + "歌词:" + wikitext_parser.NormalizeWikiTitle(songName)},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -63,8 +63,10 @@ func (t TOC) GetDuration() time.Duration {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t TOC) GetTrackDuration(index int) time.Duration {
|
func (t TOC) GetTrackDuration(index int) time.Duration {
|
||||||
if index == len(t) {
|
if index < 0 || index > len(t)-2 {
|
||||||
return (time.Second * time.Duration(t[index+2]-t[index+1])) / SectorsPerSecond
|
return 0
|
||||||
|
} else if index == len(t)-2 {
|
||||||
|
return (time.Second * time.Duration(t[0]-t[index+1])) / SectorsPerSecond
|
||||||
} else {
|
} else {
|
||||||
return (time.Second * time.Duration(t[index+2]-t[index+1])) / SectorsPerSecond
|
return (time.Second * time.Duration(t[index+2]-t[index+1])) / SectorsPerSecond
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,9 +1,61 @@
|
||||||
package utilities
|
package utilities
|
||||||
|
|
||||||
import "strings"
|
import (
|
||||||
|
"golang.org/x/text/cases"
|
||||||
|
"golang.org/x/text/language"
|
||||||
|
"golang.org/x/text/runes"
|
||||||
|
"golang.org/x/text/transform"
|
||||||
|
"golang.org/x/text/unicode/norm"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
const Version = "1.0"
|
const Version = "1.0"
|
||||||
|
|
||||||
|
var normalizeTransformer = transform.Chain(
|
||||||
|
norm.NFKD,
|
||||||
|
runes.Map(func(r rune) rune {
|
||||||
|
switch r {
|
||||||
|
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}),
|
||||||
|
norm.NFC,
|
||||||
|
)
|
||||||
|
|
||||||
|
var normalizeKeyTransformer = transform.Chain(
|
||||||
|
norm.NFKD,
|
||||||
|
runes.Remove(runes.In(unicode.Cc)),
|
||||||
|
runes.Remove(runes.In(unicode.Cf)),
|
||||||
|
runes.Remove(runes.In(unicode.Mn)),
|
||||||
|
runes.Remove(runes.In(unicode.Me)),
|
||||||
|
runes.Remove(runes.In(unicode.Mc)),
|
||||||
|
runes.Remove(runes.In(unicode.Po)),
|
||||||
|
runes.Remove(runes.In(unicode.Pe)),
|
||||||
|
runes.Remove(runes.In(unicode.Ps)),
|
||||||
|
runes.Remove(runes.In(unicode.Pf)),
|
||||||
|
runes.Remove(runes.In(unicode.Pi)),
|
||||||
|
runes.Remove(runes.In(unicode.Pd)),
|
||||||
|
runes.Remove(runes.In(unicode.Pc)),
|
||||||
|
runes.Remove(runes.In(unicode.Sc)),
|
||||||
|
runes.Remove(runes.In(unicode.Sk)),
|
||||||
|
runes.Remove(runes.In(unicode.Sm)),
|
||||||
|
runes.Remove(runes.In(unicode.So)),
|
||||||
|
runes.Remove(runes.In(unicode.Space)),
|
||||||
|
cases.Lower(language.Und),
|
||||||
|
norm.NFC,
|
||||||
|
)
|
||||||
|
|
||||||
|
func NormalizeUnicode(text string) (normalized string) {
|
||||||
|
normalized, _, _ = transform.String(normalizeTransformer, text)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func NormalizeKeyUnicode(text string) (normalized string) {
|
||||||
|
normalized, _, _ = transform.String(normalizeKeyTransformer, text)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
func GetMimeTypeFromExtension(ext string) string {
|
func GetMimeTypeFromExtension(ext string) string {
|
||||||
|
|
||||||
if len(ext) > 0 {
|
if len(ext) > 0 {
|
||||||
|
|
Loading…
Reference in a new issue