Added metadata types for lookup system, Wikitext parser
This commit is contained in:
parent
665f622d00
commit
03e94ad7f4
10
go.mod
10
go.mod
|
@ -3,13 +3,18 @@ module git.gammaspectra.live/S.O.N.G/METANOIA
|
|||
go 1.18
|
||||
|
||||
require (
|
||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220206160547-e7023361fa2e
|
||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220214135918-99bafdde7a4a
|
||||
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153
|
||||
github.com/dgraph-io/badger/v3 v3.2103.2
|
||||
github.com/dhowden/tag v0.0.0-20201120070457-d52dcb253c63
|
||||
github.com/ikawaha/kagome-dict/uni v1.1.3
|
||||
github.com/ikawaha/kagome/v2 v2.7.0
|
||||
github.com/ipfs/go-cid v0.1.0
|
||||
github.com/lib/pq v1.10.4
|
||||
github.com/minio/sha256-simd v1.0.0
|
||||
github.com/multiformats/go-multihash v0.1.0
|
||||
github.com/oriser/regroup v0.0.0-20210730155327-fca8d7531263
|
||||
golang.org/x/text v0.3.7
|
||||
)
|
||||
|
||||
require (
|
||||
|
@ -20,7 +25,6 @@ require (
|
|||
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
||||
github.com/cheekybits/genny v1.0.0 // indirect
|
||||
github.com/cocoonlife/goflac v0.0.0-20170210142907-50ea06ed5a9d // indirect
|
||||
github.com/dgraph-io/badger/v3 v3.2103.2 // indirect
|
||||
github.com/dgraph-io/ristretto v0.1.0 // indirect
|
||||
github.com/dgrr/http2 v0.3.4 // indirect
|
||||
github.com/dh1tw/gosamplerate v0.1.2 // indirect
|
||||
|
@ -33,6 +37,7 @@ require (
|
|||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/google/flatbuffers v2.0.5+incompatible // indirect
|
||||
github.com/ikawaha/kagome-dict v1.0.4 // indirect
|
||||
github.com/klauspost/compress v1.14.2 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.0.10 // indirect
|
||||
github.com/kvark128/minimp3 v0.0.0-20211109174940-101188771a65 // indirect
|
||||
|
@ -60,7 +65,6 @@ require (
|
|||
golang.org/x/mod v0.5.1 // indirect
|
||||
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect
|
||||
golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a // indirect
|
||||
golang.org/x/text v0.3.7 // indirect
|
||||
golang.org/x/tools v0.1.9 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
|
||||
google.golang.org/protobuf v1.27.1 // indirect
|
||||
|
|
12
go.sum
12
go.sum
|
@ -9,6 +9,8 @@ dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D
|
|||
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220206160547-e7023361fa2e h1:n1Fw+AcqqYhEiHXKBO1hhsggz/ND9d/VYKGvPx9KsL0=
|
||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220206160547-e7023361fa2e/go.mod h1:/NY+4FrfPnEXNCmF16085cSGWZ89YS+Glpg4cTJhamg=
|
||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220214135918-99bafdde7a4a h1:3LnPQmaEHjMTotiJaeg0t7L0JA/BR+ENlr5gTgwDK6c=
|
||||
git.gammaspectra.live/S.O.N.G/Hibiki v0.0.0-20220214135918-99bafdde7a4a/go.mod h1:/NY+4FrfPnEXNCmF16085cSGWZ89YS+Glpg4cTJhamg=
|
||||
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153 h1:RMDA05IEOytScNSiE2ms98x/CVMHSlA+eVBC0VCq4po=
|
||||
git.gammaspectra.live/S.O.N.G/MakyuuIchaival v0.0.0-20220131114831-c08c7d9b4153/go.mod h1:z6KcP5RPhMxDJaVU48sBhiYRCJ6ZJBbx1iIhkUrrhfY=
|
||||
git.gammaspectra.live/S.O.N.G/go-pus v0.0.0-20220130003320-c9b07c6bec7a h1:LxrTp9gf4w5KnFHRPFLXYfoxC58GCSEmZrHI6Ogtrm0=
|
||||
|
@ -135,6 +137,14 @@ github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:Fecb
|
|||
github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
|
||||
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
|
||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||
github.com/ikawaha/kagome-dict v1.0.3/go.mod h1:8Ma5E21J2kyaak6KumYLWGLKxm1kaAkCCWKWnrc5o/o=
|
||||
github.com/ikawaha/kagome-dict v1.0.4 h1:sxRKBqQ5FiJKeQwhvUmtumFqCm+GvCYiKRVgA08OQ+w=
|
||||
github.com/ikawaha/kagome-dict v1.0.4/go.mod h1:s6LsRECNl13K4miPTTG3/n6Pt7v3ClQfohMbK7qitzo=
|
||||
github.com/ikawaha/kagome-dict/ipa v1.0.4 h1:+vXHnhfgwNdm/DU4KrPaiRHO4zUht0w0iK4EtkVfrL8=
|
||||
github.com/ikawaha/kagome-dict/uni v1.1.3 h1:ea34C5lBms/U4ECoczXBu2rmJ0KG6UC8Si4wE9NDilI=
|
||||
github.com/ikawaha/kagome-dict/uni v1.1.3/go.mod h1:3rH19G7Fp+BQcRMv9GDwZzbXNkHttfcoOVdv9Pi+zyU=
|
||||
github.com/ikawaha/kagome/v2 v2.7.0 h1:8FSQwHKgDptbqpIPlLzYn1ekIAQwe2NJ6q6QfyXwlC4=
|
||||
github.com/ikawaha/kagome/v2 v2.7.0/go.mod h1:x7RC7TnFI5uOzxIPDcx9sJqV1vFE+3p+mJNcTVUGHjo=
|
||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||
github.com/ipfs/go-cid v0.1.0 h1:YN33LQulcRHjfom/i25yoOZR4Telp1Hr/2RU3d0PnC0=
|
||||
github.com/ipfs/go-cid v0.1.0/go.mod h1:rH5/Xv83Rfy8Rw6xG+id3DYAMUVmem1MowoKwdXmN2o=
|
||||
|
@ -219,6 +229,8 @@ github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1y
|
|||
github.com/onsi/gomega v1.13.0 h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak=
|
||||
github.com/onsi/gomega v1.13.0/go.mod h1:lRk9szgn8TxENtWd0Tp4c3wjlRfMTMH27I+3Je41yGY=
|
||||
github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
|
||||
github.com/oriser/regroup v0.0.0-20210730155327-fca8d7531263 h1:Qd1Ml+uEhpesT8Og0ysEhu5+DGhbhW+qxjapH8t1Kvs=
|
||||
github.com/oriser/regroup v0.0.0-20210730155327-fca8d7531263/go.mod h1:odkMeLkWS8G6+WP2z3Pn2vkzhPSvBtFhAUYTKXAtZMQ=
|
||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
|
|
61
metadata/album.go
Normal file
61
metadata/album.go
Normal file
|
@ -0,0 +1,61 @@
|
|||
package metadata
|
||||
|
||||
import "time"
|
||||
|
||||
//Album is the lookup system's unified view of one release as reported by a
//single metadata source.
type Album struct {
	License                License //usage terms of the source this album came from
	SourceUniqueIdentifier string  //the source's own identifier for this release
	Name                   []Name
	Roles                  []Role
	Art                    []Name
	Identifiers            []Name
	Tags                   []Name
	Links                  []Link
	Discs                  []Disc
	ReleaseDate            time.Time
}

//Disc is one physical/logical disc of an Album.
type Disc struct {
	//TODO: add name? (a Name field exists below — confirm whether this TODO is stale)
	Name        []Name
	Identifiers []Name
	Links       []Link
	Tracks      []Track
}

//Track is a single track on a Disc.
type Track struct {
	Name     []Name
	Roles    []Role
	Links    []Link
	Duration time.Duration
	Lyrics   LyricGetter //lazy accessor; see LyricGetter
}

//Role associates named people/entities with a kind of contribution.
//Kind and Group semantics are source-defined — confirm against the sources
//that populate them.
type Role struct {
	Kind  string
	Name  []Name
	Group string
}

//Link is a typed external reference.
type Link struct {
	Kind string
	Name []Name
}

//Name is a tagged string value; Kind discriminates what the value represents
//(source-defined).
type Name struct {
	Kind string
	Name string
}

//Lyrics holds timed lyric entries plus source identifiers.
type Lyrics struct {
	Identifiers []Name
	Entries     []LyricTextEntry
}

//LyricTextEntry is one timed span of lyric text.
type LyricTextEntry struct {
	Start   time.Duration
	End     time.Duration
	Content []Name
}

//LyricGetter lazily resolves a track's lyrics.
//NOTE(review): presumably returns nil when no lyrics are available — confirm
//with implementations.
type LyricGetter func() *Lyrics
|
194
metadata/cache.go
Normal file
194
metadata/cache.go
Normal file
|
@ -0,0 +1,194 @@
|
|||
package metadata
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"github.com/dgraph-io/badger/v3"
|
||||
badgerOptions "github.com/dgraph-io/badger/v3/options"
|
||||
"github.com/minio/sha256-simd"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"runtime"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type BadgerCacheStore struct {
|
||||
handle *badger.DB
|
||||
gcTicker *time.Ticker
|
||||
closeChannel chan bool
|
||||
}
|
||||
|
||||
func NewBadgerCacheStore(path string) (*BadgerCacheStore, error) {
|
||||
options := badger.DefaultOptions(path)
|
||||
options.SyncWrites = false
|
||||
options.NumVersionsToKeep = 1
|
||||
options.Compression = badgerOptions.ZSTD
|
||||
options.ZSTDCompressionLevel = 1
|
||||
db, err := badger.Open(options)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
store := &BadgerCacheStore{
|
||||
handle: db,
|
||||
gcTicker: time.NewTicker(time.Minute * 5),
|
||||
closeChannel: make(chan bool),
|
||||
}
|
||||
go func() {
|
||||
defer store.gcTicker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-store.gcTicker.C:
|
||||
for store.handle.RunValueLogGC(0.6) == nil {
|
||||
|
||||
}
|
||||
case <-store.closeChannel:
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
runtime.SetFinalizer(store, func(s *BadgerCacheStore) {
|
||||
s.closeChannel <- true
|
||||
s.handle.Close()
|
||||
})
|
||||
|
||||
return store, nil
|
||||
}
|
||||
|
||||
func getRequestKey(r *http.Request) (out []byte) {
|
||||
key := r.Method + ":" + r.URL.String() + ":"
|
||||
var headers []string
|
||||
for k, v := range r.Header {
|
||||
hk := strings.ToLower(k)
|
||||
if hk == "user-agent" || hk == "x-fetched-at" {
|
||||
continue
|
||||
}
|
||||
headers = append(headers, hk+":"+strings.Join(v, ","))
|
||||
}
|
||||
sort.SliceStable(headers, func(i, j int) bool {
|
||||
return strings.Compare(headers[i], headers[j]) < 0
|
||||
})
|
||||
key += strings.Join(headers, ";")
|
||||
|
||||
hasher := sha256.New()
|
||||
hasher.Write([]byte(key))
|
||||
|
||||
out = hasher.Sum(out)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
type encodedRequest struct {
|
||||
Method string `json:"method"`
|
||||
URL string `json:"url"`
|
||||
Headers map[string][]string `json:"headers"`
|
||||
}
|
||||
|
||||
func encodeRequest(r *http.Request) (out []byte) {
|
||||
|
||||
value := encodedRequest{
|
||||
Method: r.Method,
|
||||
URL: r.URL.String(),
|
||||
Headers: r.Header,
|
||||
}
|
||||
|
||||
out, _ = json.Marshal(value)
|
||||
return
|
||||
}
|
||||
|
||||
type encodedResponse struct {
|
||||
Request encodedRequest `json:"request"`
|
||||
Headers map[string][]string `json:"headers"`
|
||||
Status string `json:"status"`
|
||||
StatusCode int `json:"status_code"`
|
||||
Proto string `json:"proto"`
|
||||
Body []byte `json:"body"`
|
||||
}
|
||||
|
||||
func encodeResponse(r *http.Response) (out []byte) {
|
||||
|
||||
body, err := ioutil.ReadAll(r.Body)
|
||||
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
value := encodedResponse{
|
||||
Request: encodedRequest{
|
||||
Method: r.Request.Method,
|
||||
URL: r.Request.URL.String(),
|
||||
Headers: r.Request.Header,
|
||||
},
|
||||
Headers: r.Header,
|
||||
Status: r.Status,
|
||||
StatusCode: r.StatusCode,
|
||||
Proto: r.Proto,
|
||||
Body: body,
|
||||
}
|
||||
|
||||
out, _ = json.Marshal(value)
|
||||
return
|
||||
}
|
||||
|
||||
//Get looks up a cached response for the given request. It returns badger's
//ErrKeyNotFound (wrapped in err) on a cache miss, or a reconstructed
//*http.Response whose body is an in-memory reader over the cached bytes.
func (s *BadgerCacheStore) Get(request *http.Request) (response *http.Response, err error) {
	key := getRequestKey(request)

	err = s.handle.View(func(txn *badger.Txn) error {
		item, err := txn.Get(key)
		if err != nil {
			return err
		}

		//Decode inside the Value callback so val stays valid for the copy.
		err = item.Value(func(val []byte) error {
			value := &encodedResponse{}

			err := json.Unmarshal(val, value)
			if err != nil {
				return err
			}

			//A parse failure leaves URL nil; ignored here because the cached
			//URL was produced by url.URL.String() at encode time.
			requestUri, _ := url.Parse(value.Request.URL)
			response = &http.Response{
				Request: &http.Request{
					Method: value.Request.Method,
					URL:    requestUri,
					Header: value.Request.Headers,
				},
				Header:     value.Headers,
				Status:     value.Status,
				StatusCode: value.StatusCode,
				Proto:      value.Proto,
				//Body is served from memory, so callers may read it freely.
				Body: io.NopCloser(bytes.NewReader(value.Body)),
			}

			return nil
		})
		return err
	})

	return
}
|
||||
|
||||
func (s *BadgerCacheStore) Set(request *http.Request, response *http.Response) (*http.Response, error) {
|
||||
key := getRequestKey(request)
|
||||
byteValue := encodeResponse(response)
|
||||
if len(byteValue) == 0 {
|
||||
return nil, errors.New("could not encode response")
|
||||
}
|
||||
|
||||
err := s.handle.Update(func(txn *badger.Txn) error {
|
||||
return txn.Set(key, byteValue)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return s.Get(request)
|
||||
}
|
3
metadata/catalog.go
Normal file
3
metadata/catalog.go
Normal file
|
@ -0,0 +1,3 @@
|
|||
package metadata
|
||||
|
||||
//CatalogNumber identifies a release in a source's catalog; kept as an opaque
//string (see CatalogSource.FindByCatalogNumber).
type CatalogNumber string
|
37
metadata/ccdb.go
Normal file
37
metadata/ccdb.go
Normal file
|
@ -0,0 +1,37 @@
|
|||
package metadata
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"encoding/hex"
|
||||
"time"
|
||||
)
|
||||
|
||||
//https://www.liquisearch.com/cddb/example_calculation_of_a_cddb1_freedb_disc_id
|
||||
|
||||
type CDDB1 uint32
|
||||
|
||||
func (c CDDB1) GetStartTimeChecksum() int {
|
||||
return int((c >> 24) & 0xFF)
|
||||
}
|
||||
|
||||
func (c CDDB1) GetDuration() time.Duration {
|
||||
return time.Second * time.Duration((c>>8)&0xFFFF)
|
||||
}
|
||||
|
||||
func (c CDDB1) GetTrackNumber() int {
|
||||
return int(c & 0xFF)
|
||||
}
|
||||
|
||||
func NewCDDB1FromString(cddb1 string) CDDB1 {
|
||||
b, err := hex.DecodeString(cddb1)
|
||||
if err != nil {
|
||||
return 0
|
||||
}
|
||||
return CDDB1(binary.BigEndian.Uint32(b))
|
||||
}
|
||||
|
||||
func (c CDDB1) String() string {
|
||||
b := make([]byte, 4)
|
||||
binary.BigEndian.PutUint32(b, uint32(c))
|
||||
return hex.EncodeToString(b)
|
||||
}
|
63
metadata/client.go
Normal file
63
metadata/client.go
Normal file
|
@ -0,0 +1,63 @@
|
|||
package metadata
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"git.gammaspectra.live/S.O.N.G/METANOIA/utilities"
|
||||
"net/http"
|
||||
"runtime"
|
||||
"time"
|
||||
)
|
||||
|
||||
//DefaultUserAgent Follows most identification rules (Like for Musicbrainz, or Discogs)
var DefaultUserAgent = fmt.Sprintf("Mozilla/5.0 (compatible; METANOIA/%s; +https://git.gammaspectra.live/S.O.N.G/METANOIA) golang/%s (%s; %s; net/http; %s)", utilities.Version, runtime.Version(), runtime.GOOS, runtime.GOARCH, runtime.Compiler)

//defaultCacheStore is the process-wide cache used by every CachingClient;
//nil disables caching. Set via SetCacheStore.
var defaultCacheStore CacheStore

//CachingClient wraps an http.Client with read-through caching against the
//package-level CacheStore.
type CachingClient struct {
	context string //label for the client's purpose; not used as request state
	client  *http.Client
}

//CacheStore persists HTTP responses keyed by their originating request.
type CacheStore interface {
	Get(request *http.Request) (*http.Response, error)
	Set(request *http.Request, response *http.Response) (*http.Response, error)
}

//SetCacheStore installs the process-wide cache backend used by all
//CachingClients.
//NOTE(review): not synchronized — presumably called once during startup
//before any requests; confirm.
func SetCacheStore(store CacheStore) {
	defaultCacheStore = store
}

//NewCachingClient creates a client labeled with the given context string,
//backed by http.DefaultClient.
func NewCachingClient(context string) *CachingClient {
	return &CachingClient{
		context: context,
		client:  http.DefaultClient,
	}
}
|
||||
|
||||
//Request performs req with read-through caching. A cached response is
//returned if it was fetched within the given lifetime; otherwise the request
//goes to the network and, on success, the response is stored (stamped with an
//X-Fetched-At header) and the cached copy is returned.
func (c *CachingClient) Request(req *http.Request, lifetime time.Duration) (*http.Response, error) {
	//Ensure a User-Agent is always present: required by sources such as
	//MusicBrainz, and excluded from the cache key by the store.
	if req.Header == nil || req.UserAgent() == "" {
		if req.Header == nil {
			req.Header = make(http.Header)
		}
		req.Header.Set("User-Agent", DefaultUserAgent)
	}

	if defaultCacheStore != nil {
		response, err := defaultCacheStore.Get(req)
		if err == nil && response != nil {
			//The fetch time is stored on the cached request's headers; a
			//parse failure yields the zero time, which always reads as
			//expired and falls through to a network fetch.
			timestamp, _ := time.ParseInLocation(time.UnixDate, response.Request.Header.Get("X-Fetched-At"), time.UTC)

			if timestamp.Add(lifetime).After(time.Now()) {
				return response, nil
			}
		}
	}

	response, err := c.client.Do(req)
	if defaultCacheStore != nil && err == nil {
		//Stamp the fetch time so the freshness check above works on later
		//hits; Set consumes the network body and returns a cached copy.
		req.Header.Set("X-Fetched-At", time.Now().UTC().Format(time.UnixDate))
		response, err = defaultCacheStore.Set(req, response)
	}

	return response, err
}
|
13
metadata/discid.go
Normal file
13
metadata/discid.go
Normal file
|
@ -0,0 +1,13 @@
|
|||
package metadata
|
||||
|
||||
//See https://musicbrainz.org/doc/Disc_ID_Calculation for calculation info

//DiscIDSize is the character length of a MusicBrainz Disc ID (base64 of a
//20-byte SHA-1 digest, padding included).
const DiscIDSize = 28

//DiscID MusicBrainz Disc ID
type DiscID string

//TocIDSize is the character length of a CUETools DB TOCID (same encoding as
//DiscID).
const TocIDSize = 28

//TocID CueTools CTDB TOCID
type TocID string
|
146
metadata/hasher.go
Normal file
146
metadata/hasher.go
Normal file
|
@ -0,0 +1,146 @@
|
|||
package metadata
|
||||
|
||||
import (
|
||||
"git.gammaspectra.live/S.O.N.G/Hibiki/utilities/audio/format"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
//Hasher consumes AnalyzerPackets from a channel on a background goroutine and
//accumulates stream-wide counters: sample count, duration, and the observed
//sample rate / bit depth / channel count (which collapse to -1 when packets
//disagree). Use Wait to block until the channel is closed and drained.
type Hasher struct {
	options HasherOptions
	channel chan *format.AnalyzerPacket
	wg      sync.WaitGroup

	samples    int
	duration   float64 //seconds, accumulated per packet; fallback when sampleRate is unusable
	sampleRate int     //0 = unseen, -1 = inconsistent across packets
	bitDepth   int     //0 = unseen, -1 = inconsistent across packets
	channels   int     //0 = unseen, -1 = inconsistent across packets
	buffer     [][]int32 //NOTE(review): allocated in startRoutine but never read in this view
}

//HasherOptions controls synthetic silence padding and sample trimming around
//the hashed stream.
type HasherOptions struct {
	PrefixSilenceSamples int //silent samples injected before the first packet
	AppendSilenceSamples int //silent samples injected after the input channel closes
	SkipStartSamples     int //NOTE(review): only used as the length of `buffer`; trimming not implemented in this view
	SkipEndSamples       int //NOTE(review): used as the capacity of `buffer`; see startRoutine
}
|
||||
|
||||
func NewMuxedHasher(channel chan *format.AnalyzerPacket, options ...HasherOptions) (h []*Hasher) {
|
||||
channels := make([]chan *format.AnalyzerPacket, len(options))
|
||||
for i := range channels {
|
||||
channels[i] = make(chan *format.AnalyzerPacket, 16)
|
||||
h = append(h, NewHasher(channels[i], options[i]))
|
||||
}
|
||||
|
||||
go func() {
|
||||
defer func() {
|
||||
for _, c := range channels {
|
||||
close(c)
|
||||
}
|
||||
}()
|
||||
|
||||
for packet := range channel {
|
||||
for _, c := range channels {
|
||||
c <- packet
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func NewHasher(channel chan *format.AnalyzerPacket, options HasherOptions) (h *Hasher) {
|
||||
h = &Hasher{
|
||||
options: options,
|
||||
channel: channel,
|
||||
}
|
||||
|
||||
h.startRoutine()
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
//startRoutine launches the goroutine that drains h.channel, optionally
//injecting leading silence before the first packet and trailing silence after
//the channel closes. Wait() blocks on the WaitGroup armed here.
func (h *Hasher) startRoutine() {
	h.wg.Add(1)
	go func() {
		defer h.wg.Done()

		for packet := range h.channel {
			//h.samples == 0 marks the first packet (handlePacket increments
			//it, including for injected silence, so this runs only once).
			if h.samples == 0 {
				//NOTE(review): length SkipStartSamples with capacity
				//SkipEndSamples looks suspect — make panics if the capacity
				//is smaller than the length, and `buffer` is never read in
				//this view. Confirm the intended sizing.
				h.buffer = make([][]int32, h.options.SkipStartSamples, h.options.SkipEndSamples)

				if h.options.PrefixSilenceSamples > 0 {
					//Synthesize leading silence matching the first packet's
					//format (all-zero samples).
					h.handlePacket(&format.AnalyzerPacket{
						Samples:    make([]int32, packet.Channels*h.options.PrefixSilenceSamples),
						Channels:   packet.Channels,
						SampleRate: packet.SampleRate,
						BitDepth:   packet.BitDepth,
					})
				}
			}

			h.handlePacket(packet)
		}
		if h.options.AppendSilenceSamples > 0 {
			//Trailing silence uses the accumulated stream format; these
			//fields are -1 if the stream was inconsistent.
			h.handlePacket(&format.AnalyzerPacket{
				Samples:    make([]int32, h.channels*h.options.AppendSilenceSamples),
				Channels:   h.channels,
				SampleRate: h.sampleRate,
				BitDepth:   h.bitDepth,
			})
		}
	}()
}
|
||||
|
||||
func (h *Hasher) handlePacket(packet *format.AnalyzerPacket) {
|
||||
samples := len(packet.Samples) / packet.Channels
|
||||
|
||||
h.samples += samples
|
||||
|
||||
if h.sampleRate == 0 {
|
||||
h.sampleRate = packet.SampleRate
|
||||
} else if h.sampleRate != packet.SampleRate {
|
||||
h.sampleRate = -1
|
||||
}
|
||||
if h.bitDepth == 0 {
|
||||
h.bitDepth = packet.BitDepth
|
||||
} else if h.bitDepth != packet.BitDepth {
|
||||
h.bitDepth = -1
|
||||
}
|
||||
if h.channels == 0 {
|
||||
h.channels = packet.Channels
|
||||
} else if h.channels != packet.Channels {
|
||||
h.channels = -1
|
||||
}
|
||||
|
||||
h.duration += float64(samples) / float64(packet.SampleRate)
|
||||
}
|
||||
|
||||
//GetSampleCount returns the total samples processed so far, including any
//injected silence. Not synchronized — read after Wait for a stable value.
func (h *Hasher) GetSampleCount() int {
	return h.samples
}

//GetChannels returns the stream channel count (0 = none seen yet, -1 =
//inconsistent across packets).
func (h *Hasher) GetChannels() int {
	return h.channels
}

//GetSampleRate returns the stream sample rate (0 = none seen yet, -1 =
//inconsistent across packets).
func (h *Hasher) GetSampleRate() int {
	return h.sampleRate
}

//GetOptions returns the options this Hasher was created with.
func (h *Hasher) GetOptions() HasherOptions {
	return h.options
}

//GetDuration returns the total audio duration: exact when a single sample
//rate was observed, otherwise the per-packet accumulated fallback.
func (h *Hasher) GetDuration() time.Duration {
	if h.sampleRate > 0 {
		return time.Duration(float64(time.Second) * (float64(h.samples) / float64(h.sampleRate)))
	}

	//Fallback calculated duration
	return time.Duration(float64(time.Second) * h.duration)
}

//Wait blocks until the input channel is closed and all packets are processed.
func (h *Hasher) Wait() {
	h.wg.Wait()
}
|
23
metadata/language.go
Normal file
23
metadata/language.go
Normal file
|
@ -0,0 +1,23 @@
|
|||
package metadata
|
||||
|
||||
import (
|
||||
"github.com/ikawaha/kagome-dict/uni"
|
||||
"github.com/ikawaha/kagome/v2/tokenizer"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
"log"
|
||||
)
|
||||
|
||||
func GenerateAlternateFromJapanese(text string) []Name {
|
||||
normalized := norm.NFC.String(text)
|
||||
|
||||
t, err := tokenizer.New(uni.Dict(), tokenizer.OmitBosEos())
|
||||
for _, token := range t.Tokenize(normalized) {
|
||||
token.Features()
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
log.Panic(err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
48
metadata/source.go
Normal file
48
metadata/source.go
Normal file
|
@ -0,0 +1,48 @@
|
|||
package metadata
|
||||
|
||||
//License describes the usage terms a metadata source publishes its data
//under.
type License struct {
	Attribution string //credit line required by the license, if any
	Code        LicenseCode
	URL         string
}

//LicenseCode is a short identifier for a known license.
type LicenseCode string

//NOTE(review): these constants are untyped strings (assignable to
//LicenseCode but typed as string); underscore names are non-idiomatic Go but
//renaming would break callers.
const (
	LicenseCode_Unknown        = "Unknown"
	LicenseCode_NC             = "NonCommercial"
	LicenseCode_CC0            = "CC0"
	LicenseCode_CC_BY_NC_SA_30 = "CC BY-NC-SA 3.0"
	LicenseCode_CC_BY_SA_40    = "CC BY-SA 4.0"
)

//AlbumNameSource finds albums by their (possibly alternate) names.
type AlbumNameSource interface {
	SourceMetadata
	FindByAlbumNames(names []Name) []*Album
}

//CatalogSource finds albums by release catalog number.
type CatalogSource interface {
	SourceMetadata
	FindByCatalogNumber(catalog CatalogNumber) []*Album
}

//TOCSource finds albums by a disc's table of contents.
type TOCSource interface {
	SourceMetadata
	FindByTOC(toc TOC) []*Album
}

//CDDB1Source extends TOCSource with CDDB1/freedb disc-id lookups, singly or
//for a group of discs.
type CDDB1Source interface {
	TOCSource
	FindByCDDB1(cddb CDDB1) []*Album
	FindByCDDB1Group(group []CDDB1) []*Album
}

//DiscIDSource extends TOCSource with MusicBrainz Disc ID lookups.
type DiscIDSource interface {
	TOCSource
	FindByDiscID(discId DiscID) []*Album
}

//TocIDSource extends TOCSource with CUETools DB TOCID lookups.
type TocIDSource interface {
	TOCSource
	FindByTocID(tocId TocID) []*Album
}

//SourceMetadata identifies a metadata source and its license terms.
type SourceMetadata interface {
	GetName() string
	GetURL() string
	GetLicense() License
}
|
178
metadata/toc.go
Normal file
178
metadata/toc.go
Normal file
|
@ -0,0 +1,178 @@
|
|||
package metadata
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
//TocPregap is the standard 150-sector (2 second) lead-in pregap.
const TocPregap = 150

//SectorsPerSecond is the CD frame rate: 75 sectors per second of audio.
const SectorsPerSecond = 75

//DataTrackGap is the standard gap, in sectors, before a data session.
const DataTrackGap = 11400

//TOC includes a list, index 0 being total sectors/end, then start times follow, with TocPregap added
type TOC []int

//specialBase64Encoding is the MusicBrainz/CTDB base64 variant: '+' and '/'
//replaced by '.' and '_', with '-' as the padding character.
var specialBase64Encoding = base64.NewEncoding("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._").WithPadding('-')

//NewTOCFromString parses a separated list of integers into a TOC. The
//separator defaults to a single space; an optional first variadic argument
//overrides it. Fields that fail to parse as integers are silently skipped.
func NewTOCFromString(toc string, split ...string) TOC {
	separator := " "
	if len(split) != 0 {
		separator = split[0]
	}

	var result TOC
	for _, field := range strings.Split(toc, separator) {
		if value, parseErr := strconv.Atoi(field); parseErr == nil {
			result = append(result, value)
		}
	}
	return result
}
|
||||
func NewTOCFromCTDBString(toc string) (r TOC) {
|
||||
t := NewTOCFromString(toc)
|
||||
|
||||
r = append(r, t[len(t)-1]+TocPregap)
|
||||
for i := 0; i < len(t)-1; i++ {
|
||||
r = append(r, t[i]+TocPregap)
|
||||
}
|
||||
return
|
||||
}
|
||||
func NewTOCFromCTDB2String(toc string) (r TOC) {
|
||||
t := NewTOCFromString(toc, ":")
|
||||
|
||||
r = append(r, t[len(t)-1]+TocPregap)
|
||||
for i := 0; i < len(t)-1; i++ {
|
||||
r = append(r, t[i]+TocPregap)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
//GetTrackNumber returns the number of tracks: all entries minus the lead-out
//stored at index 0.
func (t TOC) GetTrackNumber() int {
	return len(t) - 1
}

//GetDuration returns the audio length from the first track's start (t[1]) to
//the lead-out (t[0]), at 75 sectors per second.
func (t TOC) GetDuration() time.Duration {
	return (time.Second * time.Duration(t[0]-t[1])) / SectorsPerSecond
}
|
||||
|
||||
func (t TOC) GetTrackDuration(index int) time.Duration {
|
||||
if index == len(t) {
|
||||
return (time.Second * time.Duration(t[index+2]-t[index+1])) / SectorsPerSecond
|
||||
} else {
|
||||
return (time.Second * time.Duration(t[index+2]-t[index+1])) / SectorsPerSecond
|
||||
}
|
||||
}
|
||||
|
||||
func (t TOC) CTDBString() string {
|
||||
toc := make([]string, 0, len(t))
|
||||
for _, o := range t[1:] {
|
||||
toc = append(toc, fmt.Sprintf("%d", o-TocPregap))
|
||||
}
|
||||
toc = append(toc, fmt.Sprintf("%d", t[0]-TocPregap))
|
||||
|
||||
return strings.Join(toc, ":")
|
||||
}
|
||||
|
||||
//CDDBString renders the freedb query form of the TOC:
//"<discid> <track count> <track start offsets> <length in seconds>".
func (t TOC) CDDBString() string {
	return fmt.Sprintf("%s %d %s %d", t.GetCDDB1(), t.GetTrackNumber(), t[1:].String(), int(t.GetDuration().Seconds()))
}

//MusicBrainzString renders the TOC in MusicBrainz TOC form:
//"1 <last track> <lead-out> <track start offsets>" — t.String() already
//begins with the lead-out stored at index 0.
func (t TOC) MusicBrainzString() string {
	return fmt.Sprintf("1 %d %s", t.GetTrackNumber(), t.String())
}

//String renders all entries (lead-out first, then track starts) separated by
//single spaces.
func (t TOC) String() string {
	toc := make([]string, 0, len(t))
	for _, o := range t {
		toc = append(toc, fmt.Sprintf("%d", o))
	}
	return strings.Join(toc, " ")
}
|
||||
|
||||
//GetCDDB1 computes the CDDB1/freedb disc id for this TOC, packed as
//checksum byte | 16-bit disc length in seconds | track count byte
//(see the layout documented on CDDB1 in ccdb.go).
func (t TOC) GetCDDB1() CDDB1 {

	length := uint32(t.GetDuration().Seconds())
	//checksum: sum of the decimal digits of each track's start time in
	//seconds, taken modulo 255 below
	checksum := uint32(0)
	for i := 1; i < len(t); i++ {
		n := uint32(t[i] / SectorsPerSecond)
		for n > 0 {
			checksum += n % 10
			n /= 10
		}
	}

	return CDDB1(uint32((len(t)-1)&0xFF) | ((length & 0xFFFF) << 8) | ((checksum % 255) << 24))
}
|
||||
|
||||
//GetDiscID computes the MusicBrainz Disc ID: SHA-1 over the first track
//number, the last track number and 100 zero-padded hex frame offsets
//(lead-out first), encoded with the MusicBrainz base64 variant.
//See https://musicbrainz.org/doc/Disc_ID_Calculation
func (t TOC) GetDiscID() DiscID {

	hasher := sha1.New()

	//the first track number is hardcoded to 1
	hasher.Write([]byte(fmt.Sprintf("%02X", 1)))
	hasher.Write([]byte(fmt.Sprintf("%02X", len(t)-1)))

	//Slot 0 must be the lead-out offset; TOC already stores the lead-out at
	//index 0, so iterating in order matches the required layout. Slots past
	//the track count are zero-filled.
	for i := 0; i < 100; i++ {
		if i < len(t) { //tracks+lead-out
			hasher.Write([]byte(fmt.Sprintf("%08X", t[i])))
		} else {
			hasher.Write([]byte(fmt.Sprintf("%08X", 0)))
		}
	}

	result := hasher.Sum([]byte{})
	return DiscID(specialBase64Encoding.EncodeToString(result))
}
|
||||
|
||||
//GetTocID computes the CUETools DB TOCID: SHA-1 over the start offsets of
//tracks 2..n relative to track 1 (track 1 itself is implicitly zero and
//omitted), then the relative lead-out, zero-padded out to 100 entries, in the
//same base64 variant as the Disc ID.
func (t TOC) GetTocID() TocID {

	hasher := sha1.New()

	//offsets of tracks 2..n relative to the first track's start
	for i := 2; i < len(t); i++ {
		hasher.Write([]byte(fmt.Sprintf("%08X", t[i]-t[1])))
	}
	//lead-out relative to the first track's start
	hasher.Write([]byte(fmt.Sprintf("%08X", t[0]-t[1])))
	//zero padding out to 100 entries total
	for i := len(t) - 1; i < 100; i++ {
		hasher.Write([]byte(fmt.Sprintf("%08X", 0)))
	}

	result := hasher.Sum([]byte{})
	return TocID(specialBase64Encoding.EncodeToString(result))
}
|
||||
|
||||
//GetAccurateRipData returns the components used to build an AccurateRip disc
//id: the track count, the sum of all pregap-free start offsets (lead-out
//included), the 1-based-position-weighted sum of the same offsets, and the
//CDDB1 id.
func (t TOC) GetAccurateRipData() (byte, uint32, uint32, CDDB1) {

	var TrackOffsetsAdded uint32
	var TrackOffsetsMultiplied uint32
	var num uint32
	for i := 1; i < len(t); i++ {
		start := uint32(t[i] - TocPregap)
		TrackOffsetsAdded += start
		if start < 1 {
			//NOTE(review): clamping to 1 makes a track at offset 0 still
			//contribute its position weight below — presumably matching
			//AccurateRip's handling of track 1; confirm against a reference
			//implementation.
			start = 1
		}
		num++
		TrackOffsetsMultiplied += start * num
	}
	//the lead-out participates in both sums as the final entry
	TrackOffsetsAdded += uint32(t[0] - TocPregap)
	num++
	TrackOffsetsMultiplied += uint32(t[0]-TocPregap) * num

	return byte(t.GetTrackNumber()), TrackOffsetsAdded, TrackOffsetsMultiplied, t.GetCDDB1()
}
|
||||
|
||||
func (t TOC) Equals(o TOC) bool {
|
||||
if len(t) != len(o) {
|
||||
return false
|
||||
}
|
||||
|
||||
for i, d := range t {
|
||||
if o[i] != d {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
3
utilities/utilities.go
Normal file
3
utilities/utilities.go
Normal file
|
@ -0,0 +1,3 @@
|
|||
package utilities
|
||||
|
||||
//Version is the METANOIA release identifier, embedded in the default HTTP
//User-Agent (see metadata/client.go).
const Version = "1.0"
|
108
utilities/wiki/parser.go
Normal file
108
utilities/wiki/parser.go
Normal file
|
@ -0,0 +1,108 @@
|
|||
package wiki
|
||||
|
||||
import "strings"
|
||||
|
||||
//NormalizeWikiTitle converts a display title toward wiki URL form by
//replacing spaces with underscores.
//TODO: handle the rest of MediaWiki title normalization (first-letter casing,
//collapsed whitespace, namespace aliases).
func NormalizeWikiTitle(title string) string {
	//strings.ReplaceAll replaces the old strings.Replace(..., -1) form.
	return strings.ReplaceAll(title, " ", "_")
}
|
||||
|
||||
//ParseWikiText small WikiText parser that extracts text, Templates, and its arguments/parameters
|
||||
func ParseWikiText(text string) (result []interface{}) {
|
||||
index := 0
|
||||
|
||||
for {
|
||||
templateIndex := strings.Index(text[index:], "{{")
|
||||
if templateIndex == -1 {
|
||||
t := strings.TrimSpace(text[index:])
|
||||
if len(t) > 0 {
|
||||
result = append(result, t)
|
||||
}
|
||||
break
|
||||
} else {
|
||||
t := strings.TrimSpace(text[index : index+templateIndex])
|
||||
if len(t) > 0 {
|
||||
result = append(result, t)
|
||||
}
|
||||
var tpl *Template
|
||||
index, tpl = ParseTemplate(text, index+templateIndex+2, 0)
|
||||
if tpl != nil {
|
||||
result = append(result, tpl)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
//ParseTemplate parses one template invocation starting just after its opening
//"{{" (index points past the braces). It returns the index of the first
//character after the closing "}}" (or len(text) if unterminated) and the
//parsed template. The first token becomes the template name; subsequent
//'|'-separated tokens become parameters, with "key=value" producing keyed
//parameters. Nested "{{...}}" recurse with depth+1.
func ParseTemplate(text string, index int, depth int) (i int, template *Template) {

	var c byte
	lastToken := index //start of the token currently being scanned

	var key string //pending parameter key; "" means positional

	//addValue flushes the current token: the first token creates the
	//template, later tokens are added as keyed or positional parameters.
	//Returns the trimmed token length (0 when nothing was flushed).
	addValue := func() int {
		if lastToken < len(text) && i-lastToken > 0 {
			t := strings.TrimSpace(text[lastToken:i])
			if len(t) > 0 {
				if template == nil {
					template = NewTemplate(t)
				} else {
					if key == "" {
						template.AddParameterUnkeyed(t)
					} else {
						template.AddParameter(key, t)
					}
				}
			}

			return len(t)
		}

		return 0
	}
	//addKey captures the current token as the pending parameter key.
	addKey := func() {
		if lastToken < len(text) && i-lastToken > 0 {
			t := strings.TrimSpace(text[lastToken:i])
			if len(t) > 0 {
				key = t
			}
		}
	}
	for i = index; i < len(text); i++ {
		c = text[i]

		if c == '}' && i < len(text)-1 && text[i+1] == '}' {
			//End of this template; advance past "}}" (the break skips the
			//loop's own i++).
			addValue()
			i += 2
			break
		} else if c == '{' && i < len(text)-1 && text[i+1] == '{' {
			//Nested template: flush pending text, then recurse.
			addValue()
			var tpl *Template
			i, tpl = ParseTemplate(text, i+2, depth+1)
			if tpl != nil {
				//NOTE(review): if a nested template appears before this
				//template has a name (template still nil), this dereferences
				//a nil *Template and panics — confirm inputs can never start
				//with "{{{{".
				if key == "" {
					template.AddParameterUnkeyed(tpl)
				} else {
					template.AddParameter(key, tpl)
				}
			}
			lastToken = i
		} else if c == '|' {
			//Parameter separator: flush the previous value and reset the key.
			addValue()
			lastToken = i + 1
			key = ""
		} else if c == '\n' {
			//Newlines also terminate a value (common template formatting);
			//the pending key is intentionally kept.
			addValue()
			lastToken = i + 1
		} else if c == '=' {
			//First '=' of a parameter marks the key; later '='s stay part of
			//the value.
			if key == "" {
				addKey()
				lastToken = i + 1
			}
		}
	}

	return
}
|
26
utilities/wiki/template.go
Normal file
26
utilities/wiki/template.go
Normal file
|
@ -0,0 +1,26 @@
|
|||
package wiki
|
||||
|
||||
import "fmt"
|
||||
|
||||
//Template is a parsed WikiText template invocation: its name plus a multimap
//of parameters. Positional (unkeyed) parameters are stored under stringified
//numeric keys starting at "0".
type Template struct {
	Name       string
	Parameters map[string][]interface{}
}

//NewTemplate creates an empty Template with the given name.
func NewTemplate(name string) *Template {
	return &Template{
		Name:       name,
		Parameters: make(map[string][]interface{}),
	}
}

//AddParameterUnkeyed stores a positional parameter under the next numeric
//key.
//NOTE(review): the key is derived from the current total number of keys, so
//interleaving keyed and unkeyed parameters can skip numbers or overwrite an
//earlier positional slot — confirm callers never rely on mixed ordering.
func (t *Template) AddParameterUnkeyed(value interface{}) {
	t.Parameters[fmt.Sprintf("%d", len(t.Parameters))] = []interface{}{value}
}

//AddParameter appends a value under the given key; repeated keys accumulate
//values in insertion order.
func (t *Template) AddParameter(key string, value interface{}) {
	//append on the nil slice returned for a missing key allocates as needed,
	//so the previous explicit make-if-absent branch was removed; behavior is
	//unchanged.
	t.Parameters[key] = append(t.Parameters[key], value)
}
|
Loading…
Reference in a new issue