Add loose album search
This commit is contained in:
parent
2140256dc6
commit
573b954828
|
@ -20,8 +20,9 @@ Small Golang server that loads all albums in an index at startup and is used to
|
|||
* Album Title ex. `/search?type=album&query=マジコカタストロフィ`
|
||||
* Album Catalog ex. `/search?type=album&query=STAL-1302`
|
||||
* Values here are normalized, so special characters and uppercase letters are converted; Unicode is handled as well.
|
||||
* Additionally, `&type=loosealbum` exists to match loosely after normalization (it will return partial matches).
|
||||
* Bare CDDB emulator for `query` and `read` commands. Used to query by track count + length of album.
|
||||
* query cmd ex. `/cddb?cmd=cddb+query+730dec08` (no need to provide TOC, but can be provided)
|
||||
* query cmd ex. `/cddb?cmd=cddb+query+730dec08` (no need to provide TOC, but it can be provided for a more exact match)
|
||||
* read cmd ex. `/cddb?cmd=cddb+read+Soundtrack54742+730dec08`
|
||||
|
||||
# License
|
||||
|
|
14
server.go
14
server.go
|
@ -962,7 +962,7 @@ func main() {
|
|||
writer.Header().Set("Content-Type", "application/json")
|
||||
|
||||
switch request.URL.Query().Get("type") {
|
||||
case "album": //search by title or catalog number
|
||||
case "album": //search by title or catalog number, "exact" after normalization
|
||||
entries, ok := albumTitleLookup[normalizeSearchTitle(request.URL.Query().Get("query"))]
|
||||
|
||||
if !ok {
|
||||
|
@ -972,7 +972,19 @@ func main() {
|
|||
|
||||
writer.Write(jsonBytes)
|
||||
}
|
||||
case "loosealbum": //search by title or catalog number, loosely
|
||||
|
||||
var entries []*albumEntry
|
||||
normalized := normalizeSearchTitle(request.URL.Query().Get("query"))
|
||||
for k, v := range albumTitleLookup {
|
||||
if strings.Index(k, normalized) != -1 {
|
||||
entries = append(entries, v...)
|
||||
}
|
||||
}
|
||||
|
||||
jsonBytes, _ := json.Marshal(entries)
|
||||
|
||||
writer.Write(jsonBytes)
|
||||
default:
|
||||
writer.WriteHeader(http.StatusNotFound)
|
||||
writer.Write([]byte("[]"))
|
||||
|
|
Loading…
Reference in a new issue