Implement HTTP URL playback via Range requests.
All checks were successful
continuous-integration/drone/push Build is passing
All checks were successful
continuous-integration/drone/push Build is passing
This commit is contained in:
parent
2d91a5bd47
commit
08574dbe05
|
@ -14,9 +14,9 @@ Radio streamer ([kawa](https://github.com/Luminarys/kawa) drop-in compatible).
|
|||
* Implements `queue.nr` and `/random` (to be deprecated/changed)
|
||||
* Supports extra encoder bitrate control settings (CBR, VBR, auto, etc.)
|
||||
* Can read and apply ReplayGain tags.
|
||||
* Can have audio sources over HTTP(s) URLs on `path` property, and supports seeking.
|
||||
|
||||
# Future improvements
|
||||
* Allow playback of files by URL, not just by path
|
||||
* Implement precise timing information side-channel
|
||||
|
||||
## Dependencies
|
||||
|
|
|
@ -18,6 +18,7 @@ host="127.0.0.1"
|
|||
# }
|
||||
#
|
||||
# The path is the path to an audio file on the filesystem you want MeteorLight to play.
|
||||
# It can also be an http(s) URL pointing to a server that supports Range requests and returns a proper Content-Length.
|
||||
# Additionally, the "title", "artist" and "art" properties can be included to be used as metadata.
|
||||
# If "title" and "artist" are not specified, file tags may be used.
|
||||
random_song_api="http://localhost:8012/api/random"
|
||||
|
|
166
http.go
Normal file
166
http.go
Normal file
|
@ -0,0 +1,166 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// rangeReaderBufferSize is the minimum number of bytes fetched per HTTP
// Range request (64 KiB); small sequential Reads are served from this cache.
const rangeReaderBufferSize = 1024 * 64

// RangeReadSeekCloser exposes an HTTP(S) resource as an io.ReadSeekCloser by
// issuing byte-range GET requests. The server must advertise
// "Accept-Ranges: bytes" and return a valid Content-Length (both are checked
// by getInformation before the first Read).
type RangeReadSeekCloser struct {
	uri  *url.URL // parsed target URL
	size int64    // total resource size, taken from the HEAD Content-Length
	i    int64    // current absolute read position within the resource
	buf  []byte   // most recently fetched chunk, used to serve small Reads
	ib   int64    // absolute offset of buf[0] within the resource
}
|
||||
|
||||
func NewRangeReadSeekCloser(uri string) (*RangeReadSeekCloser, error) {
|
||||
parsedUrl, err := url.Parse(uri)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r := &RangeReadSeekCloser{
|
||||
uri: parsedUrl,
|
||||
buf: make([]byte, 0, rangeReaderBufferSize),
|
||||
}
|
||||
|
||||
if err = r.getInformation(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func (r *RangeReadSeekCloser) GetURI() string {
|
||||
return r.uri.String()
|
||||
}
|
||||
|
||||
func (r *RangeReadSeekCloser) Read(p []byte) (n int, err error) {
|
||||
if r.i >= r.size {
|
||||
return 0, io.EOF
|
||||
}
|
||||
|
||||
if r.i >= r.ib {
|
||||
bufStart := int(r.i - r.ib)
|
||||
bufEnd := bufStart + len(p)
|
||||
if bufEnd <= len(r.buf) {
|
||||
copy(p, r.buf[bufStart:bufEnd])
|
||||
r.i += int64(len(p))
|
||||
return len(p), nil
|
||||
}
|
||||
}
|
||||
|
||||
//TODO: EOF error / limit
|
||||
headers := make(http.Header)
|
||||
|
||||
startOffset := r.i
|
||||
endOffset := r.i + int64(len(p)) - 1
|
||||
if endOffset-startOffset+1 < rangeReaderBufferSize {
|
||||
endOffset = startOffset + rangeReaderBufferSize - 1
|
||||
}
|
||||
if endOffset >= (r.size - 1) {
|
||||
endOffset = r.size - 1
|
||||
}
|
||||
|
||||
expectedLength := endOffset - startOffset + 1
|
||||
|
||||
returnLength := int(endOffset - startOffset + 1)
|
||||
if returnLength > len(p) {
|
||||
returnLength = len(p)
|
||||
}
|
||||
|
||||
headers.Set("Range", fmt.Sprintf("bytes=%d-%d", startOffset, endOffset))
|
||||
response, err := http.DefaultClient.Do(&http.Request{
|
||||
Method: "GET",
|
||||
URL: r.uri,
|
||||
Header: headers,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer response.Body.Close()
|
||||
|
||||
if response.StatusCode != http.StatusPartialContent {
|
||||
return 0, fmt.Errorf("response status code %d != %d", response.StatusCode, http.StatusPartialContent)
|
||||
}
|
||||
|
||||
contentLength, err := strconv.ParseInt(response.Header.Get("content-length"), 10, 0)
|
||||
if err != nil {
|
||||
return 0, errors.New("server response does not have a valid Content-Length")
|
||||
}
|
||||
|
||||
if contentLength != expectedLength {
|
||||
return 0, fmt.Errorf("server returned %d bytes, expected %d", contentLength, expectedLength)
|
||||
}
|
||||
|
||||
data, err := ioutil.ReadAll(response.Body)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
if len(data) != int(expectedLength) {
|
||||
return 0, fmt.Errorf("read %d bytes, expected %d", len(data), expectedLength)
|
||||
}
|
||||
copy(p[:returnLength], data[:returnLength])
|
||||
|
||||
r.buf = data
|
||||
r.ib = r.i
|
||||
|
||||
r.i += int64(returnLength)
|
||||
return returnLength, nil
|
||||
}
|
||||
|
||||
func (r *RangeReadSeekCloser) Seek(offset int64, whence int) (int64, error) {
|
||||
switch whence {
|
||||
case io.SeekStart:
|
||||
r.i = offset
|
||||
|
||||
case io.SeekCurrent:
|
||||
r.i += offset
|
||||
|
||||
case io.SeekEnd:
|
||||
//todo: maybe without -1?
|
||||
r.i = (r.size - 1) - offset
|
||||
default:
|
||||
return 0, fmt.Errorf("unknown whence %d", whence)
|
||||
}
|
||||
|
||||
if r.i >= r.size {
|
||||
return r.i, io.EOF
|
||||
} else if r.i < 0 {
|
||||
return r.i, io.ErrUnexpectedEOF
|
||||
}
|
||||
|
||||
return r.i, nil
|
||||
}
|
||||
|
||||
// Close implements io.Closer. There is no persistent connection to tear
// down — each Range request's response body is closed within Read — so
// there is nothing to release here.
func (r *RangeReadSeekCloser) Close() error {
	return nil
}
|
||||
|
||||
func (r *RangeReadSeekCloser) getInformation() error {
|
||||
response, err := http.DefaultClient.Head(r.GetURI())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer response.Body.Close()
|
||||
if response.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("response status code %d != %d", response.StatusCode, http.StatusOK)
|
||||
}
|
||||
if response.Header.Get("accept-ranges") != "bytes" {
|
||||
return errors.New("server does not accept Range")
|
||||
}
|
||||
if r.size, err = strconv.ParseInt(response.Header.Get("content-length"), 10, 0); err != nil {
|
||||
return errors.New("server response does not have a valid Content-Length")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
79
queue.go
79
queue.go
|
@ -48,26 +48,47 @@ func (e *QueueTrackEntry) Load() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
f, err := os.Open(e.Path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
//close at end, TODO check if it runs
|
||||
runtime.SetFinalizer(f, (*os.File).Close)
|
||||
var reader io.ReadSeekCloser
|
||||
|
||||
meta, err := tag.ReadFrom(f)
|
||||
if len(e.Path) > 4 && e.Path[:4] == "http" {
|
||||
s, err := NewRangeReadSeekCloser(e.Path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
reader = s
|
||||
//close at end, TODO check if it runs
|
||||
runtime.SetFinalizer(s, (*RangeReadSeekCloser).Close)
|
||||
|
||||
reader = s
|
||||
} else {
|
||||
f, err := os.Open(e.Path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
//close at end, TODO check if it runs
|
||||
runtime.SetFinalizer(f, (*os.File).Close)
|
||||
|
||||
reader = f
|
||||
}
|
||||
|
||||
if reader == nil {
|
||||
return errors.New("could not find stream opener")
|
||||
}
|
||||
|
||||
meta, err := tag.ReadFrom(reader)
|
||||
if err != nil {
|
||||
err = nil
|
||||
}
|
||||
if _, err = f.Seek(0, io.SeekStart); err != nil {
|
||||
if _, err = reader.Seek(0, io.SeekStart); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
decoders, err := guess.GetDecoders(f, e.Path)
|
||||
decoders, err := guess.GetDecoders(reader, e.Path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
source, err := guess.Open(f, decoders)
|
||||
source, err := guess.Open(reader, decoders)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -84,7 +105,7 @@ func (e *QueueTrackEntry) Load() error {
|
|||
e.Metadata.Title = meta.Title()
|
||||
}
|
||||
if e.Metadata.Album == "" {
|
||||
e.Metadata.Title = meta.Album()
|
||||
e.Metadata.Album = meta.Album()
|
||||
}
|
||||
if e.Metadata.Artist == "" {
|
||||
e.Metadata.Artist = meta.Artist()
|
||||
|
@ -98,28 +119,30 @@ func (e *QueueTrackEntry) Load() error {
|
|||
var value interface{}
|
||||
var ok bool
|
||||
|
||||
if value, ok = tags["replaygain_track_gain"]; ok {
|
||||
if strValue, ok = value.(string); ok {
|
||||
if e.ReplayGain.TrackGain, err = strconv.ParseFloat(strings.TrimSpace(strings.TrimSuffix(strValue, "dB")), 64); err == nil {
|
||||
e.ReplayGain.Apply = true
|
||||
if !e.ReplayGain.Apply {
|
||||
if value, ok = tags["replaygain_track_gain"]; ok {
|
||||
if strValue, ok = value.(string); ok {
|
||||
if e.ReplayGain.TrackGain, err = strconv.ParseFloat(strings.TrimSpace(strings.TrimSuffix(strValue, "dB")), 64); err == nil {
|
||||
e.ReplayGain.Apply = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if value, ok = tags["replaygain_track_peak"]; ok {
|
||||
if strValue, ok = value.(string); ok {
|
||||
if e.ReplayGain.TrackPeak, err = strconv.ParseFloat(strings.TrimSpace(strings.TrimSuffix(strValue, "dB")), 64); err == nil {
|
||||
e.ReplayGain.Apply = true
|
||||
if value, ok = tags["replaygain_track_peak"]; ok {
|
||||
if strValue, ok = value.(string); ok {
|
||||
if e.ReplayGain.TrackPeak, err = strconv.ParseFloat(strings.TrimSpace(strings.TrimSuffix(strValue, "dB")), 64); err == nil {
|
||||
e.ReplayGain.Apply = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if value, ok = tags["replaygain_album_gain"]; ok {
|
||||
if strValue, ok = value.(string); ok {
|
||||
e.ReplayGain.AlbumGain, _ = strconv.ParseFloat(strings.TrimSpace(strings.TrimSuffix(strValue, "dB")), 64)
|
||||
if value, ok = tags["replaygain_album_gain"]; ok {
|
||||
if strValue, ok = value.(string); ok {
|
||||
e.ReplayGain.AlbumGain, _ = strconv.ParseFloat(strings.TrimSpace(strings.TrimSuffix(strValue, "dB")), 64)
|
||||
}
|
||||
}
|
||||
}
|
||||
if value, ok = tags["replaygain_album_peak"]; ok {
|
||||
if strValue, ok = value.(string); ok {
|
||||
e.ReplayGain.AlbumPeak, _ = strconv.ParseFloat(strings.TrimSpace(strings.TrimSuffix(strValue, "dB")), 64)
|
||||
if value, ok = tags["replaygain_album_peak"]; ok {
|
||||
if strValue, ok = value.(string); ok {
|
||||
e.ReplayGain.AlbumPeak, _ = strconv.ParseFloat(strings.TrimSpace(strings.TrimSuffix(strValue, "dB")), 64)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue