path: root/rhimport/fetcher.go
Diffstat (limited to 'rhimport/fetcher.go')
-rw-r--r--  rhimport/fetcher.go  294
1 file changed, 294 insertions(+), 0 deletions(-)
diff --git a/rhimport/fetcher.go b/rhimport/fetcher.go
new file mode 100644
index 0000000..2d99be4
--- /dev/null
+++ b/rhimport/fetcher.go
@@ -0,0 +1,294 @@
+//
+// rhimportd
+//
+// The Radio Helsinki Rivendell Import Daemon
+//
+//
+// Copyright (C) 2015-2016 Christian Pointner <equinox@helsinki.at>
+//
+// This file is part of rhimportd.
+//
+// rhimportd is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// any later version.
+//
+// rhimportd is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with rhimportd. If not, see <http://www.gnu.org/licenses/>.
+//
+
+package rhimport
+
+import (
+ "fmt"
+ "github.com/andelf/go-curl"
+ "io/ioutil"
+ "mime"
+ "net/http"
+ "net/url"
+ "os"
+ "path"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+)
+
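+// FetcherCurlCBData holds the state shared between the libcurl header and
+// write callbacks: the temporary base path, the local filename and the
+// destination file handle once it has been opened.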
+type FetcherCurlCBData struct {
+ basepath string
+ filename string
+ remotename string
+ *os.File
+}
+
+func (self *FetcherCurlCBData) Cleanup() {
+ if self.File != nil {
+ self.File.Close()
+ }
+}
+
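+// curlHeaderCallback inspects response headers and, if a Content-Disposition
+// attachment header is present, derives the local filename from it.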
+func curlHeaderCallback(ptr []byte, userdata interface{}) bool {
+ hdr := strings.TrimSpace(string(ptr))
+ data := userdata.(*FetcherCurlCBData)
+
+ if strings.HasPrefix(hdr, "Content-Disposition:") {
+ if mediatype, params, err := mime.ParseMediaType(strings.TrimPrefix(hdr, "Content-Disposition:")); err == nil {
+ if mediatype == "attachment" && params["filename"] != "" {
+ // only use the base name so the server cannot point outside of basepath
+ data.filename = filepath.Join(data.basepath, filepath.Base(params["filename"]))
+ }
+ }
+ }
+ return true
+}
+
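+// curlWriteCallback lazily opens the destination file on the first chunk of
+// body data and writes every received chunk to it.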
+func curlWriteCallback(ptr []byte, userdata interface{}) bool {
+ data := userdata.(*FetcherCurlCBData)
+ if data.File == nil {
+ if data.filename == "" {
+ data.filename = filepath.Join(data.basepath, data.remotename)
+ }
+ fp, err := os.OpenFile(data.filename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
+ if err != nil {
+ rhl.Printf("Unable to create file %s: %s", data.filename, err)
+ return false
+ }
+ data.File = fp
+ }
+ if _, err := data.File.Write(ptr); err != nil {
+ rhl.Printf("Unable to write file %s: %s", data.filename, err)
+ return false
+ }
+ return true
+}
+
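+// fetchFileCurl downloads the source URI via libcurl into a freshly created
+// temporary directory, reporting progress and honoring cancellation through the context.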
+func fetchFileCurl(ctx *Context, res *Result, uri *url.URL) (err error) {
+ rhl.Printf("curl-based fetcher called for '%s'", ctx.SourceUri)
+
+ easy := curl.EasyInit()
+ if easy != nil {
+ defer easy.Cleanup()
+
+ easy.Setopt(curl.OPT_FOLLOWLOCATION, true)
+ easy.Setopt(curl.OPT_URL, ctx.SourceUri)
+
+ cbdata := &FetcherCurlCBData{remotename: path.Base(uri.Path)}
+ defer cbdata.Cleanup()
+ if cbdata.basepath, err = ioutil.TempDir(ctx.conf.TempDir, "rhimportd-"); err != nil {
+ return
+ }
+
+ easy.Setopt(curl.OPT_HEADERFUNCTION, curlHeaderCallback)
+ easy.Setopt(curl.OPT_HEADERDATA, cbdata)
+
+ easy.Setopt(curl.OPT_WRITEFUNCTION, curlWriteCallback)
+ easy.Setopt(curl.OPT_WRITEDATA, cbdata)
+
+ easy.Setopt(curl.OPT_NOPROGRESS, false)
+ easy.Setopt(curl.OPT_PROGRESSFUNCTION, func(dltotal, dlnow, ultotal, ulnow float64, userdata interface{}) bool {
+ if ctx.Cancel != nil && len(ctx.Cancel) > 0 {
+ rhl.Printf("downloading '%s' got canceled", ctx.SourceUri)
+ res.ResponseCode = http.StatusNoContent
+ res.ErrorString = "canceled"
+ return false
+ }
+
+ // only report progress once the total size is known to avoid dividing by zero
+ if ctx.ProgressCallBack != nil && dltotal > 0 {
+ if keep := ctx.ProgressCallBack(1, "downloading", dlnow/dltotal, ctx.ProgressCallBackData); !keep {
+ ctx.ProgressCallBack = nil
+ }
+ }
+ return true
+ })
+ easy.Setopt(curl.OPT_PROGRESSDATA, ctx)
+
+ if err = easy.Perform(); err != nil {
+ if cbdata.File != nil {
+ rhdl.Printf("Removing stale file: %s", cbdata.filename)
+ os.Remove(cbdata.filename)
+ os.Remove(path.Dir(cbdata.filename))
+ }
+ if res.ResponseCode == http.StatusNoContent {
+ err = nil
+ } else {
+ err = fmt.Errorf("curl-fetcher('%s'): %s", ctx.SourceUri, err)
+ }
+ return
+ }
+
+ ctx.SourceFile = cbdata.filename
+ ctx.DeleteSourceFile = true
+ ctx.DeleteSourceDir = true
+ } else {
+ err = fmt.Errorf("error initializing libcurl")
+ }
+
+ return
+}
+
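+// fetchFileLocal resolves the URI path relative to the configured local fetch
+// directory and verifies that the file exists and is not a directory.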
+func fetchFileLocal(ctx *Context, res *Result, uri *url.URL) (err error) {
+ rhl.Printf("Local fetcher called for '%s'", ctx.SourceUri)
+ if ctx.ProgressCallBack != nil {
+ if keep := ctx.ProgressCallBack(1, "fetching", 0.0, ctx.ProgressCallBackData); !keep {
+ ctx.ProgressCallBack = nil
+ }
+ }
+
+ ctx.SourceFile = filepath.Join(ctx.conf.LocalFetchDir, path.Clean("/"+uri.Path))
+ var src *os.File
+ if src, err = os.Open(ctx.SourceFile); err != nil {
+ res.ResponseCode = http.StatusBadRequest
+ res.ErrorString = fmt.Sprintf("local-file open(): %s", err)
+ return nil
+ }
+ defer src.Close() // close the file on every return path
+ var info os.FileInfo
+ if info, err = src.Stat(); err != nil {
+ res.ResponseCode = http.StatusBadRequest
+ res.ErrorString = fmt.Sprintf("local-file stat(): %s", err)
+ return nil
+ }
+ if info.IsDir() {
+ res.ResponseCode = http.StatusBadRequest
+ res.ErrorString = fmt.Sprintf("'%s' is a directory", ctx.SourceFile)
+ return nil
+ }
+ if ctx.ProgressCallBack != nil {
+ if keep := ctx.ProgressCallBack(1, "fetching", 1.0, ctx.ProgressCallBackData); !keep {
+ ctx.ProgressCallBack = nil
+ }
+ }
+ ctx.DeleteSourceFile = false
+ ctx.DeleteSourceDir = false
+ return
+}
+
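+// fetchFileFake simulates a fetch: the URI host is interpreted as a step count
+// and the function sleeps 100ms per step, reporting progress along the way.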
+func fetchFileFake(ctx *Context, res *Result, uri *url.URL) error {
+ rhdl.Printf("Fake fetcher for '%s'", ctx.SourceUri)
+
+ if duration, err := strconv.ParseUint(uri.Host, 10, 32); err != nil {
+ res.ResponseCode = http.StatusBadRequest
+ res.ErrorString = "invalid duration (must be a positive integer)"
+ } else {
+ for i := uint(0); i < uint(duration); i++ {
+ if ctx.Cancel != nil && len(ctx.Cancel) > 0 {
+ rhl.Printf("faking got canceled")
+ res.ResponseCode = http.StatusNoContent
+ res.ErrorString = "canceled"
+ return nil
+ }
+ if ctx.ProgressCallBack != nil {
+ if keep := ctx.ProgressCallBack(1, "faking", float64(i)/float64(duration), ctx.ProgressCallBackData); !keep {
+ ctx.ProgressCallBack = nil
+ }
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ if ctx.ProgressCallBack != nil {
+ if keep := ctx.ProgressCallBack(1, "faking", 1.0, ctx.ProgressCallBackData); !keep {
+ ctx.ProgressCallBack = nil
+ }
+ }
+ ctx.SourceFile = "/nonexistend/fake.mp3"
+ ctx.DeleteSourceFile = false
+ ctx.DeleteSourceDir = false
+ }
+ return nil
+}
+
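+// FetchFunc is the signature every fetcher has to implement.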
+type FetchFunc func(*Context, *Result, *url.URL) (err error)
+
+// TODO: implement fetchers for:
+// archiv://
+// public://
+// home:// ?????
+var (
+ fetchers = map[string]FetchFunc{
+ "local": fetchFileLocal,
+ "fake": fetchFileFake,
+ }
+ curlProtos = map[string]bool{
+ "http": false, "https": false,
+ "ftp": false, "ftps": false,
+ }
+)
+
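+// fetcherInit queries the installed libcurl for its supported protocols and
+// registers the curl-based fetcher for every protocol listed in curlProtos.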
+func fetcherInit() {
+ info := curl.VersionInfo(curl.VERSION_FIRST)
+ protos := info.Protocols
+ for _, proto := range protos {
+ if _, ok := curlProtos[proto]; ok {
+ rhdl.Printf("curl: enabling protocol %s", proto)
+ fetchers[proto] = fetchFileCurl
+ curlProtos[proto] = true
+ } else {
+ rhdl.Printf("curl: ignoring protocol %s", proto)
+ }
+ }
+ for proto, enabled := range curlProtos {
+ if !enabled {
+ rhl.Printf("curl: protocol %s is disabled because the installed library version doesn't support it!", proto)
+ }
+ }
+}
+
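+// checkPassword validates the credentials in the context and sets a 401
+// response on the result if they are rejected.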
+func checkPassword(ctx *Context, result *Result) (err error) {
+ ok := false
+ if ok, err = ctx.CheckPassword(); err != nil {
+ return
+ }
+ if !ok {
+ result.ResponseCode = http.StatusUnauthorized
+ result.ErrorString = "invalid username and/or password"
+ }
+ return
+}
+
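+// FetchFile parses the source URI of the context, checks credentials for
+// untrusted requests and dispatches to the fetcher registered for the URI scheme.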
+func FetchFile(ctx *Context) (res *Result, err error) {
+ res = &Result{ResponseCode: http.StatusOK}
+
+ var uri *url.URL
+ if uri, err = url.Parse(ctx.SourceUri); err != nil {
+ res.ResponseCode = http.StatusBadRequest
+ res.ErrorString = fmt.Sprintf("parsing uri: %s", err)
+ return res, nil
+ }
+
+ if !ctx.Trusted {
+ if err = checkPassword(ctx, res); err != nil || res.ResponseCode != http.StatusOK {
+ return
+ }
+ }
+
+ if fetcher, ok := fetchers[uri.Scheme]; ok {
+ err = fetcher(ctx, res, uri)
+ } else {
+ err = fmt.Errorf("No fetcher for uri scheme '%s' found.", uri.Scheme)
+ }
+ return
+}