From fcfe5720506f4015e4e65c62f7a9d22c4ee44913 Mon Sep 17 00:00:00 2001
From: Christian Pointner
Date: Sun, 24 Jul 2016 01:45:59 +0200
Subject: no separate log for lib anymore - use logger from host application instead

diff --git a/rhimport/converter.go b/rhimport/converter.go
index c4d8301..e53da9c 100644
--- a/rhimport/converter.go
+++ b/rhimport/converter.go
@@ -52,16 +52,16 @@ func NewFetchConverter(ctx *Context, filename string, metadata map[string]string
 	switch ctx.FetchConverter {
 	case "null":
 		// no support for loudness evaluation - leave normalization to Rivendell
-		return NewNullFetchConverter(filename, metadata, ctx.conf.SampleRate, ctx.Channels)
+		return NewNullFetchConverter(ctx, filename, metadata)
 	case "ffmpeg":
 		// no support for loudness evaluation - leave normalization to Rivendell
-		return NewFFMpegFetchConverter(filename, metadata, ctx.conf.SampleRate, ctx.Channels)
+		return NewFFMpegFetchConverter(ctx, filename, metadata)
 	case "bs1770":
 		ctx.NormalizationLevel = 0 // disable Rivendell normalization
-		return NewBS1770FetchConverter(filename, metadata, ctx.conf.SampleRate, ctx.Channels)
+		return NewBS1770FetchConverter(ctx, filename, metadata)
 	case "ffmpeg-bs1770":
 		ctx.NormalizationLevel = 0 // disable Rivendell normalization
-		return NewFFMpegBS1770FetchConverter(filename, metadata, ctx.conf.SampleRate, ctx.Channels)
+		return NewFFMpegBS1770FetchConverter(ctx, filename, metadata)
 	}
 	return nil, "", errors.New("unknown fetch converter type: " + ctx.FetchConverter)
 }
@@ -74,10 +74,10 @@ type NullFetchConverter struct {
 	file *os.File
 }
 
-func NewNullFetchConverter(filename string, metadata map[string]string, samplerate, channels uint) (n *NullFetchConverter, newFilename string, err error) {
+func NewNullFetchConverter(ctx *Context, filename string, metadata map[string]string) (n *NullFetchConverter, newFilename string, err error) {
 	n = &NullFetchConverter{}
 	newFilename = filepath.Dir(filename) + "/conv-null" + filepath.Ext(filename)
-	rhl.Printf("null-converter: opening file '%s' -> '%s'", filename, newFilename)
+	ctx.stdlog.Printf("null-converter: opening file '%s' -> '%s'", filename, newFilename)
 	n.file, err = os.OpenFile(newFilename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
 	return
 }
@@ -104,17 +104,17 @@ type FFMpegFetchConverter struct {
 	result chan FetchConverterResult
 }
 
-func NewFFMpegFetchConverter(filename string, metadata map[string]string, samplerate, channels uint) (ff *FFMpegFetchConverter, filenameFlac string, err error) {
+func NewFFMpegFetchConverter(ctx *Context, filename string, metadata map[string]string) (ff *FFMpegFetchConverter, filenameFlac string, err error) {
 	ff = &FFMpegFetchConverter{}
 	filenameFlac = filepath.Dir(filename) + "/conv-ffmpeg.flac"
-	rhl.Printf("ffmpeg-converter: starting ffmpeg for file '%s' -> '%s'", filename, filenameFlac)
+	ctx.stdlog.Printf("ffmpeg-converter: starting ffmpeg for file '%s' -> '%s'", filename, filenameFlac)
 	ff.cmd = exec.Command("ffmpeg", "-loglevel", "warning", "-i", "-", "-map_metadata", "0")
 	if metadata != nil {
 		for key, value := range metadata {
 			ff.cmd.Args = append(ff.cmd.Args, "-metadata", fmt.Sprintf("%s=%s", key, value))
 		}
 	}
-	ff.cmd.Args = append(ff.cmd.Args, "-ar", strconv.FormatUint(uint64(samplerate), 10), "-ac", strconv.FormatUint(uint64(channels), 10), "-f", "flac", filenameFlac)
+	ff.cmd.Args = append(ff.cmd.Args, "-ar", strconv.FormatUint(uint64(ctx.conf.SampleRate), 10), "-ac", strconv.FormatUint(uint64(ctx.Channels), 10), "-f", "flac", filenameFlac)
 	if ff.pipe, err = ff.cmd.StdinPipe(); err != nil {
return nil, "", err } @@ -163,10 +163,10 @@ type BS1770FetchConverter struct { result chan FetchConverterResult } -func NewBS1770FetchConverter(filename string, metadata map[string]string, samplerate, channels uint) (bs *BS1770FetchConverter, newFilename string, err error) { +func NewBS1770FetchConverter(ctx *Context, filename string, metadata map[string]string) (bs *BS1770FetchConverter, newFilename string, err error) { bs = &BS1770FetchConverter{} newFilename = filepath.Dir(filename) + "/conv-bs1770" + filepath.Ext(filename) - rhl.Printf("bs1770-converter: starting bs1770gain for file '%s' -> '%s'", filename, newFilename) + ctx.stdlog.Printf("bs1770-converter: starting bs1770gain for file '%s' -> '%s'", filename, newFilename) bs.file, err = os.OpenFile(newFilename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600) bs.cmd = exec.Command("bs1770gain", "--ebu", "-i", "--xml", "-") @@ -241,18 +241,18 @@ type FFMpegBS1770FetchConverter struct { resultBS chan FetchConverterResult } -func NewFFMpegBS1770FetchConverter(filename string, metadata map[string]string, samplerate, channels uint) (ff *FFMpegBS1770FetchConverter, filenameFlac string, err error) { +func NewFFMpegBS1770FetchConverter(ctx *Context, filename string, metadata map[string]string) (ff *FFMpegBS1770FetchConverter, filenameFlac string, err error) { ff = &FFMpegBS1770FetchConverter{} filenameFlac = filepath.Dir(filename) + "/conv-ffmpeg-bs1770.flac" - rhl.Printf("ffmpeg-bs1770-converter: starting ffmpeg and bs1770gain for file '%s' -> '%s'", filename, filenameFlac) + ctx.stdlog.Printf("ffmpeg-bs1770-converter: starting ffmpeg and bs1770gain for file '%s' -> '%s'", filename, filenameFlac) ff.ffmpeg = exec.Command("ffmpeg", "-loglevel", "warning", "-i", "pipe:0", "-map_metadata", "0") if metadata != nil { for key, value := range metadata { ff.ffmpeg.Args = append(ff.ffmpeg.Args, "-metadata", fmt.Sprintf("%s=%s", key, value)) } } - ff.ffmpeg.Args = append(ff.ffmpeg.Args, "-ar", strconv.FormatUint(uint64(samplerate), 10), "-ac", strconv.FormatUint(uint64(channels), 10), "-f", "flac", filenameFlac) - ff.ffmpeg.Args = append(ff.ffmpeg.Args, "-ar", strconv.FormatUint(uint64(samplerate), 10), "-ac", strconv.FormatUint(uint64(channels), 10), "-f", "flac", "pipe:1") + ff.ffmpeg.Args = append(ff.ffmpeg.Args, "-ar", strconv.FormatUint(uint64(ctx.conf.SampleRate), 10), "-ac", strconv.FormatUint(uint64(ctx.Channels), 10), "-f", "flac", filenameFlac) + ff.ffmpeg.Args = append(ff.ffmpeg.Args, "-ar", strconv.FormatUint(uint64(ctx.conf.SampleRate), 10), "-ac", strconv.FormatUint(uint64(ctx.Channels), 10), "-f", "flac", "pipe:1") if ff.pipe, err = ff.ffmpeg.StdinPipe(); err != nil { return nil, "", err } diff --git a/rhimport/core.go b/rhimport/core.go index f1bbe16..a0f1060 100644 --- a/rhimport/core.go +++ b/rhimport/core.go @@ -53,17 +53,11 @@ const ( var ( bool2str = map[bool]string{false: "0", true: "1"} - rhl = log.New(os.Stderr, "[rhimport]\t", log.LstdFlags) - rhdl = log.New(ioutil.Discard, "[rhimport-dbg]\t", log.LstdFlags) ) -func init() { - if _, exists := os.LookupEnv("RHIMPORT_DEBUG"); exists { - rhdl.SetOutput(os.Stderr) - } - +func Init(stdlog, dbglog *log.Logger) { curl.GlobalInit(curl.GLOBAL_ALL) - fetcherInit() + fetcherInit(stdlog, dbglog) } type ProgressCB func(step int, stepName string, current, total float64, title string, cart, cut uint, userdata interface{}) bool @@ -134,6 +128,8 @@ type AttachmentChunk struct { type Context struct { conf *Config db *rddb.DBChan + stdlog *log.Logger + dbglog *log.Logger UserName string Password string 
 	Trusted  bool
@@ -163,10 +159,19 @@ type Context struct {
 	Cancel <-chan bool
 }
 
-func NewContext(conf *Config, db *rddb.DBChan) *Context {
+func NewContext(conf *Config, db *rddb.DBChan, stdlog, dbglog *log.Logger) *Context {
+	if stdlog == nil {
+		stdlog = log.New(ioutil.Discard, "", 0)
+	}
+	if dbglog == nil {
+		dbglog = log.New(ioutil.Discard, "", 0)
+	}
+
 	ctx := new(Context)
 	ctx.conf = conf
 	ctx.db = db
+	ctx.stdlog = stdlog
+	ctx.dbglog = dbglog
 	ctx.UserName = ""
 	ctx.Password = ""
 	ctx.Trusted = false
diff --git a/rhimport/fetcher.go b/rhimport/fetcher.go
index 536dbe1..2855ea0 100644
--- a/rhimport/fetcher.go
+++ b/rhimport/fetcher.go
@@ -29,6 +29,7 @@ import (
 	"fmt"
 	"io"
 	"io/ioutil"
+	"log"
 	"mime"
 	"net/http"
 	"net/url"
@@ -77,7 +78,7 @@ func curlWriteCallback(ptr []byte, userdata interface{}) bool {
 	if data.filename == "" {
 		name := path.Clean("/" + data.remotename)
 		if name == "/" {
-			rhdl.Printf("remotename('%s') is invalid, replacing it with 'unnamed'", data.remotename)
+			data.ctx.dbglog.Printf("remotename('%s') is invalid, replacing it with 'unnamed'", data.remotename)
 			name = "unnamed"
 		}
 		data.filename = filepath.Join(data.basepath, name)
@@ -85,7 +86,7 @@ func curlWriteCallback(ptr []byte, userdata interface{}) bool {
 		data.ctx.OrigFilename = data.filename
 		conv, newFilename, err := NewFetchConverter(data.ctx, data.filename, data.metadata)
 		if err != nil {
-			rhl.Printf("Unable to create converter for file %s: %s", data.filename, err)
+			data.ctx.stdlog.Printf("Unable to create converter for file %s: %s", data.filename, err)
 			data.writeError = err
 			return false
 		}
@@ -94,7 +95,7 @@ func curlWriteCallback(ptr []byte, userdata interface{}) bool {
 	}
 	w, err := data.conv.Write(ptr)
 	if err != nil {
-		rhl.Printf("Unable to write to converter(%s): %s", data.filename, err)
+		data.ctx.stdlog.Printf("Unable to write to converter(%s): %s", data.filename, err)
 		data.writeError = err
 		return false
 	}
@@ -140,22 +141,22 @@ func checkYoutubeDL(ctx *Context, res *Result, uri *url.URL) *YoutubeDLInfo {
 		cmd.Env = append(os.Environ(), "CBA_API_KEY="+cba_api_key)
 	}
 
-	rhl.Printf("running youtube-dl for '%s'", ctx.SourceUri)
+	ctx.stdlog.Printf("running youtube-dl for '%s'", ctx.SourceUri)
 
 	done := make(chan *YoutubeDLInfo)
 	go func() {
 		defer func() { done <- nil }()
 		if err := cmd.Run(); err != nil {
-			rhdl.Printf("youtube-dl: %v, stderr: %s", err, strings.TrimSpace(stderr.String()))
+			ctx.dbglog.Printf("youtube-dl: %v, stderr: %s", err, strings.TrimSpace(stderr.String()))
 			return
 		}
 		info, err := NewYoutubeDLInfoFromJSON(&stdout)
 		if err != nil {
-			rhdl.Printf("youtube-dl: %v, stderr: %s", err, strings.TrimSpace(stderr.String()))
+			ctx.dbglog.Printf("youtube-dl: %v, stderr: %s", err, strings.TrimSpace(stderr.String()))
 			return
 		}
-		rhl.Printf("youtube-dl: extractor: %s -> %s", info.Extractor, info.URL)
+		ctx.dbglog.Printf("youtube-dl: extractor: %s -> %s", info.Extractor, info.URL)
 		ctx.SourceUri = info.URL
 		done <- info
 	}()
 
@@ -172,11 +173,11 @@ func checkYoutubeDL(ctx *Context, res *Result, uri *url.URL) *YoutubeDLInfo {
 }
 
 func fetchFileCurl(ctx *Context, res *Result, uri *url.URL) (err error) {
-	rhl.Printf("curl-based fetcher called for '%s'", ctx.SourceUri)
+	ctx.stdlog.Printf("curl-based fetcher called for '%s'", ctx.SourceUri)
 
 	info := checkYoutubeDL(ctx, res, uri)
 	if res.ResponseCode == http.StatusNoContent {
-		rhl.Printf("download of '%s' got canceled", ctx.SourceUri)
+		ctx.stdlog.Printf("download of '%s' got canceled", ctx.SourceUri)
 		return nil
 	}
 
@@ -198,7 +199,7 @@ func fetchFileCurl(ctx *Context, res *Result, uri *url.URL) (err error) {
 			h = append(h, key+": "+value)
 		}
 		easy.Setopt(curl.OPT_HTTPHEADER, h)
-		rhdl.Printf("added HTTP header: %q", h)
+		ctx.dbglog.Printf("added HTTP header: %q", h)
 	}
 }
 
@@ -249,17 +250,17 @@ func fetchFileCurl(ctx *Context, res *Result, uri *url.URL) (err error) {
 	var convErr error
 	if cbdata.conv != nil {
 		cbdata.conv.Close()
-		rhl.Printf("waiting for converter to finish...")
+		ctx.stdlog.Printf("waiting for converter to finish...")
 		convOut, convErr = cbdata.conv.GetResult(ctx, res)
 	}
 	if err != nil || cbdata.writeError != nil || convErr != nil {
 		if cbdata.conv != nil {
-			rhdl.Printf("Removing stale file: %s", cbdata.filename)
+			ctx.dbglog.Printf("Removing stale file: %s", cbdata.filename)
 			os.Remove(cbdata.filename)
 			os.Remove(path.Dir(cbdata.filename))
 		}
 		if res.ResponseCode == http.StatusNoContent {
-			rhl.Printf("download of '%s' got canceled", ctx.SourceUri)
+			ctx.stdlog.Printf("download of '%s' got canceled", ctx.SourceUri)
 			return nil
 		}
 		if statusCode > 0 && statusCode != http.StatusOK {
@@ -272,13 +273,13 @@ func fetchFileCurl(ctx *Context, res *Result, uri *url.URL) (err error) {
 		}
 		if convErr != nil {
 			err = fmt.Errorf("converter error: %v; converter output: %s", convErr, convOut)
-			rhl.Printf("%v", err)
+			ctx.stdlog.Printf("%v", err)
 		}
 		err = fmt.Errorf("curl-fetcher('%s'): %s", ctx.SourceUri, err)
-		rhl.Println(err)
+		ctx.stdlog.Println(err)
 		return
 	}
-	rhdl.Printf("converter: loudness correction = %.2f dB", ctx.LoudnessCorr)
+	ctx.dbglog.Printf("converter: loudness correction = %.2f dB", ctx.LoudnessCorr)
 
 	ctx.SourceFile = cbdata.filename
 	if ctx.SourceFilePolicy == Auto {
@@ -324,7 +325,7 @@ func generateArchivFilePath(uri *url.URL) (file, path string, t time.Time, err e
 }
 
 func fetchFileArchiv(ctx *Context, res *Result, uri *url.URL) (err error) {
-	rhdl.Printf("archiv fetcher called for '%s'", ctx.SourceUri)
+	ctx.dbglog.Printf("archiv fetcher called for '%s'", ctx.SourceUri)
 
 	var srcfile, srcpath string
 	var start time.Time
@@ -374,24 +375,24 @@ func fetchFileArchiv(ctx *Context, res *Result, uri *url.URL) (err error) {
 	cbdata.metadata["ORGANIZATION"] = "Radio Helsinki"
 	cbdata.metadata["DATE"] = start.Format("2.1.2006")
 
-	rhdl.Printf("importing archiv file from %s", scpuri)
+	ctx.dbglog.Printf("importing archiv file from %s", scpuri)
 	err = easy.Perform()
 
 	var convOut string
 	var convErr error
 	if cbdata.conv != nil {
 		cbdata.conv.Close()
-		rhl.Printf("waiting for converter to finish...")
+		ctx.stdlog.Printf("waiting for converter to finish...")
 		convOut, convErr = cbdata.conv.GetResult(ctx, res)
 	}
 	if err != nil || cbdata.writeError != nil || convErr != nil {
 		if cbdata.conv != nil {
-			rhdl.Printf("Removing stale file: %s", cbdata.filename)
+			ctx.dbglog.Printf("Removing stale file: %s", cbdata.filename)
 			os.Remove(cbdata.filename)
 			os.Remove(path.Dir(cbdata.filename))
 		}
 		if res.ResponseCode == http.StatusNoContent {
-			rhl.Printf("download of '%s' got canceled", ctx.SourceUri)
+			ctx.stdlog.Printf("download of '%s' got canceled", ctx.SourceUri)
 			return nil
 		}
 		if cbdata.writeError != nil {
@@ -399,13 +400,13 @@ func fetchFileArchiv(ctx *Context, res *Result, uri *url.URL) (err error) {
 		}
 		if convErr != nil {
 			err = fmt.Errorf("converter error: %v; converter output: %s", convErr, convOut)
-			rhl.Printf("%v", err)
+			ctx.stdlog.Printf("%v", err)
 		}
 		err = fmt.Errorf("archiv-fetcher('%s'): %s", ctx.SourceUri, err)
-		rhl.Println(err)
+		ctx.stdlog.Println(err)
 		return
 	}
-	rhdl.Printf("converter: loudness correction = %.2f dB", ctx.LoudnessCorr)
%.2f dB", ctx.LoudnessCorr) ctx.SourceFile = cbdata.filename if ctx.SourceFilePolicy == Auto { @@ -434,7 +435,7 @@ func fetchFileDirConvert(ctx *Context, res *Result, origSrc *os.File, sizeTotal ctx.OrigFilename = ctx.SourceFile var conv FetchConverter if conv, ctx.SourceFile, err = NewFetchConverter(ctx, filepath.Join(basepath, origFile), nil); err != nil { - rhl.Printf("Unable to create converter for file %s: %s", origDir+origFile, err) + ctx.stdlog.Printf("Unable to create converter for file %s: %s", origDir+origFile, err) return } @@ -449,12 +450,12 @@ func fetchFileDirConvert(ctx *Context, res *Result, origSrc *os.File, sizeTotal break } if err != nil { - rhl.Printf("Unable to read from source file %s: %s", origDir+origFile, err) + ctx.stdlog.Printf("Unable to read from source file %s: %s", origDir+origFile, err) break } w, err = conv.Write(buffer[0:r]) if err != nil { - rhl.Printf("Unable to write to converter(%s): %s", ctx.SourceFile, err) + ctx.stdlog.Printf("Unable to write to converter(%s): %s", ctx.SourceFile, err) break } written += uint64(w) @@ -469,21 +470,21 @@ func fetchFileDirConvert(ctx *Context, res *Result, origSrc *os.File, sizeTotal conv.Close() if res.ResponseCode == http.StatusNoContent { - rhl.Printf("converting of '%s' got canceled", ctx.SourceUri) + ctx.stdlog.Printf("converting of '%s' got canceled", ctx.SourceUri) return nil } - rhl.Printf("waiting for converter to finish...") + ctx.stdlog.Printf("waiting for converter to finish...") convOut, convErr := conv.GetResult(ctx, res) if convErr != nil { if res.ResponseCode == http.StatusNoContent { - rhl.Printf("converting of '%s' got canceled", ctx.SourceUri) + ctx.stdlog.Printf("converting of '%s' got canceled", ctx.SourceUri) return nil } - rhl.Printf("converter error: %v; converter output: %s", convErr, convOut) + ctx.stdlog.Printf("converter error: %v; converter output: %s", convErr, convOut) return fmt.Errorf("converter error: %v; converter output: %s", convErr, convOut) } - rhdl.Printf("converter: loudness correction = %.2f dB", ctx.LoudnessCorr) + ctx.dbglog.Printf("converter: loudness correction = %.2f dB", ctx.LoudnessCorr) if err != nil { return err } @@ -510,7 +511,7 @@ func fetchFileDirConvert(ctx *Context, res *Result, origSrc *os.File, sizeTotal } func fetchFileDir(ctx *Context, res *Result, uri *url.URL, dir string, convert bool) (err error) { - rhl.Printf("Dir fetcher called for '%s'", ctx.SourceUri) + ctx.stdlog.Printf("Dir fetcher called for '%s'", ctx.SourceUri) ctx.SourceFile = filepath.Join(dir, path.Clean("/"+uri.Path)) var src *os.File @@ -556,7 +557,7 @@ func fetchFileDir(ctx *Context, res *Result, uri *url.URL, dir string, convert b } func fetchFileFake(ctx *Context, res *Result, uri *url.URL) error { - rhdl.Printf("Fake fetcher for '%s'", ctx.SourceUri) + ctx.dbglog.Printf("Fake fetcher for '%s'", ctx.SourceUri) duration, err := strconv.ParseUint(uri.Host, 10, 32) if err != nil { @@ -567,7 +568,7 @@ func fetchFileFake(ctx *Context, res *Result, uri *url.URL) error { for i := uint(0); i < uint(duration); i++ { if ctx.isCanceled() { - rhl.Printf("faking got canceled") + ctx.stdlog.Printf("faking got canceled") res.ResponseCode = http.StatusNoContent res.ErrorString = "canceled" return nil @@ -592,19 +593,19 @@ func writeAttachmentFile(ctx *Context, res *Result, sizeTotal uint64, conv Fetch for { select { case <-ctx.Cancel: - rhl.Printf("receiving attachment '%s' got canceled", ctx.SourceFile) + ctx.stdlog.Printf("receiving attachment '%s' got canceled", ctx.SourceFile) res.ResponseCode = 
 			res.ResponseCode = http.StatusNoContent
 			res.ErrorString = "canceled"
 			return nil
 		case chunk, ok := <-ctx.AttachmentChan:
 			if !ok {
-				rhl.Printf("receiving attachment '%s' failed: channel has been closed prematurely, after %d Bytes", ctx.SourceFile, written)
+				ctx.stdlog.Printf("receiving attachment '%s' failed: channel has been closed prematurely, after %d Bytes", ctx.SourceFile, written)
 				res.ResponseCode = http.StatusBadRequest
 				res.ErrorString = fmt.Sprintf("file upload stopped prematurely (after %d Bytes)", written)
 				return nil
 			}
 			if chunk.Error != nil {
-				rhl.Printf("receiving attachment '%s' failed: %v", ctx.SourceFile, chunk.Error)
+				ctx.stdlog.Printf("receiving attachment '%s' failed: %v", ctx.SourceFile, chunk.Error)
 				res.ResponseCode = http.StatusInternalServerError
 				res.ErrorString = chunk.Error.Error()
 				return nil
@@ -612,13 +613,13 @@ func writeAttachmentFile(ctx *Context, res *Result, sizeTotal uint64, conv Fetch
 
 			left := sizeTotal - written
 			if int(left) < len(chunk.Data) {
-				rhl.Printf("attachment fetcher: truncating %d byes of extra data", len(chunk.Data)-int(left))
+				ctx.stdlog.Printf("attachment fetcher: truncating %d byes of extra data", len(chunk.Data)-int(left))
 				chunk.Data = chunk.Data[0:left]
 			}
 			w, err := conv.Write(chunk.Data)
 			if err != nil {
-				rhl.Printf("Unable to write to converter(%s): %s", ctx.SourceFile, err)
+				ctx.stdlog.Printf("Unable to write to converter(%s): %s", ctx.SourceFile, err)
 				return err
 			}
 			written += uint64(w)
 
@@ -632,7 +633,7 @@ func writeAttachmentFile(ctx *Context, res *Result, sizeTotal uint64, conv Fetch
 }
 
 func fetchFileAttachment(ctx *Context, res *Result, uri *url.URL) error {
-	rhdl.Printf("Attachment fetcher for '%s'", ctx.SourceUri)
+	ctx.dbglog.Printf("Attachment fetcher for '%s'", ctx.SourceUri)
 
 	if ctx.AttachmentChan == nil {
 		return fmt.Errorf("attachement channel is nil")
@@ -660,7 +661,7 @@ func fetchFileAttachment(ctx *Context, res *Result, uri *url.URL) error {
 	var conv FetchConverter
 	ctx.OrigFilename = ctx.SourceFile
 	if conv, ctx.SourceFile, err = NewFetchConverter(ctx, ctx.SourceFile, nil); err != nil {
-		rhl.Printf("Unable to create converter for file %s: %s", ctx.OrigFilename, err)
+		ctx.stdlog.Printf("Unable to create converter for file %s: %s", ctx.OrigFilename, err)
 		return err
 	}
 
@@ -668,24 +669,24 @@ func fetchFileAttachment(ctx *Context, res *Result, uri *url.URL) error {
 
 	err = writeAttachmentFile(ctx, res, sizeTotal, conv)
 	conv.Close()
 	if res.ResponseCode == http.StatusNoContent {
-		rhl.Printf("download of '%s' got canceled", ctx.SourceUri)
+		ctx.stdlog.Printf("download of '%s' got canceled", ctx.SourceUri)
 		return nil
 	}
-	rhl.Printf("waiting for converter to finish...")
+	ctx.stdlog.Printf("waiting for converter to finish...")
 	convOut, convErr := conv.GetResult(ctx, res)
 	if err != nil {
 		return err
 	}
 	if convErr != nil {
 		if res.ResponseCode == http.StatusNoContent {
-			rhl.Printf("download of '%s' got canceled", ctx.SourceUri)
+			ctx.stdlog.Printf("download of '%s' got canceled", ctx.SourceUri)
 			return nil
 		}
-		rhl.Printf("converter error: %v; converter output: %s", convErr, convOut)
+		ctx.stdlog.Printf("converter error: %v; converter output: %s", convErr, convOut)
 		return fmt.Errorf("converter error: %v; converter output: %s", convErr, convOut)
 	}
-	rhdl.Printf("converter: loudness correction = %.2f dB", ctx.LoudnessCorr)
+	ctx.dbglog.Printf("converter: loudness correction = %.2f dB", ctx.LoudnessCorr)
 
 	if ctx.SourceFilePolicy == Auto {
 		ctx.DeleteSourceFile = true
@@ -709,30 +710,30 @@ var (
 	}
 )
 
-func fetcherInit() {
+func fetcherInit(stdlog, dbglog *log.Logger) {
 	archiveEnabled := false
 	info := curl.VersionInfo(curl.VERSION_FIRST)
 	protos := info.Protocols
 	for _, proto := range protos {
 		if proto == "sftp" {
-			rhdl.Printf("curl: * enabling protocol %s", proto)
+			dbglog.Printf("curl: * enabling protocol %s", proto)
 			fetchers["archiv"] = fetchFileArchiv
 			archiveEnabled = true
 		} else if _, ok := curlProtos[proto]; ok {
-			rhdl.Printf("curl: * enabling protocol %s", proto)
+			dbglog.Printf("curl: * enabling protocol %s", proto)
 			fetchers[proto] = fetchFileCurl
 			curlProtos[proto] = true
 		} else {
-			rhdl.Printf("curl: ignoring protocol %s", proto)
+			dbglog.Printf("curl: ignoring protocol %s", proto)
 		}
 	}
 
 	for proto, enabled := range curlProtos {
 		if !enabled {
-			rhl.Printf("curl: protocol %s is disabled because the installed library version doesn't support it!", proto)
+			stdlog.Printf("curl: protocol %s is disabled because the installed library version doesn't support it!", proto)
 		}
 	}
 	if !archiveEnabled {
-		rhl.Printf("archiv: fetcher is disabled because the installed curl library version doesn't support sFTP!")
+		stdlog.Printf("archiv: fetcher is disabled because the installed curl library version doesn't support sFTP!")
 	}
 }
diff --git a/rhimport/importer.go b/rhimport/importer.go
index 9b29e97..4b06724 100644
--- a/rhimport/importer.go
+++ b/rhimport/importer.go
@@ -46,7 +46,7 @@ func (res *Result) fromRDWebResult(rdres *RDWebResult) {
 }
 
 func addCart(ctx *Context, res *Result) (err error) {
-	rhdl.Printf("importer: addCart() called for cart: %d", ctx.Cart)
+	ctx.dbglog.Printf("importer: addCart() called for cart: %d", ctx.Cart)
 
 	if ctx.GroupName == "" {
 		if err = ctx.getGroupOfCart(); err != nil {
@@ -106,7 +106,7 @@ func addCart(ctx *Context, res *Result) (err error) {
 }
 
 func addCut(ctx *Context, res *Result) (err error) {
-	rhdl.Printf("importer: addCut() called for cart/cut: %d/%d", ctx.Cart, ctx.Cut)
+	ctx.dbglog.Printf("importer: addCut() called for cart/cut: %d/%d", ctx.Cart, ctx.Cut)
 
 	var b bytes.Buffer
 	w := multipart.NewWriter(&b)
@@ -153,7 +153,7 @@ func addCut(ctx *Context, res *Result) (err error) {
 }
 
 func removeCart(ctx *Context, res *Result) (err error) {
-	rhdl.Printf("importer: removeCart() called for cart: %d", ctx.Cart)
+	ctx.dbglog.Printf("importer: removeCart() called for cart: %d", ctx.Cart)
 
 	var b bytes.Buffer
 	w := multipart.NewWriter(&b)
@@ -187,7 +187,7 @@ func removeCart(ctx *Context, res *Result) (err error) {
 }
 
 func removeCut(ctx *Context, res *Result) (err error) {
-	rhdl.Printf("importer: removeCut() called for cart/cut: %d/%d", ctx.Cart, ctx.Cut)
+	ctx.dbglog.Printf("importer: removeCut() called for cart/cut: %d/%d", ctx.Cart, ctx.Cut)
 
 	var b bytes.Buffer
 	w := multipart.NewWriter(&b)
@@ -278,7 +278,7 @@ func importAudioCreateRequest(ctx *Context, easy *curl.CURL) (form *curl.Form, e
 }
 
 func importAudio(ctx *Context, res *Result) (err error) {
-	rhdl.Printf("importer: importAudio() called for cart/cut: %d/%d", ctx.Cart, ctx.Cut)
+	ctx.dbglog.Printf("importer: importAudio() called for cart/cut: %d/%d", ctx.Cart, ctx.Cut)
 
 	easy := curl.EasyInit()
 	if easy != nil {
@@ -318,7 +318,7 @@ func importAudio(ctx *Context, res *Result) (err error) {
 
 	if err = easy.Perform(); err != nil {
 		if res.ResponseCode == http.StatusNoContent {
-			rhl.Printf("import to cart/cat %d/%d got canceled", ctx.Cart, ctx.Cut)
+			ctx.stdlog.Printf("import to cart/cat %d/%d got canceled", ctx.Cart, ctx.Cut)
 			res.Cart = ctx.Cart
 			res.Cut = ctx.Cut
 			err = nil
@@ -404,16 +404,16 @@ func addShowCartCut(ctx *Context, res *Result, carts []uint) (err error) {
 
 func cleanupFiles(ctx *Context, res *Result) {
 	if ctx.DeleteSourceFile {
-		rhdl.Printf("importer: removing file: %s", ctx.SourceFile)
+		ctx.dbglog.Printf("importer: removing file: %s", ctx.SourceFile)
 		if err := os.Remove(ctx.SourceFile); err != nil {
-			rhl.Printf("importer: error removing source file: %s", err)
+			ctx.stdlog.Printf("importer: error removing source file: %s", err)
 			return
 		}
 		if ctx.DeleteSourceDir {
 			dir := path.Dir(ctx.SourceFile)
-			rhdl.Printf("importer: also removing directory: %s", dir)
+			ctx.dbglog.Printf("importer: also removing directory: %s", dir)
 			if err := os.Remove(dir); err != nil {
-				rhl.Printf("importer: error removing source directory: %s", err)
+				ctx.stdlog.Printf("importer: error removing source directory: %s", err)
 			}
 		}
 	} else {
@@ -426,7 +426,7 @@ func ImportFile(ctx *Context) (res *Result, err error) {
 	res = &Result{ResponseCode: http.StatusOK}
 	defer cleanupFiles(ctx, res)
 
-	rhl.Printf("importer: ImportFile called with: show-id: %d, pool-name: '%s', cart/cut: %d/%d", ctx.ShowId, ctx.GroupName, ctx.Cart, ctx.Cut)
+	ctx.stdlog.Printf("importer: ImportFile called with: show-id: %d, pool-name: '%s', cart/cut: %d/%d", ctx.ShowId, ctx.GroupName, ctx.Cart, ctx.Cut)
 
 	// TODO: on trusted interfaces we should call getPassword again with cached=false after 401's
 	if ctx.Trusted {
@@ -480,9 +480,9 @@ func ImportFile(ctx *Context) (res *Result, err error) {
 	if ctx.Cart != 0 && ctx.Cut != 0 { // Import to specific Cut within Cart
 		if err = importAudio(ctx, res); err != nil || res.ResponseCode != http.StatusOK {
 			if err != nil {
-				rhl.Printf("Fileimport has failed (Cart/Cut %d/%d): %s", ctx.Cart, ctx.Cut, err)
+				ctx.stdlog.Printf("Fileimport has failed (Cart/Cut %d/%d): %s", ctx.Cart, ctx.Cut, err)
 			} else {
-				rhl.Printf("Fileimport has failed (Cart/Cut %d/%d): %s", res.Cart, res.Cut, res.ErrorString)
+				ctx.stdlog.Printf("Fileimport has failed (Cart/Cut %d/%d): %s", res.Cart, res.Cut, res.ErrorString)
 			}
 			// Try to clean up after failed import
 			if rmCartOnErr {
@@ -496,9 +496,9 @@ func ImportFile(ctx *Context) (res *Result, err error) {
 			}
 		} else {
 			if err := ctx.updateCutCartTitle(); err != nil {
-				rhl.Printf("Warning: error while updating Cart/Cut Title: %v", err)
+				ctx.stdlog.Printf("Warning: error while updating Cart/Cut Title: %v", err)
 			}
-			rhl.Printf("File got succesfully imported into Cart/Cut %d/%d", res.Cart, res.Cut)
+			ctx.stdlog.Printf("File got succesfully imported into Cart/Cut %d/%d", res.Cart, res.Cut)
 		}
 	} else {
 		res.ResponseCode = http.StatusBadRequest
diff --git a/rhimport/normalizer.go b/rhimport/normalizer.go
index 840dde2..e883d97 100644
--- a/rhimport/normalizer.go
+++ b/rhimport/normalizer.go
@@ -50,7 +50,7 @@ func runNormalizer(ctx *Context, res *Result, src *os.File, size int64) (err err
 	basepath, filename := filepath.Split(src.Name())
 	ext := filepath.Ext(filename)
 	destName := strings.TrimSuffix(filename, ext) + "_normalized.flac"
-	rhl.Printf("NormalizeFile: '%s' -> '%s', using gain = %.2f dB", filename, destName, ctx.LoudnessCorr)
+	ctx.stdlog.Printf("NormalizeFile: '%s' -> '%s', using gain = %.2f dB", filename, destName, ctx.LoudnessCorr)
 
 	ctx.SourceFile = basepath + destName
 
@@ -99,7 +99,7 @@ func NormalizeFile(ctx *Context) (res *Result, err error) {
 	res = &Result{ResponseCode: http.StatusOK}
 
 	if ctx.LoudnessCorr == 0.0 {
-		rhl.Println("NormalizeFile: skipping normalization since the gain = 0.0dB")
+		ctx.stdlog.Println("NormalizeFile: skipping normalization since the gain = 0.0dB")
 		ctx.reportProgress(2, "normalizing", 1.0, 1.0)
 		return
 	}
@@ -118,7 +118,7 @@ func NormalizeFile(ctx *Context) (res *Result, err error) {
 	}
 
 	if err = runNormalizer(ctx, res, src, size); err != nil {
-		rhl.Println("NormalizeFile error:", err)
+		ctx.stdlog.Println("NormalizeFile error:", err)
 		if ctx.DeleteSourceFile {
 			os.Remove(ctx.SourceFile)
 			if ctx.DeleteSourceDir {
diff --git a/rhimport/session.go b/rhimport/session.go
index 1c3439d..a119285 100644
--- a/rhimport/session.go
+++ b/rhimport/session.go
@@ -155,7 +155,7 @@ func (self *Session) run(timeout time.Duration) {
 	go sessionRun(self.ctx, self.doneIntChan)
 	self.state = SESSION_RUNNING
 	if timeout > 3*time.Hour {
-		rhl.Printf("requested session timeout (%v) is to high - lowering to 3h", timeout)
+		self.ctx.stdlog.Printf("requested session timeout (%v) is to high - lowering to 3h", timeout)
 		timeout = 3 * time.Hour
 	}
 	self.timer.Reset(timeout)
@@ -163,7 +163,7 @@ func (self *Session) run(timeout time.Duration) {
 }
 
 func (self *Session) cancel() {
-	rhdl.Println("Session: canceling running import")
+	self.ctx.dbglog.Println("Session: canceling running import")
 	select {
 	case self.cancelIntChan <- true:
 	default: // session got canceled already??
@@ -380,7 +380,7 @@ func (self *Session) getInterface() *SessionChan {
 
 func (self *Session) cleanup() {
 	self.quit <- true
-	rhdl.Printf("Session: waiting for session to close")
+	self.ctx.dbglog.Printf("Session: waiting for session to close")
 	<-self.done
 	close(self.quit)
 	close(self.done)
@@ -395,7 +395,7 @@ func (self *Session) cleanup() {
 	// close(self.addProgressChan)
 	// close(self.addDoneChan)
 	// close(self.attachUploader)
-	rhdl.Printf("Session: cleanup is now done")
+	self.ctx.dbglog.Printf("Session: cleanup is now done")
 }
 
 func newSession(ctx *Context, removeFunc func()) (session *Session) {
diff --git a/rhimport/session_store.go b/rhimport/session_store.go
index d6dce9f..dafe71e 100644
--- a/rhimport/session_store.go
+++ b/rhimport/session_store.go
@@ -28,7 +28,9 @@ import (
 	"crypto/rand"
 	"encoding/base64"
 	"fmt"
+	"log"
 	"net/http"
+	"strings"
 	"time"
 
 	"code.helsinki.at/rhrd-go/rddb"
@@ -132,6 +134,8 @@ type SessionStore struct {
 	store map[string]*SessionStoreUserElement
 	conf  *Config
 	db    *rddb.DBChan
+	stdlog *log.Logger
+	dbglog *log.Logger
 	quit  chan bool
 	done  chan bool
 	newChan chan newSessionRequest
@@ -177,10 +181,16 @@ func (store *SessionStore) new(ctx *Context, refId string) (resp newSessionRespo
 	}
 	ctx.conf = store.conf
 	ctx.db = store.db
+	if pref := ctx.stdlog.Prefix(); strings.Contains(pref, "%s") {
+		ctx.stdlog.SetPrefix(fmt.Sprintf(pref, resp.id))
+	}
+	if pref := ctx.dbglog.Prefix(); strings.Contains(pref, "%s") {
+		ctx.dbglog.SetPrefix(fmt.Sprintf(pref, resp.id))
+	}
 	s := &SessionStoreSessionElement{newSession(ctx, func() { store.GetInterface().Remove(ctx.UserName, resp.id) }), refId}
 	store.store[ctx.UserName].sessions[resp.id] = s
 	resp.session = store.store[ctx.UserName].sessions[resp.id].s.getInterface()
-	rhdl.Printf("SessionStore: created session for '%s' -> %s", ctx.UserName, resp.id)
+	store.dbglog.Printf("SessionStore: created session for '%s' -> %s", ctx.UserName, resp.id)
 	store.store[ctx.UserName].callUpdateHandlerAdd(resp.id, refId)
 	return
 }
@@ -252,12 +262,12 @@ func (store *SessionStore) remove(username, id string) (resp removeSessionRespon
 		go session.s.cleanup() // cleanup could take a while -> don't block all the other stuff
 		refId := session.refId
 		delete(user.sessions, id)
-		rhdl.Printf("SessionStore: removed session '%s/%s'", username, id)
+		store.dbglog.Printf("SessionStore: removed session '%s/%s'", username, id)
 		user.callUpdateHandlerRemove(id, refId)
 		if len(user.sessions) == 0 && len(user.updateCBs) == 0 {
 			delete(store.store, username)
-			rhdl.Printf("SessionStore: removed user '%s'", username)
+			store.dbglog.Printf("SessionStore: removed user '%s'", username)
 		}
 	} else {
 		resp.responsecode = http.StatusNotFound
 	}
@@ -271,7 +281,7 @@ func (store *SessionStore) maintenanceTask() {
 		user.callUpdateHandler(nil, nil)
 		if len(user.sessions) == 0 && len(user.updateCBs) == 0 {
 			delete(store.store, name)
-			rhdl.Printf("SessionStore: removed user '%s'", name)
+			store.dbglog.Printf("SessionStore: removed user '%s'", name)
 		}
 	}
 }
@@ -379,10 +389,12 @@ func (store *SessionStore) Cleanup() {
 	close(store.removeChan)
 }
 
-func NewSessionStore(conf *Config, db *rddb.DBChan) (store *SessionStore, err error) {
+func NewSessionStore(conf *Config, db *rddb.DBChan, stdlog, dbglog *log.Logger) (store *SessionStore, err error) {
 	store = new(SessionStore)
 	store.conf = conf
 	store.db = db
+	store.stdlog = stdlog
+	store.dbglog = dbglog
 	store.quit = make(chan bool, 1)
 	store.done = make(chan bool)
 	store.store = make(map[string]*SessionStoreUserElement)
-- 
cgit v0.10.2
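Usage note (editor's addition, not part of the patch): after this change the library no longer creates its own package-level loggers; the host application constructs them and passes them in. Below is a minimal sketch of the host-side wiring. Only the Init(), NewContext() and NewSessionStore() signatures, the nil-logger fallback and the "%s" prefix substitution are taken from the patch; the rhimport import path, the config/database setup and the concrete prefixes are illustrative assumptions.

package main

import (
	"log"
	"os"

	"code.helsinki.at/rhrd-go/rddb"
	"code.helsinki.at/rhrd-go/rhimport" // import path assumed, by analogy with the rddb path above
)

func main() {
	// The host application now owns both loggers; the library only writes to
	// whatever *log.Logger instances it is handed.
	stdlog := log.New(os.Stderr, "[rhimport]\t", log.LstdFlags)
	dbglog := log.New(os.Stderr, "[rhimport-dbg]\t", log.LstdFlags)

	// Init() replaces the old package init(): it initializes libcurl and
	// forwards the loggers to the fetcher subsystem.
	rhimport.Init(stdlog, dbglog)

	var conf *rhimport.Config // assumed to be created by the host's own setup code
	var db *rddb.DBChan       // assumed to be created by the host's own setup code

	// NewContext() now takes the loggers as well; passing nil makes the
	// library fall back to a discarding logger.
	ctx := rhimport.NewContext(conf, db, stdlog, dbglog)
	_ = ctx

	// A Context logger whose prefix contains "%s" gets the session id filled
	// in by SessionStore.new() (see the session_store.go hunk above).
	webStdLog := log.New(os.Stderr, "[rhimport-%s]\t", log.LstdFlags)
	webCtx := rhimport.NewContext(conf, db, webStdLog, dbglog)
	_ = webCtx

	store, err := rhimport.NewSessionStore(conf, db, stdlog, dbglog)
	if err != nil {
		stdlog.Fatalf("creating session store failed: %v", err)
	}
	_ = store
}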