author    | Christian Pointner <equinox@helsinki.at> | 2016-07-28 20:23:33 (GMT)
committer | Christian Pointner <equinox@helsinki.at> | 2016-07-28 20:23:33 (GMT)
commit    | c7f6ff81175a78c4fb497a5ac98c3806457f762f (patch)
tree      | c22084fa654295a6ce1e01a7735cbe0459aafefb /rhimport
parent    | 12c91ff577dd956f357c17c6026439a54dd1f9ac (diff)
major name refactoring (don't export all the internal stuff)
Diffstat (limited to 'rhimport')
-rw-r--r-- | rhimport/bs1770_responses.go    |  46
-rw-r--r-- | rhimport/conf.go                |   2
-rw-r--r-- | rhimport/converter.go           | 104
-rw-r--r-- | rhimport/core.go                |  12
-rw-r--r-- | rhimport/fetcher.go             |  40
-rw-r--r-- | rhimport/importer.go            |  30
-rw-r--r-- | rhimport/rdxport_responses.go   |  26
-rw-r--r-- | rhimport/session.go             | 272
-rw-r--r-- | rhimport/session_store.go       |  66
-rw-r--r-- | rhimport/wave_generator.go      |  62
-rw-r--r-- | rhimport/youtubedl_responses.go |   6
11 files changed, 333 insertions, 333 deletions
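The rename follows one pattern throughout: implementation types and their constructors lose their exported names, so only the intended public surface of rhimport stays visible to importing packages. A minimal sketch of that pattern, using the rdxport_responses.go identifiers exactly as they appear in the hunks below (the constructor body is condensed, not copied verbatim):

```go
package rhimport

import (
	"encoding/xml"
	"fmt"
	"io"
)

// before: type RDWebResult struct{ ... } was visible to every importer of rhimport
// after:  only code inside the package can name the type
type rdWebResult struct {
	ResponseCode      int    `xml:"ResponseCode"`
	ErrorString       string `xml:"ErrorString"`
	AudioConvertError int    `xml:"AudioConvertError"`
}

// before: func NewRDWebResultFromXML(...) (*RDWebResult, error)
// after:  the constructor is unexported too; its only callers (importer.go) live in the package
func newRDWebResultFromXML(data io.Reader) (res *rdWebResult, err error) {
	res = &rdWebResult{}
	if xmlerr := xml.NewDecoder(data).Decode(res); xmlerr != nil {
		err = fmt.Errorf("Error parsing XML response: %s", xmlerr)
	}
	return
}
```

The session code gets the same treatment with an extra twist: the old `Session` and `SessionStore` state machines become the unexported `session` and `sessionStore`, while the channel-based handles formerly named `SessionChan` and `SessionStoreChan` take over the exported names `Session` and `SessionStore`.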
diff --git a/rhimport/bs1770_responses.go b/rhimport/bs1770_responses.go index b254b9c..b0a66fe 100644 --- a/rhimport/bs1770_responses.go +++ b/rhimport/bs1770_responses.go @@ -30,57 +30,57 @@ import ( "io" ) -type BS1770Result struct { - Album BS1770ResultAlbum `xml:"album"` +type bs1770Result struct { + Album bs1770ResultAlbum `xml:"album"` } -type BS1770ResultAlbum struct { - Tracks []BS1770ResultTrack `xml:"track"` - Summary BS1770ResultSummary `xml:"summary"` +type bs1770ResultAlbum struct { + Tracks []bs1770ResultTrack `xml:"track"` + Summary bs1770ResultSummary `xml:"summary"` } -type BS1770ResultTrack struct { +type bs1770ResultTrack struct { Total uint `xml:"total,attr"` Number uint `xml:"number,attr"` File string `xml:"file,attr"` - Integrated BS1770ResultValueLUFS `xml:"integrated"` - Momentary BS1770ResultValueLUFS `xml:"momentary"` - ShorttermMaximum BS1770ResultValueLUFS `xml:"shortterm-maximum"` - SamplePeak BS1770ResultValueSPFS `xml:"sample-peak"` - TruePeak BS1770ResultValueTPFS `xml:"true-peak"` + Integrated bs1770ResultValueLUFS `xml:"integrated"` + Momentary bs1770ResultValueLUFS `xml:"momentary"` + ShorttermMaximum bs1770ResultValueLUFS `xml:"shortterm-maximum"` + SamplePeak bs1770ResultValueSPFS `xml:"sample-peak"` + TruePeak bs1770ResultValueTPFS `xml:"true-peak"` } -type BS1770ResultSummary struct { +type bs1770ResultSummary struct { Total uint `xml:"total,attr"` - Integrated BS1770ResultValueLUFS `xml:"integrated"` - Momentary BS1770ResultValueLUFS `xml:"momentary"` - ShorttermMaximum BS1770ResultValueLUFS `xml:"shortterm-maximum"` - SamplePeak BS1770ResultValueSPFS `xml:"sample-peak"` - TruePeak BS1770ResultValueTPFS `xml:"true-peak"` + Integrated bs1770ResultValueLUFS `xml:"integrated"` + Momentary bs1770ResultValueLUFS `xml:"momentary"` + ShorttermMaximum bs1770ResultValueLUFS `xml:"shortterm-maximum"` + SamplePeak bs1770ResultValueSPFS `xml:"sample-peak"` + TruePeak bs1770ResultValueTPFS `xml:"true-peak"` } -type BS1770ResultValueLUFS struct { +type bs1770ResultValueLUFS struct { LUFS float64 `xml:"lufs,attr"` LU float64 `xml:"lu,attr"` } -type BS1770ResultValueRange struct { +type bs1770ResultValueRange struct { LUFS float64 `xml:"lufs,attr"` } -type BS1770ResultValueSPFS struct { +type bs1770ResultValueSPFS struct { SPFS float64 `xml:"spfs,attr"` Factor float64 `xml:"factor,attr"` } -type BS1770ResultValueTPFS struct { +type bs1770ResultValueTPFS struct { TPFS float64 `xml:"tpfs,attr"` Factor float64 `xml:"factor,attr"` } -func NewBS1770ResultFromXML(data io.Reader) (res *BS1770Result, err error) { +func newBS1770ResultFromXML(data io.Reader) (res *bs1770Result, err error) { decoder := xml.NewDecoder(data) - res = &BS1770Result{} + res = &bs1770Result{} if xmlerr := decoder.Decode(res); xmlerr != nil { err = fmt.Errorf("Error parsing XML response: %s", xmlerr) return diff --git a/rhimport/conf.go b/rhimport/conf.go index 502882d..f141a78 100644 --- a/rhimport/conf.go +++ b/rhimport/conf.go @@ -78,7 +78,7 @@ func (c *Config) readConfigFile() error { } func NewConfig(configfile, rdxportEndpoint, tempDir, localFetchDir string) (conf *Config, err error) { - conf = new(Config) + conf = &Config{} conf.ConfigFile = configfile conf.RDXportEndpoint = rdxportEndpoint conf.TempDir = tempDir diff --git a/rhimport/converter.go b/rhimport/converter.go index e53da9c..d9ac263 100644 --- a/rhimport/converter.go +++ b/rhimport/converter.go @@ -37,31 +37,31 @@ import ( "strings" ) -type FetchConverter interface { +type fetchConverter interface { io.WriteCloser GetResult(ctx 
*Context, res *Result) (result string, err error) } -type FetchConverterResult struct { +type fetchConverterResult struct { output string err error loudnessCorr float64 } -func NewFetchConverter(ctx *Context, filename string, metadata map[string]string) (FetchConverter, string, error) { +func newFetchConverter(ctx *Context, filename string, metadata map[string]string) (fetchConverter, string, error) { switch ctx.FetchConverter { case "null": // no support for loudness evaluation - leave normalization to Rivendell - return NewNullFetchConverter(ctx, filename, metadata) + return newNullFetchConverter(ctx, filename, metadata) case "ffmpeg": // no support for loudness evaluation - leave normalization to Rivendell - return NewFFMpegFetchConverter(ctx, filename, metadata) + return newFFMpegFetchConverter(ctx, filename, metadata) case "bs1770": ctx.NormalizationLevel = 0 // disable Rivendell normalization - return NewBS1770FetchConverter(ctx, filename, metadata) + return newBS1770FetchConverter(ctx, filename, metadata) case "ffmpeg-bs1770": ctx.NormalizationLevel = 0 // disable Rivendell normalization - return NewFFMpegBS1770FetchConverter(ctx, filename, metadata) + return newFFMpegBS1770FetchConverter(ctx, filename, metadata) } return nil, "", errors.New("unknown fetch converter type: " + ctx.FetchConverter) } @@ -70,27 +70,27 @@ func NewFetchConverter(ctx *Context, filename string, metadata map[string]string // NUll Converter aka File Writer // -type NullFetchConverter struct { +type nullFetchConverter struct { file *os.File } -func NewNullFetchConverter(ctx *Context, filename string, metadata map[string]string) (n *NullFetchConverter, newFilename string, err error) { - n = &NullFetchConverter{} +func newNullFetchConverter(ctx *Context, filename string, metadata map[string]string) (n *nullFetchConverter, newFilename string, err error) { + n = &nullFetchConverter{} newFilename = filepath.Dir(filename) + "/conv-null" + filepath.Ext(filename) ctx.stdlog.Printf("null-converter: opening file '%s' -> '%s'", filename, newFilename) n.file, err = os.OpenFile(newFilename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600) return } -func (c *NullFetchConverter) Write(p []byte) (n int, err error) { +func (c *nullFetchConverter) Write(p []byte) (n int, err error) { return c.file.Write(p) } -func (c *NullFetchConverter) Close() (err error) { +func (c *nullFetchConverter) Close() (err error) { return c.file.Close() } -func (c *NullFetchConverter) GetResult(ctx *Context, res *Result) (result string, err error) { +func (c *nullFetchConverter) GetResult(ctx *Context, res *Result) (result string, err error) { return "", nil } @@ -98,14 +98,14 @@ func (c *NullFetchConverter) GetResult(ctx *Context, res *Result) (result string // FFMpeg Converter: converts all files into flac // -type FFMpegFetchConverter struct { +type ffmpegFetchConverter struct { cmd *exec.Cmd pipe io.WriteCloser - result chan FetchConverterResult + result chan fetchConverterResult } -func NewFFMpegFetchConverter(ctx *Context, filename string, metadata map[string]string) (ff *FFMpegFetchConverter, filenameFlac string, err error) { - ff = &FFMpegFetchConverter{} +func newFFMpegFetchConverter(ctx *Context, filename string, metadata map[string]string) (ff *ffmpegFetchConverter, filenameFlac string, err error) { + ff = &ffmpegFetchConverter{} filenameFlac = filepath.Dir(filename) + "/conv-ffmpeg.flac" ctx.stdlog.Printf("ffmpeg-converter: starting ffmpeg for file '%s' -> '%s'", filename, filenameFlac) ff.cmd = exec.Command("ffmpeg", "-loglevel", "warning", 
"-i", "-", "-map_metadata", "0") @@ -119,23 +119,23 @@ func NewFFMpegFetchConverter(ctx *Context, filename string, metadata map[string] return nil, "", err } - ff.result = make(chan FetchConverterResult, 1) + ff.result = make(chan fetchConverterResult, 1) go func() { output, err := ff.cmd.CombinedOutput() - ff.result <- FetchConverterResult{strings.TrimSpace(string(output)), err, 0.0} + ff.result <- fetchConverterResult{strings.TrimSpace(string(output)), err, 0.0} }() return } -func (ff *FFMpegFetchConverter) Write(p []byte) (n int, err error) { +func (ff *ffmpegFetchConverter) Write(p []byte) (n int, err error) { return ff.pipe.Write(p) } -func (ff *FFMpegFetchConverter) Close() (err error) { +func (ff *ffmpegFetchConverter) Close() (err error) { return ff.pipe.Close() } -func (ff *FFMpegFetchConverter) GetResult(ctx *Context, res *Result) (result string, err error) { +func (ff *ffmpegFetchConverter) GetResult(ctx *Context, res *Result) (result string, err error) { if ff.result != nil { select { case r := <-ff.result: @@ -155,16 +155,16 @@ func (ff *FFMpegFetchConverter) GetResult(ctx *Context, res *Result) (result str // BS1770 Converter: calculates loudness correction value using ITU BS1770 (EBU R128) // -type BS1770FetchConverter struct { +type bs1770FetchConverter struct { cmd *exec.Cmd file *os.File pipe io.WriteCloser multi io.Writer - result chan FetchConverterResult + result chan fetchConverterResult } -func NewBS1770FetchConverter(ctx *Context, filename string, metadata map[string]string) (bs *BS1770FetchConverter, newFilename string, err error) { - bs = &BS1770FetchConverter{} +func newBS1770FetchConverter(ctx *Context, filename string, metadata map[string]string) (bs *bs1770FetchConverter, newFilename string, err error) { + bs = &bs1770FetchConverter{} newFilename = filepath.Dir(filename) + "/conv-bs1770" + filepath.Ext(filename) ctx.stdlog.Printf("bs1770-converter: starting bs1770gain for file '%s' -> '%s'", filename, newFilename) bs.file, err = os.OpenFile(newFilename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600) @@ -179,31 +179,31 @@ func NewBS1770FetchConverter(ctx *Context, filename string, metadata map[string] bs.cmd.Stdout = &bsStdout bs.cmd.Stderr = &bsStderr - bs.result = make(chan FetchConverterResult, 1) + bs.result = make(chan fetchConverterResult, 1) go func() { if err := bs.cmd.Run(); err != nil { - bs.result <- FetchConverterResult{strings.TrimSpace(string(bsStderr.String())), err, 0.0} + bs.result <- fetchConverterResult{strings.TrimSpace(string(bsStderr.String())), err, 0.0} } - res, err := NewBS1770ResultFromXML(&bsStdout) + res, err := newBS1770ResultFromXML(&bsStdout) if err != nil { - bs.result <- FetchConverterResult{bsStdout.String(), err, 0.0} + bs.result <- fetchConverterResult{bsStdout.String(), err, 0.0} return } if len(res.Album.Tracks) == 0 { - bs.result <- FetchConverterResult{bsStdout.String(), fmt.Errorf("bs1770gain returned no/invalid result"), 0.0} + bs.result <- fetchConverterResult{bsStdout.String(), fmt.Errorf("bs1770gain returned no/invalid result"), 0.0} return } - bs.result <- FetchConverterResult{"", nil, res.Album.Tracks[0].Integrated.LU} + bs.result <- fetchConverterResult{"", nil, res.Album.Tracks[0].Integrated.LU} }() return } -func (bs *BS1770FetchConverter) Write(p []byte) (n int, err error) { +func (bs *bs1770FetchConverter) Write(p []byte) (n int, err error) { return bs.multi.Write(p) } -func (bs *BS1770FetchConverter) Close() (err error) { +func (bs *bs1770FetchConverter) Close() (err error) { errPipe := bs.pipe.Close() errFile := 
bs.file.Close() if errFile != nil { @@ -212,7 +212,7 @@ func (bs *BS1770FetchConverter) Close() (err error) { return errPipe } -func (bs *BS1770FetchConverter) GetResult(ctx *Context, res *Result) (result string, err error) { +func (bs *bs1770FetchConverter) GetResult(ctx *Context, res *Result) (result string, err error) { if bs.result != nil { select { case r := <-bs.result: @@ -233,16 +233,16 @@ func (bs *BS1770FetchConverter) GetResult(ctx *Context, res *Result) (result str // using ITU BS1770 (EBU R128) // -type FFMpegBS1770FetchConverter struct { +type ffmpegBS1770FetchConverter struct { ffmpeg *exec.Cmd bs1770 *exec.Cmd pipe io.WriteCloser - resultFF chan FetchConverterResult - resultBS chan FetchConverterResult + resultFF chan fetchConverterResult + resultBS chan fetchConverterResult } -func NewFFMpegBS1770FetchConverter(ctx *Context, filename string, metadata map[string]string) (ff *FFMpegBS1770FetchConverter, filenameFlac string, err error) { - ff = &FFMpegBS1770FetchConverter{} +func newFFMpegBS1770FetchConverter(ctx *Context, filename string, metadata map[string]string) (ff *ffmpegBS1770FetchConverter, filenameFlac string, err error) { + ff = &ffmpegBS1770FetchConverter{} filenameFlac = filepath.Dir(filename) + "/conv-ffmpeg-bs1770.flac" ctx.stdlog.Printf("ffmpeg-bs1770-converter: starting ffmpeg and bs1770gain for file '%s' -> '%s'", filename, filenameFlac) ff.ffmpeg = exec.Command("ffmpeg", "-loglevel", "warning", "-i", "pipe:0", "-map_metadata", "0") @@ -270,46 +270,46 @@ func NewFFMpegBS1770FetchConverter(ctx *Context, filename string, metadata map[s ff.bs1770.Stdout = &bsStdout ff.bs1770.Stderr = &bsStderr - ff.resultFF = make(chan FetchConverterResult, 1) - ff.resultBS = make(chan FetchConverterResult, 1) + ff.resultFF = make(chan fetchConverterResult, 1) + ff.resultBS = make(chan fetchConverterResult, 1) go func() { err := ff.ffmpeg.Run() ffstdout.Close() - ff.resultFF <- FetchConverterResult{strings.TrimSpace(string(ffStderr.String())), err, 0.0} + ff.resultFF <- fetchConverterResult{strings.TrimSpace(string(ffStderr.String())), err, 0.0} }() go func() { if err := ff.bs1770.Run(); err != nil { - ff.resultBS <- FetchConverterResult{strings.TrimSpace(string(bsStderr.String())), err, 0.0} + ff.resultBS <- fetchConverterResult{strings.TrimSpace(string(bsStderr.String())), err, 0.0} } - res, err := NewBS1770ResultFromXML(&bsStdout) + res, err := newBS1770ResultFromXML(&bsStdout) if err != nil { - ff.resultBS <- FetchConverterResult{bsStdout.String(), err, 0.0} + ff.resultBS <- fetchConverterResult{bsStdout.String(), err, 0.0} return } if len(res.Album.Tracks) == 0 { - ff.resultBS <- FetchConverterResult{bsStdout.String(), fmt.Errorf("bs1770gain returned no/invalid result"), 0.0} + ff.resultBS <- fetchConverterResult{bsStdout.String(), fmt.Errorf("bs1770gain returned no/invalid result"), 0.0} return } - ff.resultBS <- FetchConverterResult{"", nil, res.Album.Tracks[0].Integrated.LU} + ff.resultBS <- fetchConverterResult{"", nil, res.Album.Tracks[0].Integrated.LU} }() return } -func (ff *FFMpegBS1770FetchConverter) Write(p []byte) (n int, err error) { +func (ff *ffmpegBS1770FetchConverter) Write(p []byte) (n int, err error) { return ff.pipe.Write(p) } -func (ff *FFMpegBS1770FetchConverter) Close() (err error) { +func (ff *ffmpegBS1770FetchConverter) Close() (err error) { return ff.pipe.Close() } -func (ff *FFMpegBS1770FetchConverter) GetResult(ctx *Context, res *Result) (result string, err error) { +func (ff *ffmpegBS1770FetchConverter) GetResult(ctx *Context, res *Result) 
(result string, err error) { if ff.resultFF == nil || ff.resultBS == nil { return "", nil } - var rff, rbs *FetchConverterResult + var rff, rbs *fetchConverterResult for { select { case r := <-ff.resultFF: diff --git a/rhimport/core.go b/rhimport/core.go index a0f1060..f01ed77 100644 --- a/rhimport/core.go +++ b/rhimport/core.go @@ -63,6 +63,16 @@ func Init(stdlog, dbglog *log.Logger) { type ProgressCB func(step int, stepName string, current, total float64, title string, cart, cut uint, userdata interface{}) bool type DoneCB func(result Result, userdata interface{}) bool +type ProgressData struct { + Step int + StepName string + Current float64 + Total float64 + Title string + Cart uint + Cut uint +} + type Result struct { ResponseCode int ErrorString string @@ -167,7 +177,7 @@ func NewContext(conf *Config, db *rddb.DBChan, stdlog, dbglog *log.Logger) *Cont dbglog = log.New(ioutil.Discard, "", 0) } - ctx := new(Context) + ctx := &Context{} ctx.conf = conf ctx.db = db ctx.stdlog = stdlog diff --git a/rhimport/fetcher.go b/rhimport/fetcher.go index e3280c4..f8bfa0a 100644 --- a/rhimport/fetcher.go +++ b/rhimport/fetcher.go @@ -45,14 +45,14 @@ import ( "github.com/andelf/go-curl" ) -type FetcherCurlCBData struct { +type fetcherCurlCBData struct { ctx *Context res *Result basepath string filename string remotename string metadata map[string]string - conv FetchConverter + conv fetchConverter totalSize float64 written uint64 writeError error @@ -60,7 +60,7 @@ type FetcherCurlCBData struct { func curlHeaderCallback(ptr []byte, userdata interface{}) bool { hdr := fmt.Sprintf("%s", ptr) - data := userdata.(*FetcherCurlCBData) + data := userdata.(*fetcherCurlCBData) if strings.HasPrefix(hdr, "Content-Disposition:") { if mediatype, params, err := mime.ParseMediaType(strings.TrimPrefix(hdr, "Content-Disposition:")); err == nil { @@ -73,7 +73,7 @@ func curlHeaderCallback(ptr []byte, userdata interface{}) bool { } func curlWriteCallback(ptr []byte, userdata interface{}) bool { - data := userdata.(*FetcherCurlCBData) + data := userdata.(*fetcherCurlCBData) if data.conv == nil { if data.filename == "" { name := path.Clean("/" + data.remotename) @@ -84,7 +84,7 @@ func curlWriteCallback(ptr []byte, userdata interface{}) bool { data.filename = filepath.Join(data.basepath, name) } data.ctx.OrigFilename = data.filename - conv, newFilename, err := NewFetchConverter(data.ctx, data.filename, data.metadata) + conv, newFilename, err := newFetchConverter(data.ctx, data.filename, data.metadata) if err != nil { data.ctx.stdlog.Printf("Unable to create converter for file %s: %s", data.filename, err) data.writeError = err @@ -110,7 +110,7 @@ func curlWriteCallback(ptr []byte, userdata interface{}) bool { } func curlProgressCallback(dltotal, dlnow, ultotal, ulnow float64, userdata interface{}) bool { - data := userdata.(*FetcherCurlCBData) + data := userdata.(*fetcherCurlCBData) if data.writeError != nil { return false @@ -132,7 +132,7 @@ func curlProgressCallback(dltotal, dlnow, ultotal, ulnow float64, userdata inter return true } -func checkYoutubeDL(ctx *Context, res *Result, uri *url.URL) *YoutubeDLInfo { +func checkYoutubeDL(ctx *Context, res *Result, uri *url.URL) *youtubeDLInfo { cmd := exec.Command("youtube-dl", "--no-playlist", "-f", "bestaudio/best", "--prefer-free-formats", "-J", ctx.SourceUri) var stderr, stdout bytes.Buffer cmd.Stdout = &stdout @@ -142,7 +142,7 @@ func checkYoutubeDL(ctx *Context, res *Result, uri *url.URL) *YoutubeDLInfo { } ctx.stdlog.Printf("running youtube-dl for '%s'", ctx.SourceUri) - 
done := make(chan *YoutubeDLInfo) + done := make(chan *youtubeDLInfo) go func() { defer func() { done <- nil @@ -151,7 +151,7 @@ func checkYoutubeDL(ctx *Context, res *Result, uri *url.URL) *YoutubeDLInfo { ctx.dbglog.Printf("youtube-dl: %v, stderr: %s", err, strings.TrimSpace(stderr.String())) return } - info, err := NewYoutubeDLInfoFromJSON(&stdout) + info, err := newYoutubeDLInfoFromJSON(&stdout) if err != nil { ctx.dbglog.Printf("youtube-dl: %v, stderr: %s", err, strings.TrimSpace(stderr.String())) return @@ -203,7 +203,7 @@ func fetchFileCurl(ctx *Context, res *Result, uri *url.URL) (err error) { } } - cbdata := &FetcherCurlCBData{ctx: ctx, res: res, remotename: path.Base(uri.Path)} + cbdata := &fetcherCurlCBData{ctx: ctx, res: res, remotename: path.Base(uri.Path)} if cbdata.basepath, err = ioutil.TempDir(ctx.conf.TempDir, "rhimportd-"); err != nil { return } @@ -356,7 +356,7 @@ func fetchFileArchiv(ctx *Context, res *Result, uri *url.URL) (err error) { return } - cbdata := &FetcherCurlCBData{ctx: ctx, res: res} + cbdata := &fetcherCurlCBData{ctx: ctx, res: res} cbdata.filename = fmt.Sprintf("%s/%s", destpath, srcfile) easy.Setopt(curl.OPT_WRITEFUNCTION, curlWriteCallback) @@ -433,8 +433,8 @@ func fetchFileDirConvert(ctx *Context, res *Result, origSrc *os.File, sizeTotal origDir, origFile := path.Split(ctx.SourceFile) ctx.OrigFilename = ctx.SourceFile - var conv FetchConverter - if conv, ctx.SourceFile, err = NewFetchConverter(ctx, filepath.Join(basepath, origFile), nil); err != nil { + var conv fetchConverter + if conv, ctx.SourceFile, err = newFetchConverter(ctx, filepath.Join(basepath, origFile), nil); err != nil { ctx.stdlog.Printf("Unable to create converter for file %s: %s", origDir+origFile, err) return } @@ -567,12 +567,12 @@ func fetchFileSilence(ctx *Context, res *Result, uri *url.URL) error { } duration := time.Duration(d) * 100 * time.Millisecond - wav, err := NewPCMWavFile(uint32(ctx.conf.SampleRate), 16, uint16(ctx.Channels), duration) + wav, err := newPCMWavFile(uint32(ctx.conf.SampleRate), 16, uint16(ctx.Channels), duration) if err != nil { return err } fileSize := wav.GetFileSize() - wav.Generator = NewSilenceGenerator() + wav.generator = NewSilenceGenerator() uri.Scheme = "attachment" uri.Host = strconv.FormatUint(uint64(fileSize), 10) @@ -606,7 +606,7 @@ func fetchFileSilence(ctx *Context, res *Result, uri *url.URL) error { return fetchFileAttachment(ctx, res, uri) } -func writeAttachmentFile(ctx *Context, res *Result, sizeTotal uint64, conv FetchConverter) error { +func writeAttachmentFile(ctx *Context, res *Result, sizeTotal uint64, conv fetchConverter) error { written := uint64(0) for { select { @@ -672,9 +672,9 @@ func fetchFileAttachment(ctx *Context, res *Result, uri *url.URL) error { ctx.SourceFile = filepath.Join(basepath, path.Clean("/"+uri.Path)) - var conv FetchConverter + var conv fetchConverter ctx.OrigFilename = ctx.SourceFile - if conv, ctx.SourceFile, err = NewFetchConverter(ctx, ctx.SourceFile, nil); err != nil { + if conv, ctx.SourceFile, err = newFetchConverter(ctx, ctx.SourceFile, nil); err != nil { ctx.stdlog.Printf("Unable to create converter for file %s: %s", ctx.OrigFilename, err) return err } @@ -709,10 +709,10 @@ func fetchFileAttachment(ctx *Context, res *Result, uri *url.URL) error { return nil } -type FetchFunc func(*Context, *Result, *url.URL) (err error) +type fetchFunc func(*Context, *Result, *url.URL) (err error) var ( - fetchers = map[string]FetchFunc{ + fetchers = map[string]fetchFunc{ "local": fetchFileLocal, "tmp": fetchFileTmp, 
"silence": fetchFileSilence, diff --git a/rhimport/importer.go b/rhimport/importer.go index 4b06724..57c525d 100644 --- a/rhimport/importer.go +++ b/rhimport/importer.go @@ -37,7 +37,7 @@ import ( "github.com/andelf/go-curl" ) -func (res *Result) fromRDWebResult(rdres *RDWebResult) { +func (res *Result) fromRDWebResult(rdres *rdWebResult) { res.ResponseCode = rdres.ResponseCode res.ErrorString = rdres.ErrorString if rdres.AudioConvertError != 0 { @@ -86,16 +86,16 @@ func addCart(ctx *Context, res *Result) (err error) { defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - var rdres *RDWebResult - if rdres, err = NewRDWebResultFromXML(resp.Body); err != nil { + var rdres *rdWebResult + if rdres, err = newRDWebResultFromXML(resp.Body); err != nil { return } res.fromRDWebResult(rdres) res.Cart = ctx.Cart return } - var cartadd *RDCartAdd - if cartadd, err = NewRDCartAddFromXML(resp.Body); err != nil { + var cartadd *rdCartAdd + if cartadd, err = newRDCartAddFromXML(resp.Body); err != nil { return } res.ResponseCode = resp.StatusCode @@ -131,8 +131,8 @@ func addCut(ctx *Context, res *Result) (err error) { defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - var rdres *RDWebResult - if rdres, err = NewRDWebResultFromXML(resp.Body); err != nil { + var rdres *rdWebResult + if rdres, err = newRDWebResultFromXML(resp.Body); err != nil { return } res.fromRDWebResult(rdres) @@ -140,8 +140,8 @@ func addCut(ctx *Context, res *Result) (err error) { res.Cut = ctx.Cut return } - var cutadd *RDCutAdd - if cutadd, err = NewRDCutAddFromXML(resp.Body); err != nil { + var cutadd *rdCutAdd + if cutadd, err = newRDCutAddFromXML(resp.Body); err != nil { return } res.ResponseCode = resp.StatusCode @@ -177,8 +177,8 @@ func removeCart(ctx *Context, res *Result) (err error) { } defer resp.Body.Close() - var rdres *RDWebResult - if rdres, err = NewRDWebResultFromXML(resp.Body); err != nil { + var rdres *rdWebResult + if rdres, err = newRDWebResultFromXML(resp.Body); err != nil { return } res.fromRDWebResult(rdres) @@ -214,8 +214,8 @@ func removeCut(ctx *Context, res *Result) (err error) { } defer resp.Body.Close() - var rdres *RDWebResult - if rdres, err = NewRDWebResultFromXML(resp.Body); err != nil { + var rdres *rdWebResult + if rdres, err = newRDWebResultFromXML(resp.Body); err != nil { return } res.fromRDWebResult(rdres) @@ -328,8 +328,8 @@ func importAudio(ctx *Context, res *Result) (err error) { return } - var rdres *RDWebResult - if rdres, err = NewRDWebResultFromXML(bufio.NewReader(&resbody)); err != nil { + var rdres *rdWebResult + if rdres, err = newRDWebResultFromXML(bufio.NewReader(&resbody)); err != nil { return } res.fromRDWebResult(rdres) diff --git a/rhimport/rdxport_responses.go b/rhimport/rdxport_responses.go index 2871408..b9c3f9b 100644 --- a/rhimport/rdxport_responses.go +++ b/rhimport/rdxport_responses.go @@ -30,15 +30,15 @@ import ( "io" ) -type RDWebResult struct { +type rdWebResult struct { ResponseCode int `xml:"ResponseCode"` ErrorString string `xml:"ErrorString"` AudioConvertError int `xml:"AudioConvertError"` } -func NewRDWebResultFromXML(data io.Reader) (res *RDWebResult, err error) { +func newRDWebResultFromXML(data io.Reader) (res *rdWebResult, err error) { decoder := xml.NewDecoder(data) - res = &RDWebResult{} + res = &rdWebResult{} if xmlerr := decoder.Decode(res); xmlerr != nil { err = fmt.Errorf("Error parsing XML response: %s", xmlerr) return @@ -46,11 +46,11 @@ func NewRDWebResultFromXML(data io.Reader) (res *RDWebResult, err error) { return } -type RDCartAdd 
struct { - Carts []RDCart `xml:"cart"` +type rdCartAdd struct { + Carts []rdCart `xml:"cart"` } -type RDCart struct { +type rdCart struct { Number uint `xml:"number"` Type string `xml:"type"` GroupName string `xml:"groupName"` @@ -79,9 +79,9 @@ type RDCart struct { MetadataDatetime string `xml:"metadataDatetime"` } -func NewRDCartAddFromXML(data io.Reader) (cart *RDCartAdd, err error) { +func newRDCartAddFromXML(data io.Reader) (cart *rdCartAdd, err error) { decoder := xml.NewDecoder(data) - cart = &RDCartAdd{} + cart = &rdCartAdd{} if xmlerr := decoder.Decode(cart); xmlerr != nil { err = fmt.Errorf("Error parsing XML response: %s", xmlerr) return @@ -89,11 +89,11 @@ func NewRDCartAddFromXML(data io.Reader) (cart *RDCartAdd, err error) { return } -type RDCutAdd struct { - Cuts []RDCut `xml:"cut"` +type rdCutAdd struct { + Cuts []rdCut `xml:"cut"` } -type RDCut struct { +type rdCut struct { Name string `xml:"cutName"` CartNumber uint `xml:"cartNumber"` Number uint `xml:"cutNumber"` @@ -139,9 +139,9 @@ type RDCut struct { TalkEndPoint int `xml:"talkEndPoint"` } -func NewRDCutAddFromXML(data io.Reader) (cut *RDCutAdd, err error) { +func newRDCutAddFromXML(data io.Reader) (cut *rdCutAdd, err error) { decoder := xml.NewDecoder(data) - cut = &RDCutAdd{} + cut = &rdCutAdd{} if xmlerr := decoder.Decode(cut); xmlerr != nil { err = fmt.Errorf("Error parsing XML response: %s", xmlerr) return diff --git a/rhimport/session.go b/rhimport/session.go index a119285..e749020 100644 --- a/rhimport/session.go +++ b/rhimport/session.go @@ -31,33 +31,23 @@ import ( ) const ( - SESSION_NEW = iota - SESSION_RUNNING - SESSION_CANCELED - SESSION_DONE - SESSION_TIMEOUT + _SESSION_NEW = iota + _SESSION_RUNNING + _SESSION_CANCELED + _SESSION_DONE + _SESSION_TIMEOUT ) -type SessionProgressCB struct { +type sessionProgressCB struct { cb ProgressCB userdata interface{} } -type SessionDoneCB struct { +type sessionDoneCB struct { cb DoneCB userdata interface{} } -type ProgressData struct { - Step int - StepName string - Current float64 - Total float64 - Title string - Cart uint - Cut uint -} - type sessionAddProgressHandlerResponse struct { err error } @@ -87,7 +77,7 @@ type attachUploaderRequest struct { response chan<- attachUploaderResponse } -type Session struct { +type session struct { ctx Context state int removeFunc func() @@ -103,8 +93,8 @@ type Session struct { addProgressChan chan sessionAddProgressHandlerRequest addDoneChan chan sessionAddDoneHandlerRequest attachUploaderChan chan attachUploaderRequest - progressCBs []*SessionProgressCB - doneCBs []*SessionDoneCB + progressCBs []*sessionProgressCB + doneCBs []*sessionDoneCB cancelUploader chan bool } @@ -148,47 +138,47 @@ func sessionRun(ctx Context, done chan<- Result) { done <- *res } -func (self *Session) run(timeout time.Duration) { - self.ctx.ProgressCallBack = sessionProgressCallback - self.ctx.ProgressCallBackData = (chan<- ProgressData)(self.progressIntChan) - self.ctx.Cancel = self.cancelIntChan - go sessionRun(self.ctx, self.doneIntChan) - self.state = SESSION_RUNNING +func (s *session) run(timeout time.Duration) { + s.ctx.ProgressCallBack = sessionProgressCallback + s.ctx.ProgressCallBackData = (chan<- ProgressData)(s.progressIntChan) + s.ctx.Cancel = s.cancelIntChan + go sessionRun(s.ctx, s.doneIntChan) + s.state = _SESSION_RUNNING if timeout > 3*time.Hour { - self.ctx.stdlog.Printf("requested session timeout (%v) is to high - lowering to 3h", timeout) + s.ctx.stdlog.Printf("requested session timeout (%v) is to high - lowering to 3h", timeout) 
timeout = 3 * time.Hour } - self.timer.Reset(timeout) + s.timer.Reset(timeout) return } -func (self *Session) cancel() { - self.ctx.dbglog.Println("Session: canceling running import") +func (s *session) cancel() { + s.ctx.dbglog.Println("Session: canceling running import") select { - case self.cancelIntChan <- true: + case s.cancelIntChan <- true: default: // session got canceled already?? } - self.state = SESSION_CANCELED + s.state = _SESSION_CANCELED } -func (self *Session) addProgressHandler(userdata interface{}, cb ProgressCB) (resp sessionAddProgressHandlerResponse) { - if self.state != SESSION_NEW && self.state != SESSION_RUNNING { +func (s *session) addProgressHandler(userdata interface{}, cb ProgressCB) (resp sessionAddProgressHandlerResponse) { + if s.state != _SESSION_NEW && s.state != _SESSION_RUNNING { resp.err = fmt.Errorf("session is already done/canceled") } - self.progressCBs = append(self.progressCBs, &SessionProgressCB{cb, userdata}) + s.progressCBs = append(s.progressCBs, &sessionProgressCB{cb, userdata}) return } -func (self *Session) addDoneHandler(userdata interface{}, cb DoneCB) (resp sessionAddDoneHandlerResponse) { - if self.state != SESSION_NEW && self.state != SESSION_RUNNING { +func (s *session) addDoneHandler(userdata interface{}, cb DoneCB) (resp sessionAddDoneHandlerResponse) { + if s.state != _SESSION_NEW && s.state != _SESSION_RUNNING { resp.err = fmt.Errorf("session is already done/canceled") } - self.doneCBs = append(self.doneCBs, &SessionDoneCB{cb, userdata}) + s.doneCBs = append(s.doneCBs, &sessionDoneCB{cb, userdata}) return } -func (self *Session) callProgressHandler(p *ProgressData) { - for _, cb := range self.progressCBs { +func (s *session) callProgressHandler(p *ProgressData) { + for _, cb := range s.progressCBs { if cb.cb != nil { if keep := cb.cb(p.Step, p.StepName, p.Current, p.Total, p.Title, p.Cart, p.Cut, cb.userdata); !keep { cb.cb = nil @@ -197,8 +187,8 @@ func (self *Session) callProgressHandler(p *ProgressData) { } } -func (self *Session) callDoneHandler(r *Result) { - for _, cb := range self.doneCBs { +func (s *session) callDoneHandler(r *Result) { + for _, cb := range s.doneCBs { if cb.cb != nil { if keep := cb.cb(*r, cb.userdata); !keep { cb.cb = nil @@ -207,88 +197,88 @@ func (self *Session) callDoneHandler(r *Result) { } } -func (self *Session) attachUploader() (resp attachUploaderResponse) { - if self.cancelUploader != nil { +func (s *session) attachUploader() (resp attachUploaderResponse) { + if s.cancelUploader != nil { return } - self.cancelUploader = make(chan bool, 1) - resp.cancel = self.cancelUploader - resp.attachment = self.ctx.AttachmentChan + s.cancelUploader = make(chan bool, 1) + resp.cancel = s.cancelUploader + resp.attachment = s.ctx.AttachmentChan return } -func (self *Session) dispatchRequests() { +func (s *session) dispatchRequests() { defer func() { - if self.cancelUploader != nil { - close(self.cancelUploader) + if s.cancelUploader != nil { + close(s.cancelUploader) } - self.done <- true + s.done <- true }() var lastProgress *ProgressData progressPending := 0 - pt := time.NewTimer(self.progressRateLimit) + pt := time.NewTimer(s.progressRateLimit) pt.Stop() for { select { - case <-self.quit: - if self.state == SESSION_RUNNING { - self.cancel() + case <-s.quit: + if s.state == _SESSION_RUNNING { + s.cancel() } return - case <-self.timer.C: - if self.state == SESSION_RUNNING { - self.cancel() + case <-s.timer.C: + if s.state == _SESSION_RUNNING { + s.cancel() } - self.state = SESSION_TIMEOUT + s.state = 
_SESSION_TIMEOUT r := &Result{ResponseCode: http.StatusInternalServerError, ErrorString: "session timed out"} - self.callDoneHandler(r) - if self.removeFunc != nil { - self.removeFunc() + s.callDoneHandler(r) + if s.removeFunc != nil { + s.removeFunc() } - case t := <-self.runChan: - if self.state == SESSION_NEW { - self.run(t) + case t := <-s.runChan: + if s.state == _SESSION_NEW { + s.run(t) } - case <-self.cancelChan: - if self.state == SESSION_RUNNING { - self.cancel() + case <-s.cancelChan: + if s.state == _SESSION_RUNNING { + s.cancel() } - case req := <-self.addProgressChan: - req.response <- self.addProgressHandler(req.userdata, req.callback) - case req := <-self.addDoneChan: - req.response <- self.addDoneHandler(req.userdata, req.callback) + case req := <-s.addProgressChan: + req.response <- s.addProgressHandler(req.userdata, req.callback) + case req := <-s.addDoneChan: + req.response <- s.addDoneHandler(req.userdata, req.callback) case <-pt.C: if progressPending > 1 && lastProgress != nil { - self.callProgressHandler(lastProgress) + s.callProgressHandler(lastProgress) } progressPending = 0 lastProgress = nil - case p := <-self.progressIntChan: - if self.state == SESSION_RUNNING { + case p := <-s.progressIntChan: + if s.state == _SESSION_RUNNING { if lastProgress == nil { - self.callProgressHandler(&p) - pt.Reset(self.progressRateLimit) + s.callProgressHandler(&p) + pt.Reset(s.progressRateLimit) } else if lastProgress.Step != p.Step { - self.callProgressHandler(lastProgress) - self.callProgressHandler(&p) - pt.Reset(self.progressRateLimit) + s.callProgressHandler(lastProgress) + s.callProgressHandler(&p) + pt.Reset(s.progressRateLimit) } lastProgress = &p progressPending++ } - case r := <-self.doneIntChan: - if self.state != SESSION_TIMEOUT { - self.timer.Stop() - self.state = SESSION_DONE - self.callDoneHandler(&r) - if self.removeFunc != nil { - self.removeFunc() + case r := <-s.doneIntChan: + if s.state != _SESSION_TIMEOUT { + s.timer.Stop() + s.state = _SESSION_DONE + s.callDoneHandler(&r) + if s.removeFunc != nil { + s.removeFunc() } } - case req := <-self.attachUploaderChan: - req.response <- self.attachUploader() + case req := <-s.attachUploaderChan: + req.response <- s.attachUploader() } } } @@ -296,7 +286,7 @@ func (self *Session) dispatchRequests() { // ********************************************************* // Public Interface -type SessionChan struct { +type Session struct { runChan chan<- time.Duration cancelChan chan<- bool addProgressChan chan<- sessionAddProgressHandlerRequest @@ -304,28 +294,28 @@ type SessionChan struct { attachUploaderChan chan<- attachUploaderRequest } -func (self *SessionChan) Run(timeout time.Duration) { +func (s *Session) Run(timeout time.Duration) { select { - case self.runChan <- timeout: + case s.runChan <- timeout: default: // command is already pending or session is about to be closed/removed } } -func (self *SessionChan) Cancel() { +func (s *Session) Cancel() { select { - case self.cancelChan <- true: + case s.cancelChan <- true: default: // cancel is already pending or session is about to be closed/removed } } -func (self *SessionChan) AddProgressHandler(userdata interface{}, cb ProgressCB) error { +func (s *Session) AddProgressHandler(userdata interface{}, cb ProgressCB) error { resCh := make(chan sessionAddProgressHandlerResponse) req := sessionAddProgressHandlerRequest{} req.userdata = userdata req.callback = cb req.response = resCh select { - case self.addProgressChan <- req: + case s.addProgressChan <- req: default: return 
fmt.Errorf("session is about to be closed/removed") } @@ -334,14 +324,14 @@ func (self *SessionChan) AddProgressHandler(userdata interface{}, cb ProgressCB) return res.err } -func (self *SessionChan) AddDoneHandler(userdata interface{}, cb DoneCB) error { +func (s *Session) AddDoneHandler(userdata interface{}, cb DoneCB) error { resCh := make(chan sessionAddDoneHandlerResponse) req := sessionAddDoneHandlerRequest{} req.userdata = userdata req.callback = cb req.response = resCh select { - case self.addDoneChan <- req: + case s.addDoneChan <- req: default: return fmt.Errorf("session is about to be closed/removed") } @@ -350,12 +340,12 @@ func (self *SessionChan) AddDoneHandler(userdata interface{}, cb DoneCB) error { return res.err } -func (self *SessionChan) AttachUploader() (<-chan bool, chan<- AttachmentChunk) { +func (s *Session) AttachUploader() (<-chan bool, chan<- AttachmentChunk) { resCh := make(chan attachUploaderResponse) req := attachUploaderRequest{} req.response = resCh select { - case self.attachUploaderChan <- req: + case s.attachUploaderChan <- req: default: // session is about to be closed/removed return nil, nil @@ -368,53 +358,53 @@ func (self *SessionChan) AttachUploader() (<-chan bool, chan<- AttachmentChunk) // ********************************************************* // Semi-Public Interface (only used by sessionStore) -func (self *Session) getInterface() *SessionChan { - ch := &SessionChan{} - ch.runChan = self.runChan - ch.cancelChan = self.cancelChan - ch.addProgressChan = self.addProgressChan - ch.addDoneChan = self.addDoneChan - ch.attachUploaderChan = self.attachUploaderChan +func (s *session) getInterface() *Session { + ch := &Session{} + ch.runChan = s.runChan + ch.cancelChan = s.cancelChan + ch.addProgressChan = s.addProgressChan + ch.addDoneChan = s.addDoneChan + ch.attachUploaderChan = s.attachUploaderChan return ch } -func (self *Session) cleanup() { - self.quit <- true - self.ctx.dbglog.Printf("Session: waiting for session to close") - <-self.done - close(self.quit) - close(self.done) - self.timer.Stop() +func (s *session) cleanup() { + s.quit <- true + s.ctx.dbglog.Printf("Session: waiting for session to close") + <-s.done + close(s.quit) + close(s.done) + s.timer.Stop() // don't close the channels we give out because this might lead to a panic if // somebody wites to an already removed session - // close(self.cancelIntChan) - // close(self.progressIntChan) - // close(self.doneIntChan) - // close(self.runChan) - // close(self.cancelChan) - // close(self.addProgressChan) - // close(self.addDoneChan) - // close(self.attachUploader) - self.ctx.dbglog.Printf("Session: cleanup is now done") + // close(s.cancelIntChan) + // close(s.progressIntChan) + // close(s.doneIntChan) + // close(s.runChan) + // close(s.cancelChan) + // close(s.addProgressChan) + // close(s.addDoneChan) + // close(s.attachUploader) + s.ctx.dbglog.Printf("Session: cleanup is now done") } -func newSession(ctx *Context, removeFunc func()) (session *Session) { - session = new(Session) - session.state = SESSION_NEW - session.removeFunc = removeFunc - session.ctx = *ctx - session.quit = make(chan bool, 1) - session.done = make(chan bool) - session.timer = time.NewTimer(10 * time.Second) - session.cancelIntChan = make(chan bool, 1) - session.progressRateLimit = 100 * time.Millisecond // TODO: hardcoded value - session.progressIntChan = make(chan ProgressData, 10) - session.doneIntChan = make(chan Result, 1) - session.runChan = make(chan time.Duration, 1) - session.cancelChan = make(chan bool, 1) 
- session.addProgressChan = make(chan sessionAddProgressHandlerRequest, 10) - session.addDoneChan = make(chan sessionAddDoneHandlerRequest, 10) - session.attachUploaderChan = make(chan attachUploaderRequest, 1) - go session.dispatchRequests() +func newSession(ctx *Context, removeFunc func()) (s *session) { + s = &session{} + s.state = _SESSION_NEW + s.removeFunc = removeFunc + s.ctx = *ctx + s.quit = make(chan bool, 1) + s.done = make(chan bool) + s.timer = time.NewTimer(10 * time.Second) + s.cancelIntChan = make(chan bool, 1) + s.progressRateLimit = 100 * time.Millisecond // TODO: hardcoded value + s.progressIntChan = make(chan ProgressData, 10) + s.doneIntChan = make(chan Result, 1) + s.runChan = make(chan time.Duration, 1) + s.cancelChan = make(chan bool, 1) + s.addProgressChan = make(chan sessionAddProgressHandlerRequest, 10) + s.addDoneChan = make(chan sessionAddDoneHandlerRequest, 10) + s.attachUploaderChan = make(chan attachUploaderRequest, 1) + go s.dispatchRequests() return } diff --git a/rhimport/session_store.go b/rhimport/session_store.go index dafe71e..bb36473 100644 --- a/rhimport/session_store.go +++ b/rhimport/session_store.go @@ -38,7 +38,7 @@ import ( type newSessionResponse struct { id string - session *SessionChan + session *Session responsecode int errorstring string } @@ -50,7 +50,7 @@ type newSessionRequest struct { } type getSessionResponse struct { - session *SessionChan + session *Session refId string responsecode int errorstring string @@ -96,17 +96,17 @@ type removeSessionRequest struct { response chan removeSessionResponse } -type SessionStoreSessionElement struct { - s *Session +type sessionStoreSessionElement struct { + s *session refId string } -type SessionStoreUserElement struct { - sessions map[string]*SessionStoreSessionElement +type sessionStoreUserElement struct { + sessions map[string]*sessionStoreSessionElement updateCBs []SessionsListCB } -func (user *SessionStoreUserElement) callUpdateHandler(added, removed map[string]string) { +func (user *sessionStoreUserElement) callUpdateHandler(added, removed map[string]string) { var keptCBs []SessionsListCB for _, cb := range user.updateCBs { if cb.cb != nil { @@ -118,20 +118,20 @@ func (user *SessionStoreUserElement) callUpdateHandler(added, removed map[string user.updateCBs = keptCBs } -func (user *SessionStoreUserElement) callUpdateHandlerAdd(id, refId string) { +func (user *sessionStoreUserElement) callUpdateHandlerAdd(id, refId string) { added := make(map[string]string) added[id] = refId user.callUpdateHandler(added, nil) } -func (user *SessionStoreUserElement) callUpdateHandlerRemove(id, refId string) { +func (user *sessionStoreUserElement) callUpdateHandlerRemove(id, refId string) { removed := make(map[string]string) removed[id] = refId user.callUpdateHandler(nil, removed) } -type SessionStore struct { - store map[string]*SessionStoreUserElement +type sessionStore struct { + store map[string]*sessionStoreUserElement conf *Config db *rddb.DBChan stdlog *log.Logger @@ -152,7 +152,7 @@ func generateSessionId() (string, error) { return base64.RawURLEncoding.EncodeToString(b[:]), nil } -func (store *SessionStore) new(ctx *Context, refId string) (resp newSessionResponse) { +func (store *sessionStore) new(ctx *Context, refId string) (resp newSessionResponse) { resp.responsecode = http.StatusOK resp.errorstring = "OK" if !ctx.Trusted { @@ -175,8 +175,8 @@ func (store *SessionStore) new(ctx *Context, refId string) (resp newSessionRespo resp.id = id if _, exists := store.store[ctx.UserName]; !exists { - newuser 
:= &SessionStoreUserElement{} - newuser.sessions = make(map[string]*SessionStoreSessionElement) + newuser := &sessionStoreUserElement{} + newuser.sessions = make(map[string]*sessionStoreSessionElement) store.store[ctx.UserName] = newuser } ctx.conf = store.conf @@ -187,7 +187,7 @@ func (store *SessionStore) new(ctx *Context, refId string) (resp newSessionRespo if pref := ctx.dbglog.Prefix(); strings.Contains(pref, "%s") { ctx.dbglog.SetPrefix(fmt.Sprintf(pref, resp.id)) } - s := &SessionStoreSessionElement{newSession(ctx, func() { store.GetInterface().Remove(ctx.UserName, resp.id) }), refId} + s := &sessionStoreSessionElement{newSession(ctx, func() { store.GetInterface().Remove(ctx.UserName, resp.id) }), refId} store.store[ctx.UserName].sessions[resp.id] = s resp.session = store.store[ctx.UserName].sessions[resp.id].s.getInterface() store.dbglog.Printf("SessionStore: created session for '%s' -> %s", ctx.UserName, resp.id) @@ -195,7 +195,7 @@ func (store *SessionStore) new(ctx *Context, refId string) (resp newSessionRespo return } -func (store *SessionStore) get(username, id string) (resp getSessionResponse) { +func (store *sessionStore) get(username, id string) (resp getSessionResponse) { resp.responsecode = http.StatusOK resp.errorstring = "OK" @@ -216,7 +216,7 @@ func (store *SessionStore) get(username, id string) (resp getSessionResponse) { return } -func (store *SessionStore) list(username, password string, trusted bool, userdata interface{}, cb SessionsUpdateCB) (resp listSessionsResponse) { +func (store *sessionStore) list(username, password string, trusted bool, userdata interface{}, cb SessionsUpdateCB) (resp listSessionsResponse) { resp.responsecode = http.StatusOK resp.errorstring = "OK" if !trusted { @@ -239,15 +239,15 @@ func (store *SessionStore) list(username, password string, trusted bool, userdat user.updateCBs = append(user.updateCBs, SessionsListCB{cb, userdata}) } } else if cb != nil { - newuser := &SessionStoreUserElement{} - newuser.sessions = make(map[string]*SessionStoreSessionElement) + newuser := &sessionStoreUserElement{} + newuser.sessions = make(map[string]*sessionStoreSessionElement) newuser.updateCBs = []SessionsListCB{SessionsListCB{cb, userdata}} store.store[username] = newuser } return } -func (store *SessionStore) remove(username, id string) (resp removeSessionResponse) { +func (store *sessionStore) remove(username, id string) (resp removeSessionResponse) { resp.responsecode = http.StatusOK resp.errorstring = "OK" @@ -276,7 +276,7 @@ func (store *SessionStore) remove(username, id string) (resp removeSessionRespon return } -func (store *SessionStore) maintenanceTask() { +func (store *sessionStore) maintenanceTask() { for name, user := range store.store { user.callUpdateHandler(nil, nil) if len(user.sessions) == 0 && len(user.updateCBs) == 0 { @@ -286,7 +286,7 @@ func (store *SessionStore) maintenanceTask() { } } -func (store *SessionStore) dispatchRequests() { +func (store *sessionStore) dispatchRequests() { defer func() { store.done <- true }() mt := time.NewTicker(1 * time.Minute) @@ -311,14 +311,14 @@ func (store *SessionStore) dispatchRequests() { // ********************************************************* // Public Interface -type SessionStoreChan struct { +type SessionStore struct { newChan chan<- newSessionRequest getChan chan<- getSessionRequest listChan chan listSessionsRequest removeChan chan<- removeSessionRequest } -func (store *SessionStoreChan) New(ctx *Context, refId string) (string, *SessionChan, int, string) { +func (store *SessionStore) 
New(ctx *Context, refId string) (string, *Session, int, string) { resCh := make(chan newSessionResponse) req := newSessionRequest{} req.ctx = ctx @@ -330,7 +330,7 @@ func (store *SessionStoreChan) New(ctx *Context, refId string) (string, *Session return res.id, res.session, res.responsecode, res.errorstring } -func (store *SessionStoreChan) Get(user, id string) (*SessionChan, string, int, string) { +func (store *SessionStore) Get(user, id string) (*Session, string, int, string) { resCh := make(chan getSessionResponse) req := getSessionRequest{} req.user = user @@ -342,7 +342,7 @@ func (store *SessionStoreChan) Get(user, id string) (*SessionChan, string, int, return res.session, res.refId, res.responsecode, res.errorstring } -func (store *SessionStoreChan) List(user, password string, trusted bool, userdata interface{}, cb SessionsUpdateCB) (map[string]string, int, string) { +func (store *SessionStore) List(user, password string, trusted bool, userdata interface{}, cb SessionsUpdateCB) (map[string]string, int, string) { resCh := make(chan listSessionsResponse) req := listSessionsRequest{} req.user = user @@ -357,7 +357,7 @@ func (store *SessionStoreChan) List(user, password string, trusted bool, userdat return res.sessions, res.responsecode, res.errorstring } -func (store *SessionStoreChan) Remove(user, id string) (int, string) { +func (store *SessionStore) Remove(user, id string) (int, string) { resCh := make(chan removeSessionResponse) req := removeSessionRequest{} req.user = user @@ -369,8 +369,8 @@ func (store *SessionStoreChan) Remove(user, id string) (int, string) { return res.responsecode, res.errorstring } -func (store *SessionStore) GetInterface() *SessionStoreChan { - ch := &SessionStoreChan{} +func (store *sessionStore) GetInterface() *SessionStore { + ch := &SessionStore{} ch.newChan = store.newChan ch.getChan = store.getChan ch.listChan = store.listChan @@ -378,7 +378,7 @@ func (store *SessionStore) GetInterface() *SessionStoreChan { return ch } -func (store *SessionStore) Cleanup() { +func (store *sessionStore) Cleanup() { store.quit <- true <-store.done close(store.quit) @@ -389,15 +389,15 @@ func (store *SessionStore) Cleanup() { close(store.removeChan) } -func NewSessionStore(conf *Config, db *rddb.DBChan, stdlog, dbglog *log.Logger) (store *SessionStore, err error) { - store = new(SessionStore) +func NewSessionStore(conf *Config, db *rddb.DBChan, stdlog, dbglog *log.Logger) (store *sessionStore, err error) { + store = &sessionStore{} store.conf = conf store.db = db store.stdlog = stdlog store.dbglog = dbglog store.quit = make(chan bool, 1) store.done = make(chan bool) - store.store = make(map[string]*SessionStoreUserElement) + store.store = make(map[string]*sessionStoreUserElement) store.newChan = make(chan newSessionRequest, 10) store.getChan = make(chan getSessionRequest, 10) store.listChan = make(chan listSessionsRequest, 10) diff --git a/rhimport/wave_generator.go b/rhimport/wave_generator.go index f4c3261..7816bc0 100644 --- a/rhimport/wave_generator.go +++ b/rhimport/wave_generator.go @@ -34,11 +34,11 @@ import ( ) const ( - RIFF_TAG = "RIFF" - WAVE_TAG = "WAVE" - FMT_TAG = "fmt " - FMT_ID_PCM = 0x0001 - DATA_TAG = "data" + _RIFF_TAG = "RIFF" + _WAVE_TAG = "WAVE" + _FMT_TAG = "fmt " + _FMT_ID_PCM = 0x0001 + _DATA_TAG = "data" ) type wavHeader struct { @@ -60,31 +60,31 @@ type wavHeader struct { } func (h *wavHeader) Bytes() []byte { - buf := new(bytes.Buffer) + buf := &bytes.Buffer{} binary.Write(buf, binary.LittleEndian, h) return buf.Bytes() } -type SampleGenerator 
interface { +type sampleGenerator interface { Reset(samplePeriod float64) GetSamples(nSamples uint32, channels uint16) []float64 // this needs to be normalized, aka -1 <= value <= 1 } -type WavFile struct { +type wavFile struct { header wavHeader headerSize uint32 pcmSampleMax float64 pcmSampleBytes uint32 samplePeriod float64 - Generator SampleGenerator + generator sampleGenerator readOffset uint32 } -func (wav *WavFile) GetFileSize() (size uint32) { +func (wav *wavFile) GetFileSize() (size uint32) { return wav.headerSize + wav.header.dataLength } -func (wav *WavFile) Read(p []byte) (n int, err error) { +func (wav *wavFile) Read(p []byte) (n int, err error) { n = 0 if wav.readOffset >= (wav.header.riffLength + 8) { return n, io.EOF @@ -93,16 +93,16 @@ func (wav *WavFile) Read(p []byte) (n int, err error) { if wav.readOffset < wav.headerSize { n = copy(p, wav.header.Bytes()[wav.readOffset:]) wav.readOffset += uint32(n) - wav.Generator.Reset(wav.samplePeriod) + wav.generator.Reset(wav.samplePeriod) } if n >= len(p) { return } nsamples := uint32(len(p)-n) / uint32(wav.header.blockAlign) - data := wav.Generator.GetSamples(nsamples, wav.header.nChannels) + data := wav.generator.GetSamples(nsamples, wav.header.nChannels) switch wav.header.fmtID { - case FMT_ID_PCM: + case _FMT_ID_PCM: idx := 0 for _, normalized := range data { scaled := wav.pcmSampleMax * normalized @@ -123,8 +123,8 @@ func (wav *WavFile) Read(p []byte) (n int, err error) { return } -func NewPCMWavFile(sampleRate uint32, sampleDepth uint16, channels uint16, length time.Duration) (wav *WavFile, err error) { - wav = &WavFile{} +func newPCMWavFile(sampleRate uint32, sampleDepth uint16, channels uint16, length time.Duration) (wav *wavFile, err error) { + wav = &wavFile{} wav.headerSize = 8 + 4 + 8 + 16 + 8 if length <= 0 { return nil, errors.New("invalid length: must be > 0") @@ -154,61 +154,61 @@ func NewPCMWavFile(sampleRate uint32, sampleDepth uint16, channels uint16, lengt } dataLen := nFrames * uint32(frameSize) - copy(wav.header.riffTag[:], RIFF_TAG) + copy(wav.header.riffTag[:], _RIFF_TAG) wav.header.riffLength = wav.headerSize - 8 + dataLen - copy(wav.header.waveTag[:], WAVE_TAG) + copy(wav.header.waveTag[:], _WAVE_TAG) - copy(wav.header.fmtTag[:], FMT_TAG) + copy(wav.header.fmtTag[:], _FMT_TAG) wav.header.fmtLength = 16 - wav.header.fmtID = FMT_ID_PCM + wav.header.fmtID = _FMT_ID_PCM wav.header.nChannels = channels wav.header.sampleRate = sampleRate wav.header.byteRate = sampleRate * uint32(frameSize) wav.header.blockAlign = frameSize wav.header.sampleDepth = sampleDepth - copy(wav.header.dataTag[:], DATA_TAG) + copy(wav.header.dataTag[:], _DATA_TAG) wav.header.dataLength = dataLen return } -type SilenceGenerator struct { +type silenceGenerator struct { } -func NewSilenceGenerator() *SilenceGenerator { - return &SilenceGenerator{} +func NewSilenceGenerator() *silenceGenerator { + return &silenceGenerator{} } -func (s *SilenceGenerator) Reset(samplePeriod float64) { +func (s *silenceGenerator) Reset(samplePeriod float64) { // nothing here } -func (s *SilenceGenerator) GetSamples(nSamples uint32, nChannels uint16) (data []float64) { +func (s *silenceGenerator) GetSamples(nSamples uint32, nChannels uint16) (data []float64) { data = make([]float64, int(nSamples)*int(nChannels)) return } -type SinusGenerator struct { +type sinusGenerator struct { amp float64 freq float64 sp float64 t float64 } -func NewSinusGenerator(ampDB, freq float64) (sin *SinusGenerator) { - sin = &SinusGenerator{} +func newSinusGenerator(ampDB, freq 
float64) (sin *sinusGenerator) { + sin = &sinusGenerator{} sin.amp = math.Pow(10.0, (ampDB / 20.0)) sin.freq = freq return } -func (sin *SinusGenerator) Reset(samplePeriod float64) { +func (sin *sinusGenerator) Reset(samplePeriod float64) { sin.sp = samplePeriod sin.t = 0 } -func (sin *SinusGenerator) GetSamples(nSamples uint32, nChannels uint16) (data []float64) { +func (sin *sinusGenerator) GetSamples(nSamples uint32, nChannels uint16) (data []float64) { data = make([]float64, int(nSamples)*int(nChannels)) for i := 0; i < int(nSamples); i++ { val := sin.amp * math.Sin(2*math.Pi*sin.freq*sin.t) diff --git a/rhimport/youtubedl_responses.go b/rhimport/youtubedl_responses.go index 6f6d7ee..abc4f11 100644 --- a/rhimport/youtubedl_responses.go +++ b/rhimport/youtubedl_responses.go @@ -30,7 +30,7 @@ import ( "io" ) -type YoutubeDLInfo struct { +type youtubeDLInfo struct { ID string `json:"id"` Title string `json:"title"` URL string `json:"url"` @@ -41,9 +41,9 @@ type YoutubeDLInfo struct { HTTPHeaders map[string]string `json:"http_headers"` } -func NewYoutubeDLInfoFromJSON(data io.Reader) (res *YoutubeDLInfo, err error) { +func newYoutubeDLInfoFromJSON(data io.Reader) (res *youtubeDLInfo, err error) { decoder := json.NewDecoder(data) - res = &YoutubeDLInfo{} + res = &youtubeDLInfo{} if jsonerr := decoder.Decode(res); jsonerr != nil { err = fmt.Errorf("Error parsing JSON response: %s", jsonerr) return |
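Seen from outside the package, the surface that stays exported after this commit comes down to Config, Context, Result, the callback types, and the channel-backed Session and SessionStore handles. A hypothetical caller-side sketch, pieced together from the signatures in the hunks above; the import paths, endpoint and path strings, and the rddb wiring are placeholders, and most Context fields are left at their defaults:

```go
package main

import (
	"log"
	"net/http"
	"os"
	"time"

	"example.org/rhrd-go/rddb"     // placeholder import path
	"example.org/rhrd-go/rhimport" // placeholder import path
)

func main() {
	stdlog := log.New(os.Stderr, "[std] ", log.LstdFlags)
	dbglog := log.New(os.Stderr, "[dbg] ", log.LstdFlags)

	conf, err := rhimport.NewConfig("/etc/rhimportd.conf", "http://rivendell/rd-bin/rdxport.cgi", "/tmp", "/srv/import")
	if err != nil {
		stdlog.Fatal(err)
	}

	var db *rddb.DBChan // in a real caller this comes from the rddb package's own setup (not shown)

	store, err := rhimport.NewSessionStore(conf, db, stdlog, dbglog)
	if err != nil {
		stdlog.Fatal(err)
	}
	defer store.Cleanup()

	ctx := rhimport.NewContext(conf, db, stdlog, dbglog)
	ctx.UserName = "importuser" // cart/cut/source settings omitted in this sketch
	ctx.Trusted = true

	id, sess, code, errstr := store.GetInterface().New(ctx, "my-ref-id")
	if code != http.StatusOK {
		stdlog.Fatalf("creating session failed: %s", errstr)
	}
	stdlog.Printf("created import session %s", id)

	done := make(chan struct{})
	sess.AddDoneHandler(nil, func(res rhimport.Result, _ interface{}) bool {
		stdlog.Printf("import finished: %d %s", res.ResponseCode, res.ErrorString)
		close(done)
		return false // false: drop this handler after the call
	})
	sess.Run(30 * time.Minute)
	<-done
}
```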