func rescale(im image.Image, opts *DecodeOpts, swapDimensions bool) image.Image {
	mw, mh := opts.MaxWidth, opts.MaxHeight
	mwf, mhf := opts.ScaleWidth, opts.ScaleHeight
	b := im.Bounds()
	// only do downscaling, otherwise just serve the original image
	if !opts.wantRescale(b, swapDimensions) {
		return im
	}

	if swapDimensions {
		mw, mh = mh, mw
	}

	// ScaleWidth and ScaleHeight overrule MaxWidth and MaxHeight
	if mwf > 0.0 && mwf <= 1 {
		mw = int(mwf * float32(b.Dx()))
	}
	if mhf > 0.0 && mhf <= 1 {
		mh = int(mhf * float32(b.Dy()))
	}

	// If it's gigantic, it's more efficient to downsample first
	// and then resize; resizing will smooth out the roughness.
	// (trusting the moustachio guys on that one).
	if b.Dx() > mw*2 || b.Dy() > mh*2 {
		w, h := ScaledDimensions(b.Dx(), b.Dy(), mw*2, mh*2)
		im = resize.ResampleInplace(im, b, w, h)
		return resize.HalveInplace(im)
	}
	mw, mh = ScaledDimensions(b.Dx(), b.Dy(), mw, mh)
	return resize.Resize(im, b, mw, mh)
}
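// Hypothetical caller sketch for the opts-based rescale above; thumbnail256
// is not part of the original code. It bounds a decoded image to 256x256,
// leaving the other DecodeOpts fields at their zero values. A caller would
// pass swapDimensions=true when EXIF orientation rotates the image by 90
// degrees, so the width/height constraints apply to the rotated result.
func thumbnail256(im image.Image) image.Image {
	opts := &DecodeOpts{MaxWidth: 256, MaxHeight: 256}
	return rescale(im, opts, false)
}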
// rescale resizes im in-place to the dimensions sw x sh, overwriting the
// existing pixel data. It is up to the caller to ensure sw & sh maintain the
// aspect ratio of im.
func rescale(im image.Image, sw, sh int) image.Image {
	b := im.Bounds()
	w, h := b.Dx(), b.Dy()
	if sw == w && sh == h {
		return im
	}

	// If it's gigantic, it's more efficient to downsample first
	// and then resize; resizing will smooth out the roughness.
	// (trusting the moustachio guys on that one).
	if w > sw*2 && h > sh*2 {
		im = resize.ResampleInplace(im, b, sw*2, sh*2)
		return resize.HalveInplace(im)
	}
	return resize.Resize(im, b, sw, sh)
}
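// Caller-side sketch for the simplified rescale above; rescaleToFit is
// hypothetical and assumes it lives in the same package. Since this version
// of rescale leaves aspect-ratio handling to the caller, ScaledDimensions
// (which the old version called internally) is a natural way to compute a
// bounded, proportional target before delegating to rescale.
func rescaleToFit(im image.Image, maxW, maxH int) image.Image {
	b := im.Bounds()
	sw, sh := ScaledDimensions(b.Dx(), b.Dy(), maxW, maxH)
	return rescale(im, sw, sh)
}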
// testRun decodes jpegBytes with the provided decoder, resizes the result to
// 128x128, and re-encodes it to a discarded writer, exercising the full
// decode-resize-encode path. It skips the benchmark if djpeg is not installed.
func testRun(b testing.TB, decode decodeFunc) {
	if !fastjpeg.Available() {
		b.Skip("Skipping benchmark, djpeg unavailable.")
	}
	im, _, err := decode(bytes.NewReader(jpegBytes))
	if err != nil {
		b.Fatal(err)
	}
	rect := im.Bounds()
	w, h := 128, 128
	im = resize.Resize(im, rect, w, h)
	err = jpeg.Encode(ioutil.Discard, im, nil)
	if err != nil {
		b.Fatal(err)
	}
}
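// Hypothetical benchmark wrapper for testRun; it is not part of the original
// code. decodeFunc is assumed to have the shape
// func(io.Reader) (image.Image, string, error), which matches the three
// return values used above; the standard library's image.Decode fits that
// signature, so it can serve as the pure-Go baseline decoder here.
func BenchmarkDecodeResizeEncode(b *testing.B) {
	for i := 0; i < b.N; i++ {
		testRun(b, image.Decode)
	}
}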