mirror of https://github.com/willnorris/imageproxy.git
synced 2024-12-16 21:56:43 -05:00

parent 702a07e3b5
commit 9837a20ddd

2 changed files with 79 additions and 83 deletions
@@ -22,9 +22,18 @@ import (
 	"net/url"
 	"strconv"
 	"strings"
-	"time"
 )
 
+// URLError reports a malformed URL error.
+type URLError struct {
+	Message string
+	URL     *url.URL
+}
+
+func (e URLError) Error() string {
+	return fmt.Sprintf("malformed URL %q: %s", e.URL, e.Message)
+}
+
 // Options specifies transformations that can be performed on a
 // requested image.
 type Options struct {
@@ -153,20 +162,3 @@ func NewRequest(r *http.Request) (*Request, error) {
 	req.URL.RawQuery = r.URL.RawQuery
 	return req, nil
 }
-
-// Image represents a remote image that is being proxied. It tracks where
-// the image was originally retrieved from and how long the image can be cached.
-type Image struct {
-	// URL of original remote image.
-	URL string
-
-	// Expires is the cache expiration time for the original image, as
-	// returned by the remote server.
-	Expires time.Time
-
-	// Etag returned from server when fetching image.
-	Etag string
-
-	// Bytes contains the actual image.
-	Bytes []byte
-}
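For context, a minimal standalone sketch (not part of the commit) of how the URLError type added above behaves; the request path and message are hypothetical, and the type definition is copied from the hunk so the example compiles on its own.

package main

import (
	"fmt"
	"net/url"
)

// URLError copies the type added in the hunk above.
type URLError struct {
	Message string
	URL     *url.URL
}

func (e URLError) Error() string {
	return fmt.Sprintf("malformed URL %q: %s", e.URL, e.Message)
}

func main() {
	// Hypothetical malformed proxy request path.
	u, _ := url.Parse("/badsize/example.com/image.png")
	err := URLError{Message: "too few path segments", URL: u}
	fmt.Println(err)
	// prints: malformed URL "/badsize/example.com/image.png": too few path segments
}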
proxy/proxy.go (122 lines changed)

@@ -16,28 +16,19 @@
 package proxy
 
 import (
-	"errors"
+	"bufio"
+	"bytes"
 	"fmt"
+	"io"
 	"io/ioutil"
 	"net/http"
 	"net/url"
-	"strconv"
-	"time"
+	"reflect"
 
 	"github.com/golang/glog"
 	"github.com/gregjones/httpcache"
 )
 
-// URLError reports a malformed URL error.
-type URLError struct {
-	Message string
-	URL     *url.URL
-}
-
-func (e URLError) Error() string {
-	return fmt.Sprintf("malformed URL %q: %s", e.URL, e.Message)
-}
-
 // Proxy serves image requests.
 type Proxy struct {
 	Client *http.Client // client used to fetch remote URLs
@@ -52,22 +43,23 @@ type Proxy struct {
 
 // NewProxy constructs a new proxy. The provided http Client will be used to
 // fetch remote URLs. If nil is provided, http.DefaultClient will be used.
-func NewProxy(client *http.Client, cache Cache) *Proxy {
-	if client == nil {
-		client = http.DefaultClient
+func NewProxy(transport http.RoundTripper, cache Cache) *Proxy {
+	if transport == nil {
+		transport = http.DefaultTransport
 	}
 	if cache == nil {
 		cache = NopCache
 	}
 
-	return &Proxy{
-		Client: &http.Client{
-			Transport: &httpcache.Transport{
-				Transport:           client.Transport,
-				Cache:               cache,
-				MarkCachedResponses: true,
-			},
-		},
+	client := new(http.Client)
+	client.Transport = &httpcache.Transport{
+		Transport:           &TransformingTransport{transport, client},
+		Cache:               cache,
+		MarkCachedResponses: true,
+	}
+
+	return &Proxy{
+		Client: client,
 		Cache:  cache,
 	}
 }
@@ -88,52 +80,33 @@ func (p *Proxy) ServeHTTP(w http.ResponseWriter, r *http.Request) {
 		req.Options.Height = float64(p.MaxHeight)
 	}
 
-	u := req.URL.String()
-	glog.Infof("request for image: %v", u)
-
 	if !p.allowed(req.URL) {
 		glog.Errorf("remote URL is not for an allowed host: %v", req.URL.Host)
 		http.Error(w, fmt.Sprintf("remote URL is not for an allowed host: %v", req.URL.Host), http.StatusBadRequest)
 		return
 	}
 
-	image, err := p.fetchRemoteImage(u)
+	u := req.URL.String()
+	if req.Options != nil && !reflect.DeepEqual(req.Options, emptyOptions) {
+		u += "#" + req.Options.String()
+	}
+	resp, err := p.Client.Get(u)
 	if err != nil {
 		glog.Errorf("error fetching remote image: %v", err)
 		http.Error(w, fmt.Sprintf("Error fetching remote image: %v", err), http.StatusInternalServerError)
 		return
 	}
 
-	b, _ := Transform(image.Bytes, req.Options)
-	image.Bytes = b
-
-	w.Header().Add("Content-Length", strconv.Itoa(len(image.Bytes)))
-	w.Header().Add("Expires", image.Expires.Format(time.RFC1123))
-	w.Write(image.Bytes)
-}
-
-func (p *Proxy) fetchRemoteImage(u string) (*Image, error) {
-	resp, err := p.Client.Get(u)
-	if err != nil {
-		return nil, err
-	}
-
 	if resp.StatusCode != http.StatusOK {
-		return nil, errors.New(fmt.Sprintf("HTTP status not OK: %v", resp.Status))
+		http.Error(w, fmt.Sprintf("Remote URL %q returned status: %v", req.URL, resp.Status), http.StatusInternalServerError)
+		return
 	}
 
+	w.Header().Add("Content-Length", resp.Header.Get("Content-Length"))
+	w.Header().Add("Expires", resp.Header.Get("Expires"))
+
 	defer resp.Body.Close()
-	b, err := ioutil.ReadAll(resp.Body)
-	if err != nil {
-		return nil, err
-	}
-
-	return &Image{
-		URL:     u,
-		Expires: parseExpires(resp),
-		Etag:    resp.Header.Get("Etag"),
-		Bytes:   b,
-	}, nil
+	io.Copy(w, resp.Body)
 }
 
 // allowed returns whether the specified URL is on the whitelist of remote hosts.
@@ -151,16 +124,47 @@ func (p *Proxy) allowed(u *url.URL) bool {
 	return false
 }
 
-func parseExpires(resp *http.Response) time.Time {
-	exp := resp.Header.Get("Expires")
-	if exp == "" {
-		return time.Now()
+// TransformingTransport is an implementation of http.RoundTripper that
+// optionally transforms images using the options specified in the request URL
+// fragment.
+type TransformingTransport struct {
+	// Transport is used to satisfy non-transform requests (those that do not include a URL fragment)
+	Transport http.RoundTripper
+
+	// Client is used to fetch images to be resized.
+	Client *http.Client
 }
 
-	t, err := time.Parse(time.RFC1123, exp)
+// RoundTrip implements http.RoundTripper.
+func (t *TransformingTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+	if req.URL.Fragment == "" {
+		// normal requests pass through
+		glog.Infof("fetching remote URL: %v", req.URL)
+		return t.Transport.RoundTrip(req)
+	}
+
+	u := *req.URL
+	u.Fragment = ""
+	resp, err := t.Client.Get(u.String())
+
+	defer resp.Body.Close()
+	b, err := ioutil.ReadAll(resp.Body)
 	if err != nil {
-		return time.Now()
+		return nil, err
 	}
 
-	return t
+	opt := ParseOptions(req.URL.Fragment)
+	img, err := Transform(b, opt)
+	if err != nil {
+		img = b
+	}
+
+	// replay response with transformed image and updated content length
+	buf := new(bytes.Buffer)
+	fmt.Fprintf(buf, "%s %s\n", resp.Proto, resp.Status)
+	resp.Header.WriteSubset(buf, map[string]bool{"Content-Length": true})
+	fmt.Fprintf(buf, "Content-Length: %d\n\n", len(img))
+	buf.Write(img)
+
+	return http.ReadResponse(bufio.NewReader(buf), req)
 }
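As a rough usage sketch of the refactored wiring (not part of the commit): NewProxy now takes an http.RoundTripper instead of an *http.Client, layers httpcache on top of the new TransformingTransport, and ServeHTTP re-encodes the request options onto the URL fragment so that cache hits skip both the remote fetch and the transform. The import path, listen address, and request path below are assumptions.

package main

import (
	"log"
	"net/http"

	"github.com/willnorris/imageproxy/proxy" // import path assumed for this sketch
)

func main() {
	// A nil RoundTripper falls back to http.DefaultTransport, and NopCache
	// (a no-op cache, per its name) effectively disables response caching,
	// matching the defaults in the new NewProxy above.
	p := proxy.NewProxy(nil, proxy.NopCache)

	// *Proxy implements http.Handler via ServeHTTP. A request such as
	// GET /100x100/http://example.com/image.png (path format assumed here)
	// is fetched through p.Client with the options appended as a URL
	// fragment, which TransformingTransport strips before transforming.
	log.Fatal(http.ListenAndServe(":8080", p))
}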