Merge pull request #90 from 18F/robots-txt

Provide a robots.txt that denies all crawlers
Jehiah Czebotar 2015-05-10 16:07:36 -04:00
commit 5c03fe3840
3 changed files with 28 additions and 0 deletions


@@ -149,6 +149,7 @@ The command line to run `google_auth_proxy` would look like this:
Google Auth Proxy responds directly to the following endpoints. All other endpoints will be proxied upstream when authenticated.
* /robots.txt - returns a 200 OK response that disallows all User-agents from all paths; see [robotstxt.org](http://www.robotstxt.org/) for more info
* /ping - returns a 200 OK response
* /oauth2/sign_in - the login page, which also doubles as a sign out page (it clears cookies)
* /oauth2/start - a URL that will redirect to start the OAuth cycle
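
With this change, an unauthenticated request to `/robots.txt` gets the following body back (taken verbatim from the handler added below):

```
User-agent: *
Disallow: /
```

The `Disallow: /` rule tells well-behaved crawlers to skip every path on the host.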


@@ -21,6 +21,7 @@ import (
	"github.com/bitly/google_auth_proxy/providers"
)
const robotsPath = "/robots.txt"
const pingPath = "/ping"
const signInPath = "/oauth2/sign_in"
const oauthStartPath = "/oauth2/start"
@@ -270,6 +271,11 @@ func (p *OauthProxy) SetCookie(rw http.ResponseWriter, req *http.Request, val st
	http.SetCookie(rw, cookie)
}
func (p *OauthProxy) RobotsTxt(rw http.ResponseWriter) {
	rw.WriteHeader(http.StatusOK)
	fmt.Fprintf(rw, "User-agent: *\nDisallow: /")
}
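
Taken out of the proxy, the new handler is just a few lines of `net/http`. Here is a minimal, runnable sketch of the same behavior (illustrative only, not part of this commit; the port is arbitrary):

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	http.HandleFunc("/robots.txt", func(rw http.ResponseWriter, req *http.Request) {
		// Same body the commit serves: deny all user agents on all paths.
		fmt.Fprint(rw, "User-agent: *\nDisallow: /")
	})
	http.ListenAndServe(":8080", nil) // arbitrary port for the sketch
}
```

Two small notes on the committed version: the explicit `rw.WriteHeader(http.StatusOK)` is safe but optional, since `net/http` sends 200 automatically on the first write, and because the format string carries no verbs, `fmt.Fprint` would work in place of `fmt.Fprintf`.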
func (p *OauthProxy) PingPage(rw http.ResponseWriter) {
	rw.WriteHeader(http.StatusOK)
	fmt.Fprintf(rw, "OK")
@@ -358,6 +364,11 @@ func (p *OauthProxy) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
	var email string
	var access_token string
	if req.URL.Path == robotsPath {
		p.RobotsTxt(rw)
		return
	}
	if req.URL.Path == pingPath {
		p.PingPage(rw)
		return
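
Note the ordering: the `robotsPath` check sits at the top of `ServeHTTP` next to the existing `pingPath` check, ahead of any cookie or token validation, so crawlers get the deny-all answer without being bounced into the OAuth flow. This matches the README wording above: these endpoints are answered directly, and everything else is proxied upstream only when authenticated.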


@@ -67,6 +67,22 @@ func TestEncodedSlashes(t *testing.T) {
	}
}
func TestRobotsTxt(t *testing.T) {
	opts := NewOptions()
	opts.Upstreams = append(opts.Upstreams, "unused")
	opts.ClientID = "bazquux"
	opts.ClientSecret = "foobar"
	opts.CookieSecret = "xyzzyplugh"
	opts.Validate()
	proxy := NewOauthProxy(opts, func(string) bool { return true })
	rw := httptest.NewRecorder()
	req, _ := http.NewRequest("GET", "/robots.txt", nil)
	proxy.ServeHTTP(rw, req)
	assert.Equal(t, 200, rw.Code)
	assert.Equal(t, "User-agent: *\nDisallow: /", rw.Body.String())
}
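
The test uses the standard `net/http/httptest` pattern: `httptest.NewRecorder` captures the output of a direct `proxy.ServeHTTP` call, so no listener or network round-trip is involved. It can be run on its own with `go test -run TestRobotsTxt`.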
type TestProvider struct {
	*providers.ProviderData
	EmailAddress string