diff --git a/httpbin/handlers.go b/httpbin/handlers.go
index 5963531869ba209a68bd992198e0c64b4920582a..f26ad0190776bfbbb09674f77bdcd7c753ebcaba 100644
--- a/httpbin/handlers.go
+++ b/httpbin/handlers.go
@@ -554,3 +554,11 @@ func (h *HTTPBin) Range(w http.ResponseWriter, r *http.Request) {
 func (h *HTTPBin) HTML(w http.ResponseWriter, r *http.Request) {
 	writeHTML(w, MustAsset("moby.html"), http.StatusOK)
 }
+
+// Robots renders a basic robots.txt file that disallows access to /deny
+func (h *HTTPBin) Robots(w http.ResponseWriter, r *http.Request) {
+	robotsTxt := []byte(`User-agent: *
+Disallow: /deny
+`)
+	writeResponse(w, http.StatusOK, "text/plain", robotsTxt)
+}
diff --git a/httpbin/handlers_test.go b/httpbin/handlers_test.go
index afdd681f0f35c0d26bfc99cc2f471dbcdfc5f562..dbfab0b5fcab7b3033f8a5fcb1988490246a9fd1 100644
--- a/httpbin/handlers_test.go
+++ b/httpbin/handlers_test.go
@@ -1543,3 +1543,12 @@ func TestHTML(t *testing.T) {
 	assertContentType(t, w, htmlContentType)
 	assertBodyContains(t, w, `<h1>Herman Melville - Moby-Dick</h1>`)
 }
+
+func TestRobots(t *testing.T) {
+	r, _ := http.NewRequest("GET", "/robots.txt", nil)
+	w := httptest.NewRecorder()
+	handler.ServeHTTP(w, r)
+
+	assertContentType(t, w, "text/plain")
+	assertBodyContains(t, w, `Disallow: /deny`)
+}
diff --git a/httpbin/httpbin.go b/httpbin/httpbin.go
index 5c1d86c7e1aa4185b7d345f2a7d2b38274c8963e..5824755cd8413bd67699676b5bdcdccbddf19511 100644
--- a/httpbin/httpbin.go
+++ b/httpbin/httpbin.go
@@ -123,6 +123,7 @@ func (h *HTTPBin) Handler() http.Handler {
 	mux.HandleFunc("/range/", h.Range)
 
 	mux.HandleFunc("/html", h.HTML)
+	mux.HandleFunc("/robots.txt", h.Robots)
 
 	// Make sure our ServeMux doesn't "helpfully" redirect these invalid
 	// endpoints by adding a trailing slash. See the ServeMux docs for more
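
For context, a minimal sketch of how the new endpoint responds once the patch is applied, calling Robots directly on a zero-value HTTPBin rather than going through the full mux as TestRobots does; the import path is assumed from the repository layout, and the direct construction is for illustration only:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/mccutchen/go-httpbin/httpbin"
)

func main() {
	// Robots reads nothing from its receiver, so a zero-value HTTPBin
	// is enough to exercise the handler in isolation (an assumption
	// for this sketch; the test suite uses the shared mux instead).
	h := &httpbin.HTTPBin{}

	r, _ := http.NewRequest("GET", "/robots.txt", nil)
	w := httptest.NewRecorder()
	h.Robots(w, r)

	fmt.Println(w.Header().Get("Content-Type")) // text/plain
	fmt.Print(w.Body.String())                  // User-agent: *
	                                            // Disallow: /deny
}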