diff --git a/httpbin/handlers.go b/httpbin/handlers.go
index f26ad0190776bfbbb09674f77bdcd7c753ebcaba..baf8dc2e14b42ac4129361139813d162d75b5cc7 100644
--- a/httpbin/handlers.go
+++ b/httpbin/handlers.go
@@ -562,3 +562,8 @@ Disallow: /deny
 `)
 	writeResponse(w, http.StatusOK, "text/plain", robotsTxt)
 }
+
+// Deny renders a basic plain-text page that robots should never access
+func (h *HTTPBin) Deny(w http.ResponseWriter, r *http.Request) {
+	writeResponse(w, http.StatusOK, "text/plain", []byte(`YOU SHOULDN'T BE HERE`))
+}
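
Note: writeResponse is an existing helper in this package, not part of this diff. Judging only from the call sites above, it presumably looks something like the sketch below (hypothetical; the real implementation may differ):

    // Hypothetical sketch of the writeResponse helper, inferred from its
    // call sites in this diff; the actual helper lives elsewhere in the
    // package and may differ.
    func writeResponse(w http.ResponseWriter, status int, contentType string, body []byte) {
    	w.Header().Set("Content-Type", contentType)
    	w.WriteHeader(status)
    	w.Write(body)
    }
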
diff --git a/httpbin/handlers_test.go b/httpbin/handlers_test.go
index dbfab0b5fcab7b3033f8a5fcb1988490246a9fd1..1a23a14063379aa8d6c4b7405560cc929d9850ed 100644
--- a/httpbin/handlers_test.go
+++ b/httpbin/handlers_test.go
@@ -1552,3 +1552,12 @@ func TestRobots(t *testing.T) {
 	assertContentType(t, w, "text/plain")
 	assertBodyContains(t, w, `Disallow: /deny`)
 }
+
+func TestDeny(t *testing.T) {
+	r, _ := http.NewRequest("GET", "/deny", nil)
+	w := httptest.NewRecorder()
+	handler.ServeHTTP(w, r)
+
+	assertContentType(t, w, "text/plain")
+	assertBodyContains(t, w, `YOU SHOULDN'T BE HERE`)
+}
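
The new test follows the TestRobots pattern above: it drives the full mux through the package-level handler and checks the content type and body. To run it in isolation (assuming the standard Go toolchain and this repository layout):

    go test ./httpbin -run TestDeny -v

If the test file has an assertStatusCode helper (not visible in this diff), asserting http.StatusOK here as well would be a natural tightening.
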
diff --git a/httpbin/httpbin.go b/httpbin/httpbin.go
index 5824755cd8413bd67699676b5bdcdccbddf19511..674522fb5e80ee081d2489688873d6b517aa2caf 100644
--- a/httpbin/httpbin.go
+++ b/httpbin/httpbin.go
@@ -124,6 +124,7 @@ func (h *HTTPBin) Handler() http.Handler {
 
 	mux.HandleFunc("/html", h.HTML)
 	mux.HandleFunc("/robots.txt", h.Robots)
+	mux.HandleFunc("/deny", h.Deny)
 
 	// Make sure our ServeMux doesn't "helpfully" redirect these invalid
 	// endpoints by adding a trailing slash. See the ServeMux docs for more
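
With the route registered, the endpoint can be exercised end to end. A minimal sketch, assuming the package exposes a constructor along the lines of httpbin.NewHTTPBin (the exact name is an assumption; adjust to the package's actual API):

    package main

    import (
    	"fmt"
    	"io"
    	"net/http"
    	"net/http/httptest"

    	"github.com/mccutchen/go-httpbin/httpbin"
    )

    func main() {
    	// Constructor name is assumed, not confirmed by this diff;
    	// adjust to whatever the package actually exports.
    	srv := httptest.NewServer(httpbin.NewHTTPBin().Handler())
    	defer srv.Close()

    	resp, err := http.Get(srv.URL + "/deny")
    	if err != nil {
    		panic(err)
    	}
    	defer resp.Body.Close()

    	body, _ := io.ReadAll(resp.Body)
    	fmt.Println(resp.StatusCode, resp.Header.Get("Content-Type")) // 200 text/plain
    	fmt.Println(string(body))                                     // YOU SHOULDN'T BE HERE
    }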