Disallows indexation by search engines (by serving a robots.txt file) whenever you're not running in production.
```go
package main

import (
	"fmt"
	"net/http"

	"github.com/codegangsta/negroni"
	disallowrobots "github.com/rabeesh/negroni-disallowrobots"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(rw http.ResponseWriter, req *http.Request) {
		fmt.Fprintf(rw, "Welcome to the home page!")
	})

	// In a real deployment this would come from configuration; false means
	// the middleware tells search engines not to index the site.
	isProduction := false

	n := negroni.New()
	n.Use(disallowrobots.New(isProduction))
	n.UseHandler(mux)
	n.Run(":5000")
}
```
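If the middleware serves a disallow-all robots.txt whenever `isProduction` is false (an assumption based on the description above; the exact response body may differ), you can verify it with a quick client against the server started above:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Fetch robots.txt from the example server running on :5000.
	// Assumes the middleware intercepts /robots.txt when not in production.
	resp, err := http.Get("http://localhost:5000/robots.txt")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}

	// Expected (assumed) output when isProduction is false:
	//   User-agent: *
	//   Disallow: /
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}
```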
Information about the X-Robots-Tag header is available here.
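For comparison, the same "don't index this" signal can also be sent via the standard X-Robots-Tag response header rather than robots.txt. The sketch below is not part of this package; it shows a generic inline negroni middleware (hypothetical wiring) that sets the header on every response:

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/codegangsta/negroni"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(rw http.ResponseWriter, req *http.Request) {
		fmt.Fprintf(rw, "Welcome to the home page!")
	})

	n := negroni.New()
	// Hypothetical inline middleware: "noindex, nofollow" are standard
	// X-Robots-Tag directives asking crawlers not to index pages or
	// follow their links.
	n.Use(negroni.HandlerFunc(func(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) {
		rw.Header().Set("X-Robots-Tag", "noindex, nofollow")
		next(rw, r)
	}))
	n.UseHandler(mux)
	n.Run(":5000")
}
```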