package helper

import (
	"bytes"
	"errors"
	"io/ioutil"
	"mime"
	"net"
	"net/http"
	"net/url"
	"os"
	"os/exec"
	"regexp"
	"strings"
	"syscall"

	"gitlab.com/gitlab-org/gitlab-workhorse/internal/log"
)

// NginxResponseBufferHeader is the NGINX header that controls proxy response buffering.
const NginxResponseBufferHeader = "X-Accel-Buffering"

// Fail500 responds with "500 Internal Server Error" and captures and logs err.
func Fail500(w http.ResponseWriter, r *http.Request, err error) {
	http.Error(w, "Internal server error", 500)
	if err != nil {
		captureRavenError(r, err)
	}
	printError(r, err)
}

// LogError captures and logs err without writing an HTTP response.
func LogError(r *http.Request, err error) {
	if err != nil {
		captureRavenError(r, err)
	}
	printError(r, err)
}

// RequestEntityTooLarge responds with "413 Request Entity Too Large" and captures and logs err.
func RequestEntityTooLarge(w http.ResponseWriter, r *http.Request, err error) {
	http.Error(w, "Request Entity Too Large", http.StatusRequestEntityTooLarge)
	captureRavenError(r, err)
	printError(r, err)
}

func printError(r *http.Request, err error) {
	if r != nil {
		log.WithFields(r.Context(), log.Fields{
			"method": r.Method,
			"uri":    ScrubURLParams(r.RequestURI),
		}).WithError(err).Error("error")
	} else {
		log.NoContext().WithError(err).Error("unknown error")
	}
}

// SetNoCacheHeaders sets response headers that forbid client and proxy caching.
func SetNoCacheHeaders(header http.Header) {
	header.Set("Cache-Control", "no-cache, no-store, max-age=0, must-revalidate")
	header.Set("Pragma", "no-cache")
	header.Set("Expires", "Fri, 01 Jan 1990 00:00:00 GMT")
}

// OpenFile opens path for reading and returns the file together with its
// FileInfo. Directories are rejected with an error.
func OpenFile(path string) (file *os.File, fi os.FileInfo, err error) {
	file, err = os.Open(path)
	if err != nil {
		return
	}

	defer func() {
		if err != nil {
			file.Close()
		}
	}()

	fi, err = file.Stat()
	if err != nil {
		return
	}

	// os.Open can also open directories
	if fi.IsDir() {
		err = &os.PathError{
			Op:   "open",
			Path: path,
			Err:  errors.New("path is directory"),
		}
		return
	}

	return
}

// URLMustParse parses s and logs a fatal error if it is not a valid URL.
func URLMustParse(s string) *url.URL {
	u, err := url.Parse(s)
	if err != nil {
		log.NoContext().WithField("url", s).WithError(err).Fatal("urlMustParse")
	}
	return u
}

// HTTPError renders an error response; on HTTP/1.1 and later it also asks the
// client to close the connection.
func HTTPError(w http.ResponseWriter, r *http.Request, error string, code int) {
	if r.ProtoAtLeast(1, 1) {
		// Force client to disconnect if we render request error
		w.Header().Set("Connection", "close")
	}

	http.Error(w, error, code)
}

// HeaderClone returns a deep copy of h.
func HeaderClone(h http.Header) http.Header {
	h2 := make(http.Header, len(h))
	for k, vv := range h {
		vv2 := make([]string, len(vv))
		copy(vv2, vv)
		h2[k] = vv2
	}
	return h2
}

// CleanUpProcessGroup terminates the process group of cmd and reaps the child process.
func CleanUpProcessGroup(cmd *exec.Cmd) {
	if cmd == nil {
		return
	}

	process := cmd.Process
	if process != nil && process.Pid > 0 {
		// Send SIGTERM to the process group of cmd
		syscall.Kill(-process.Pid, syscall.SIGTERM)
	}

	// reap our child process
	cmd.Wait()
}

// ExitStatus returns the exit code of err if it is an *exec.ExitError.
func ExitStatus(err error) (int, bool) {
	exitError, ok := err.(*exec.ExitError)
	if !ok {
		return 0, false
	}

	waitStatus, ok := exitError.Sys().(syscall.WaitStatus)
	if !ok {
		return 0, false
	}

	return waitStatus.ExitStatus(), true
}

// DisableResponseBuffering tells NGINX not to buffer the proxied response.
func DisableResponseBuffering(w http.ResponseWriter) {
	w.Header().Set(NginxResponseBufferHeader, "no")
}

// AllowResponseBuffering removes the header so NGINX may buffer the response again.
func AllowResponseBuffering(w http.ResponseWriter) {
	w.Header().Del(NginxResponseBufferHeader)
}

// SetForwardedFor records the client IP of originalRequest in the
// X-Forwarded-For header of newHeaders.
func SetForwardedFor(newHeaders *http.Header, originalRequest *http.Request) {
	if clientIP, _, err := net.SplitHostPort(originalRequest.RemoteAddr); err == nil {
		var header string

		// If we aren't the first proxy retain prior
		// X-Forwarded-For information as a comma+space
		// separated list and fold multiple headers into one.
		if prior, ok := originalRequest.Header["X-Forwarded-For"]; ok {
			header = strings.Join(prior, ", ") + ", " + clientIP
		} else {
			header = clientIP
		}
		newHeaders.Set("X-Forwarded-For", header)
	}
}

// IsContentType reports whether the media type of actual equals expected.
func IsContentType(expected, actual string) bool {
	parsed, _, err := mime.ParseMediaType(actual)
	return err == nil && parsed == expected
}

// IsApplicationJson reports whether the request Content-Type is application/json.
func IsApplicationJson(r *http.Request) bool {
	contentType := r.Header.Get("Content-Type")
	return IsContentType("application/json", contentType)
}

// ReadRequestBody reads the request body, returning an error if it exceeds maxBodySize.
func ReadRequestBody(w http.ResponseWriter, r *http.Request, maxBodySize int64) ([]byte, error) {
	limitedBody := http.MaxBytesReader(w, r.Body, maxBodySize)
	defer limitedBody.Close()

	return ioutil.ReadAll(limitedBody)
}

// CloneRequestWithNewBody returns a copy of r whose body, header and
// ContentLength have been replaced by body.
func CloneRequestWithNewBody(r *http.Request, body []byte) *http.Request {
	newReq := *r
	newReq.Body = ioutil.NopCloser(bytes.NewReader(body))
	newReq.Header = HeaderClone(r.Header)
	newReq.ContentLength = int64(len(body))
	return &newReq
}

// ScrubURLParams replaces the content of any sensitive query string parameters
// in a URL with `[FILTERED]`.
// Based on https://stackoverflow.com/a/52965552/474597
func ScrubURLParams(originalURL string) string {
	u, err := url.Parse(originalURL)
	if err != nil {
		return "<invalid URL>"
	}

	buf := bytes.NewBuffer(make([]byte, 0, len(originalURL)))
	for i, queryPart := range bytes.Split([]byte(u.RawQuery), []byte("&")) {
		if i != 0 {
			buf.WriteByte(byte('&'))
		}

		splitParam := bytes.SplitN(queryPart, []byte("="), 2)
		if len(splitParam) == 2 {
			buf.Write(splitParam[0])
			buf.WriteByte(byte('='))

			if isParamSensitive(splitParam[0]) {
				buf.Write([]byte("[FILTERED]"))
			} else {
				buf.Write(splitParam[1])
			}
		} else {
			buf.Write(queryPart)
		}
	}
	u.RawQuery = buf.String()
	return u.String()
}

// Remember to keep in sync with Rails' filter_parameters
var sensitiveRegexps = []*regexp.Regexp{
	regexp.MustCompile(`token$`),
	regexp.MustCompile(`key$`),
	regexp.MustCompile(`(?i)(?:X\-AMZ\-)?Signature`),
}

// Not in regexp due to SA6004.
// Not in string for performance.
var sensitivePartialMatch = [][]byte{
	[]byte("password"),
	[]byte("secret"),
}

var sensitiveExactMatch = []string{
	"certificate",
	"hook",
	"import_url",
	"otp_attempt",
	"sentry_dsn",
	"trace",
	"variables",
	"content",
	"sharedSecret",
}

func isParamSensitive(name []byte) bool {
	for _, s := range sensitiveExactMatch {
		if string(name) == s {
			return true
		}
	}

	for _, r := range sensitiveRegexps {
		if r.Match(name) {
			return true
		}
	}

	for _, s := range sensitivePartialMatch {
		if bytes.Contains(name, s) {
			return true
		}
	}

	return false
}
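
// exampleScrubURLParams is a minimal illustrative sketch (the function name and
// the sample URL are hypothetical, not part of the package API) showing how
// ScrubURLParams behaves: "private_token" matches the `token$` rule and is
// filtered, while "service" is left untouched.
func exampleScrubURLParams() string {
	// => "/project.git/info/refs?service=git-upload-pack&private_token=[FILTERED]"
	return ScrubURLParams("/project.git/info/refs?service=git-upload-pack&private_token=secret123")
}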