/*
Copyright 2017 Vector Creations Ltd
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package main

import (
	"bytes"
	"compress/gzip"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"mime"
	"mime/multipart"
	"net/http"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/google/go-github/github"
)

var maxPayloadSize = 1024 * 1024 * 55 // 55 MB

type submitServer struct {
	// github client for reporting bugs. may be nil, in which case,
	// reporting is disabled.
	ghClient *github.Client

	// External URI to /api
	apiPrefix string

	// mappings from application to github owner/project
	githubProjectMappings map[string]string

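	// mappings from application to github owner/project, used to expand
	// ambiguous issue references (e.g. #123) in the report text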
	autocompleteProjectMappings map[string]string

	slack *slackClient
}

// the type of payload which can be uploaded as JSON to the submit endpoint
type jsonPayload struct {
	Text      string            `json:"text"`
	AppName   string            `json:"app"`
	Version   string            `json:"version"`
	UserAgent string            `json:"user_agent"`
	Logs      []jsonLogEntry    `json:"logs"`
	Data      map[string]string `json:"data"`
	Labels    []string          `json:"labels"`
}

type jsonLogEntry struct {
	ID    string `json:"id"`
	Lines string `json:"lines"`
}

// the payload after parsing
type parsedPayload struct {
	UserText   string
	AppName    string
	Data       map[string]string
	Labels     []string
	Logs       []string
	LogErrors  []string
	Files      []string
	FileErrors []string
}

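// WriteTo writes a plain-text summary of the payload to out.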
func (p parsedPayload) WriteTo(out io.Writer) {
	fmt.Fprintf(
		out,
		"%s\n\nNumber of logs: %d\nApplication: %s\n",
		p.UserText, len(p.Logs), p.AppName,
	)
	fmt.Fprintf(out, "Labels: %s\n", strings.Join(p.Labels, ", "))

	var dataKeys []string
	for k := range p.Data {
		dataKeys = append(dataKeys, k)
	}
	sort.Strings(dataKeys)
	for _, k := range dataKeys {
		v := p.Data[k]
		fmt.Fprintf(out, "%s: %s\n", k, v)
	}
	if len(p.LogErrors) > 0 {
		fmt.Fprint(out, "Log upload failures:\n")
		for _, e := range p.LogErrors {
			fmt.Fprintf(out, " %s\n", e)
		}
	}
	if len(p.FileErrors) > 0 {
		fmt.Fprint(out, "Attachment upload failures:\n")
		for _, e := range p.FileErrors {
			fmt.Fprintf(out, " %s\n", e)
		}
	}
}

type submitResponse struct {
	ReportURL string `json:"report_url,omitempty"`
}

// regex to catch and substitute ambiguous issue references with explicit ones to the actual repo they are in
var ambiguousIssueRegex = regexp.MustCompile(`(^|[([{\s])(#\d+)([^\w]|$)`)

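// replaceAmbiguousIssueReferences rewrites bare issue references such as
// "#123" in text into explicit "owner/repo#123" references.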
func replaceAmbiguousIssueReferences(ownerRepo, text string) string {
	t := ambiguousIssueRegex.ReplaceAllString(text, fmt.Sprintf("${1}%s$2$3", ownerRepo))
	return t
}

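// ServeHTTP handles a report submission: it saves the uploaded logs and
// attachments under a new report directory, then files the report with
// github and slack as configured.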
func (s *submitServer) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	// if we attempt to return a response without reading the request body,
	// apache gets upset and returns a 500. Let's try this.
	defer req.Body.Close()
	defer io.Copy(ioutil.Discard, req.Body)

	if req.Method != "POST" && req.Method != "OPTIONS" {
		respond(405, w)
		return
	}

	// Set CORS
	w.Header().Set("Access-Control-Allow-Origin", "*")
	w.Header().Set("Access-Control-Allow-Methods", "POST, OPTIONS")
	w.Header().Set("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept")
	if req.Method == "OPTIONS" {
		respond(200, w)
		return
	}

	// create the report dir before parsing the request, so that we can dump
	// files straight in
	t := time.Now().UTC()
	prefix := t.Format("2006-01-02/150405")
	reportDir := filepath.Join("bugs", prefix)
	if err := os.MkdirAll(reportDir, os.ModePerm); err != nil {
		log.Println("Unable to create report directory", err)
		http.Error(w, "Internal error", 500)
		return
	}

	listingURL := s.apiPrefix + "/listing/" + prefix
	log.Println("Handling report submission; listing URI will be", listingURL)

	p := parseRequest(w, req, reportDir)
	if p == nil {
		// parseRequest already wrote an error, but now let's delete the
		// useless report dir
		if err := os.RemoveAll(reportDir); err != nil {
			log.Printf("Unable to remove report dir %s after invalid upload: %v\n",
				reportDir, err)
		}
		return
	}

	if s.autocompleteProjectMappings[p.AppName] != "" {
		p.UserText = replaceAmbiguousIssueReferences(s.autocompleteProjectMappings[p.AppName], p.UserText)
	}

	resp, err := s.saveReport(req.Context(), *p, reportDir, listingURL)
	if err != nil {
		log.Println("Error handling report submission:", err)
		http.Error(w, "Internal error", 500)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(200)
	json.NewEncoder(w).Encode(resp)
}

// parseRequest attempts to parse a received request as a bug report. If
// the request cannot be parsed, it responds with an error and returns nil.
func parseRequest(w http.ResponseWriter, req *http.Request, reportDir string) *parsedPayload {
	length, err := strconv.Atoi(req.Header.Get("Content-Length"))
	if err != nil {
		log.Println("Couldn't parse content-length", err)
		http.Error(w, "Bad content-length", 400)
		return nil
	}
	if length > maxPayloadSize {
		log.Println("Content-length", length, "too large")
		http.Error(w, fmt.Sprintf("Content too large (max %d)", maxPayloadSize), 413)
		return nil
	}

	contentType := req.Header.Get("Content-Type")
	if contentType != "" {
		d, _, _ := mime.ParseMediaType(contentType)
		if d == "multipart/form-data" {
			p, err1 := parseMultipartRequest(w, req, reportDir)
			if err1 != nil {
				log.Println("Error parsing multipart data:", err1)
				http.Error(w, "Bad multipart data", 400)
				return nil
			}
			return p
		}
	}

	p, err := parseJSONRequest(w, req, reportDir)
	if err != nil {
		log.Println("Error parsing JSON body", err)
		http.Error(w, fmt.Sprintf("Could not decode payload: %s", err.Error()), 400)
		return nil
	}
	return p
}

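// parseJSONRequest parses an application/json submission, saving its logs
// to the report directory as it goes.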
func parseJSONRequest(w http.ResponseWriter, req *http.Request, reportDir string) (*parsedPayload, error) {
	var p jsonPayload
	if err := json.NewDecoder(req.Body).Decode(&p); err != nil {
		return nil, err
	}

	parsed := parsedPayload{
		UserText: strings.TrimSpace(p.Text),
		Data:     make(map[string]string),
		Labels:   p.Labels,
	}

	if p.Data != nil {
		parsed.Data = p.Data
	}

	for i, logfile := range p.Logs {
		buf := bytes.NewBufferString(logfile.Lines)
		leafName, err := saveLogPart(i, logfile.ID, buf, reportDir)
		if err != nil {
			log.Printf("Error saving log %s: %v", leafName, err)
			parsed.LogErrors = append(parsed.LogErrors, fmt.Sprintf("Error saving log %s: %v", leafName, err))
		} else {
			parsed.Logs = append(parsed.Logs, leafName)
		}
	}

	// backwards-compatibility hack: current versions of riot-android
	// don't set 'app', so we don't correctly file github issues.
	if p.AppName == "" && p.UserAgent == "Android" {
		parsed.AppName = "riot-android"

		// they also shove lots of stuff into 'Version' which we don't really
		// want in the github report
		for _, line := range strings.Split(p.Version, "\n") {
			line = strings.TrimSpace(line)
			if line == "" {
				continue
			}
			parts := strings.SplitN(line, ":", 2)
			key := strings.TrimSpace(parts[0])
			val := ""
			if len(parts) > 1 {
				val = strings.TrimSpace(parts[1])
			}
			parsed.Data[key] = val
		}
	} else {
		parsed.AppName = p.AppName

		if p.UserAgent != "" {
			parsed.Data["User-Agent"] = p.UserAgent
		}
		if p.Version != "" {
			parsed.Data["Version"] = p.Version
		}
	}

	return &parsed, nil
}

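// parseMultipartRequest parses a multipart/form-data submission, handling
// each form part in turn.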
func parseMultipartRequest(w http.ResponseWriter, req *http.Request, reportDir string) (*parsedPayload, error) {
	rdr, err := req.MultipartReader()
	if err != nil {
		return nil, err
	}

	p := parsedPayload{
		Data: make(map[string]string),
	}

	for {
		part, err := rdr.NextPart()
		if err == io.EOF {
			break
		} else if err != nil {
			return nil, err
		}

		if err = parseFormPart(part, &p, reportDir); err != nil {
			return nil, err
		}
	}
	return &p, nil
}

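// parseFormPart handles a single part of a multipart submission. "file" and
// (compressed-)"log" parts are saved to the report directory; anything else
// is folded into the payload via formPartToPayload.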
func parseFormPart(part *multipart.Part, p *parsedPayload, reportDir string) error {
	defer part.Close()
	field := part.FormName()
	partName := part.FileName()

	var partReader io.Reader
	if field == "compressed-log" {
		// decompress logs as we read them.
		//
		// we could save the log directly rather than unzipping and re-zipping,
		// but doing so conveys the benefit of checking the validity of the
		// gzip at upload time.
		zrdr, err := gzip.NewReader(part)
		if err != nil {
			// we don't reject the whole request if there is an
			// error reading one attachment.
			log.Printf("Error unzipping %s: %v", partName, err)

			p.LogErrors = append(p.LogErrors, fmt.Sprintf("Error unzipping %s: %v", partName, err))
			return nil
		}
		defer zrdr.Close()
		partReader = zrdr
	} else {
		// read the field data directly from the multipart part
		partReader = part
	}

	if field == "file" {
		leafName, err := saveFormPart(partName, partReader, reportDir)
		if err != nil {
			log.Printf("Error saving %s %s: %v", field, partName, err)
			p.FileErrors = append(p.FileErrors, fmt.Sprintf("Error saving %s: %v", partName, err))
		} else {
			p.Files = append(p.Files, leafName)
		}
		return nil
	}

	if field == "log" || field == "compressed-log" {
		leafName, err := saveLogPart(len(p.Logs), partName, partReader, reportDir)
		if err != nil {
			log.Printf("Error saving %s %s: %v", field, partName, err)
			p.LogErrors = append(p.LogErrors, fmt.Sprintf("Error saving %s: %v", partName, err))
		} else {
			p.Logs = append(p.Logs, leafName)
		}
		return nil
	}

	b, err := ioutil.ReadAll(partReader)
	if err != nil {
		return err
	}
	data := string(b)
	formPartToPayload(field, data, p)
	return nil
}

// formPartToPayload updates the relevant part of *p from a name/value pair
// read from the form data.
func formPartToPayload(field, data string, p *parsedPayload) {
	if field == "text" {
		p.UserText = data
	} else if field == "app" {
		p.AppName = data
	} else if field == "version" {
		p.Data["Version"] = data
	} else if field == "user_agent" {
		p.Data["User-Agent"] = data
	} else if field == "label" {
		p.Labels = append(p.Labels, data)
	} else {
		p.Data[field] = data
	}
}

// we use a quite restrictive regexp for the filenames; in particular:
//
// * a limited set of extensions. We are careful to limit the content-types
//   we will serve the files with, but somebody might accidentally point an
//   Apache or nginx at the upload directory, which would serve js files as
//   application/javascript and open XSS vulnerabilities.
//
// * no silly characters (/, ctrl chars, etc)
//
// * nothing starting with '.'
var filenameRegexp = regexp.MustCompile(`^[a-zA-Z0-9_-]+\.(jpg|png|txt)$`)

// saveFormPart saves a file upload to the report directory.
//
// Returns the leafname of the saved file.
func saveFormPart(leafName string, reader io.Reader, reportDir string) (string, error) {
	if !filenameRegexp.MatchString(leafName) {
		return "", fmt.Errorf("Invalid upload filename")
	}

	fullName := filepath.Join(reportDir, leafName)

	log.Println("Saving uploaded file", leafName, "to", fullName)

	f, err := os.Create(fullName)
	if err != nil {
		return "", err
	}
	defer f.Close()

	_, err = io.Copy(f, reader)
	if err != nil {
		return "", err
	}

	return leafName, nil
}

// we require a sensible extension, and don't allow the filename to start with
// '.'
var logRegexp = regexp.MustCompile(`^[a-zA-Z0-9_-][a-zA-Z0-9_.-]*\.(log|txt)$`)

// saveLogPart saves a log upload to the report directory.
//
// Returns the leafname of the saved file.
func saveLogPart(logNum int, filename string, reader io.Reader, reportDir string) (string, error) {
	// pick a name to save the log file with.
	//
	// some clients use sensible names (foo.N.log), which we preserve. For
	// others, we just make up a filename.
	//
	// Either way, we need to append .gz, because we're compressing it.
	var leafName string
	if logRegexp.MatchString(filename) {
		leafName = filename + ".gz"
	} else {
		leafName = fmt.Sprintf("logs-%04d.log.gz", logNum)
	}

	fullname := filepath.Join(reportDir, leafName)

	f, err := os.Create(fullname)
	if err != nil {
		return "", err
	}
	defer f.Close()

	gz := gzip.NewWriter(f)
	defer gz.Close()

	_, err = io.Copy(gz, reader)
	if err != nil {
		return "", err
	}

	return leafName, nil
}

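// saveReport writes a gzipped summary of the report to the report directory,
// then files a github issue and sends a slack notification for it.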
func (s *submitServer) saveReport(ctx context.Context, p parsedPayload, reportDir, listingURL string) (*submitResponse, error) {
	var summaryBuf bytes.Buffer
	resp := submitResponse{}
	p.WriteTo(&summaryBuf)
	if err := gzipAndSave(summaryBuf.Bytes(), reportDir, "details.log.gz"); err != nil {
		return nil, err
	}

	if err := s.submitGithubIssue(ctx, p, listingURL, &resp); err != nil {
		return nil, err
	}

	if err := s.submitSlackNotification(p, listingURL); err != nil {
		return nil, err
	}

	return &resp, nil
}

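// submitGithubIssue files a github issue for the report, if a github client
// and a project mapping for the application are configured.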
func (s *submitServer) submitGithubIssue(ctx context.Context, p parsedPayload, listingURL string, resp *submitResponse) error {
	if s.ghClient == nil {
		log.Println("GH issue submission disabled")
	} else {
		// submit a github issue
		ghProj := s.githubProjectMappings[p.AppName]
		if ghProj == "" {
			log.Println("Not creating GH issue for unknown app", p.AppName)
			return nil
		}
		splits := strings.SplitN(ghProj, "/", 2)
		if len(splits) < 2 {
			// a mapping which isn't of the form "owner/repo" would make the
			// indexing below panic, so skip issue creation for it
			log.Println("Can't create GH issue for invalid repo", ghProj)
			return nil
		}
		owner, repo := splits[0], splits[1]

		issueReq := buildGithubIssueRequest(p, listingURL)

		issue, _, err := s.ghClient.Issues.Create(ctx, owner, repo, &issueReq)
		if err != nil {
			return err
		}

		log.Println("Created issue:", *issue.HTMLURL)

		resp.ReportURL = *issue.HTMLURL
	}
	return nil
}

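// submitSlackNotification posts a short summary of the report to slack, if
// slack notifications are configured.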
func (s *submitServer) submitSlackNotification(p parsedPayload, listingURL string) error {
	if s.slack == nil {
		log.Println("Slack notifications disabled")
	} else {
		slackBuf := fmt.Sprintf(
			"%s\nApplication: %s\nReport: %s",
			p.UserText, p.AppName, listingURL,
		)

		err := s.slack.Notify(slackBuf)
		if err != nil {
			return err
		}
	}
	return nil
}

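// buildGithubIssueRequest builds the github issue to be filed for the report:
// the title is taken from the first line of the user's text, and the body
// includes the submitted metadata and links to the uploaded logs and files.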
func buildGithubIssueRequest(p parsedPayload, listingURL string) github.IssueRequest {
	// set the title to the first (non-empty) line of the user's report, if any
	var title string
	trimmedUserText := strings.TrimSpace(p.UserText)
	if trimmedUserText == "" {
		title = "Untitled report"
	} else {
		if i := strings.IndexAny(trimmedUserText, "\r\n"); i < 0 {
			title = trimmedUserText
		} else {
			title = trimmedUserText[0:i]
		}
	}

	var bodyBuf bytes.Buffer
	fmt.Fprintf(&bodyBuf, "User message:\n\n%s\n\n", p.UserText)
	var dataKeys []string
	for k := range p.Data {
		dataKeys = append(dataKeys, k)
	}
	sort.Strings(dataKeys)
	for _, k := range dataKeys {
		v := p.Data[k]
		fmt.Fprintf(&bodyBuf, "%s: `%s`\n", k, v)
	}
	fmt.Fprintf(&bodyBuf, "[Logs](%s)", listingURL)

	for _, file := range p.Files {
		fmt.Fprintf(
			&bodyBuf,
			" / [%s](%s)",
			file,
			listingURL+"/"+file,
		)
	}

	body := bodyBuf.String()

	labels := p.Labels
	// go-github doesn't like nils
	if labels == nil {
		labels = []string{}
	}
	return github.IssueRequest{
		Title:  &title,
		Body:   &body,
		Labels: &labels,
	}
}

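// respond writes an empty JSON object with the given status code.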
func respond(code int, w http.ResponseWriter) {
	w.WriteHeader(code)
	w.Write([]byte("{}"))
}

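// gzipAndSave gzips data and writes it to fpath within dirname, refusing to
// overwrite an existing file.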
func gzipAndSave(data []byte, dirname, fpath string) error {
	fpath = filepath.Join(dirname, fpath)

	if _, err := os.Stat(fpath); err == nil {
		return fmt.Errorf("file already exists") // the user can just retry
	}
	var b bytes.Buffer
	gz := gzip.NewWriter(&b)
	if _, err := gz.Write(data); err != nil {
		return err
	}
	if err := gz.Flush(); err != nil {
		return err
	}
	if err := gz.Close(); err != nil {
		return err
	}
	if err := ioutil.WriteFile(fpath, b.Bytes(), 0644); err != nil {
		return err
	}
	return nil
}