2017-04-05 23:01:34 +05:30
|
|
|
/*
|
|
|
|
Copyright 2017 Vector Creations Ltd
|
|
|
|
|
|
|
|
Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
you may not use this file except in compliance with the License.
|
|
|
|
You may obtain a copy of the License at
|
|
|
|
|
|
|
|
http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
|
|
|
Unless required by applicable law or agreed to in writing, software
|
|
|
|
distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
See the License for the specific language governing permissions and
|
|
|
|
limitations under the License.
|
|
|
|
*/
|
|
|
|
|
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"compress/gzip"
|
2017-04-07 20:13:19 +05:30
|
|
|
"context"
|
2021-12-09 20:33:25 +05:30
|
|
|
"encoding/base32"
|
2017-04-05 23:01:34 +05:30
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
2017-04-18 20:13:04 +05:30
|
|
|
"io"
|
2017-04-05 23:01:34 +05:30
|
|
|
"io/ioutil"
|
2017-04-07 20:13:19 +05:30
|
|
|
"log"
|
2021-12-09 20:33:25 +05:30
|
|
|
"math/rand"
|
2017-04-18 20:13:04 +05:30
|
|
|
"mime"
|
|
|
|
"mime/multipart"
|
2017-04-05 23:01:34 +05:30
|
|
|
"net/http"
|
2020-09-16 14:55:56 +05:30
|
|
|
"net/smtp"
|
2017-04-05 23:01:34 +05:30
|
|
|
"os"
|
|
|
|
"path/filepath"
|
2017-05-02 22:23:12 +05:30
|
|
|
"regexp"
|
2019-04-10 14:06:24 +05:30
|
|
|
"sort"
|
2017-04-05 23:01:34 +05:30
|
|
|
"strconv"
|
2017-04-12 19:36:40 +05:30
|
|
|
"strings"
|
2017-04-05 23:01:34 +05:30
|
|
|
"time"
|
2019-04-10 14:06:24 +05:30
|
|
|
|
|
|
|
"github.com/google/go-github/github"
|
2020-09-16 14:55:56 +05:30
|
|
|
"github.com/jordan-wright/email"
|
2021-08-10 22:34:58 +05:30
|
|
|
"github.com/xanzy/go-gitlab"
|
2017-04-05 23:01:34 +05:30
|
|
|
)
|
|
|
|
|
|
|
|
// maxPayloadSize is the largest Content-Length (in bytes) accepted for a
// submission; larger uploads are rejected with HTTP 413 (see parseRequest).
var maxPayloadSize = 1024 * 1024 * 55 // 55 MB
|
|
|
|
|
|
|
|
// submitServer handles bug-report submissions: it stores each report on
// disk and forwards it to whichever sinks (github, gitlab, slack, email,
// generic webhooks) are configured.
type submitServer struct {
	// github client for reporting bugs. may be nil, in which case,
	// reporting is disabled.
	ghClient *github.Client

	// gitlab client for reporting bugs. may be nil, in which case gitlab
	// reporting is disabled.
	glClient *gitlab.Client

	// External URI to /api
	apiPrefix string

	// slack client for notifications. may be nil, in which case slack
	// notifications are disabled.
	slack *slackClient

	// HTTP client used to deliver generic webhook notifications. may be
	// nil, in which case webhooks are disabled.
	genericWebhookClient *http.Client

	// parsed server configuration.
	cfg *config
}
|
|
|
|
|
2017-05-04 20:24:25 +05:30
|
|
|
// the type of payload which can be uploaded as JSON to the submit endpoint
type jsonPayload struct {
	// Text is the user-supplied description of the problem.
	Text string `json:"text"`
	// AppName identifies which application the report relates to.
	AppName string `json:"app"`
	// Version of the reporting application.
	Version string `json:"version"`
	// UserAgent of the reporting client.
	UserAgent string `json:"user_agent"`
	// Logs is the list of log files included inline in the report.
	Logs []jsonLogEntry `json:"logs"`
	// Data holds arbitrary key/value metadata supplied by the client.
	Data map[string]string `json:"data"`
	// Labels to attach to any issue created from this report.
	Labels []string `json:"labels"`
}
|
|
|
|
|
2017-05-04 20:24:25 +05:30
|
|
|
// jsonLogEntry is a single log file embedded in a JSON-format submission.
type jsonLogEntry struct {
	// ID is the client-supplied name for this log; it is used as the
	// on-disk filename when it passes the filename checks in saveLogPart.
	ID string `json:"id"`
	// Lines is the raw (uncompressed) log content.
	Lines string `json:"lines"`
}
|
|
|
|
|
2022-01-25 15:28:50 +05:30
|
|
|
// genericWebhookPayload is the JSON body delivered to configured generic
// webhook endpoints: the parsed report plus URLs pointing back at it.
type genericWebhookPayload struct {
	payload
	// ReportURL is the URL of the github/gitlab issue created for this
	// report, or empty if none was created.
	ReportURL string `json:"report_url"`
	// ListingURL points at the stored logs for this report.
	ListingURL string `json:"listing_url"`
}
|
|
|
|
|
2017-05-04 20:24:25 +05:30
|
|
|
// the payload after parsing
type payload struct {
	// ID is the unique identifier for this rageshake: the date-based
	// storage prefix (eg, "2022-05-01/125223-ABCDE"), set by ServeHTTP.
	ID string `json:"id"`
	// UserText is the user-supplied description of the problem.
	UserText string `json:"user_text"`
	// AppName identifies which application the report relates to.
	AppName string `json:"app"`
	// Data holds arbitrary key/value metadata (eg "Version", "User-Agent").
	Data map[string]string `json:"data"`
	// Labels to attach to any issue created from this report.
	Labels []string `json:"labels"`
	// Logs lists the leafnames of the log files saved to the report dir.
	Logs []string `json:"logs"`
	// LogErrors records non-fatal failures saving individual logs.
	LogErrors []string `json:"logErrors"`
	// Files lists the leafnames of other attachments saved to the report dir.
	Files []string `json:"files"`
	// FileErrors records non-fatal failures saving attachments.
	FileErrors []string `json:"fileErrors"`
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
// WriteTo writes a plain-text summary of the report to out: the user's
// message, basic metadata, the data map (sorted by key so the output is
// deterministic), and any log/attachment upload failures.
func (p payload) WriteTo(out io.Writer) {
	fmt.Fprintf(
		out,
		"%s\n\nNumber of logs: %d\nApplication: %s\n",
		p.UserText, len(p.Logs), p.AppName,
	)
	fmt.Fprintf(out, "Labels: %s\n", strings.Join(p.Labels, ", "))

	// map iteration order is random, so sort the keys for stable output
	var dataKeys []string
	for k := range p.Data {
		dataKeys = append(dataKeys, k)
	}
	sort.Strings(dataKeys)
	for _, k := range dataKeys {
		v := p.Data[k]
		fmt.Fprintf(out, "%s: %s\n", k, v)
	}
	if len(p.LogErrors) > 0 {
		fmt.Fprint(out, "Log upload failures:\n")
		for _, e := range p.LogErrors {
			fmt.Fprintf(out, "    %s\n", e)
		}
	}
	if len(p.FileErrors) > 0 {
		fmt.Fprint(out, "Attachment upload failures:\n")
		for _, e := range p.FileErrors {
			fmt.Fprintf(out, "    %s\n", e)
		}
	}
}
|
|
|
|
|
2017-04-18 16:25:22 +05:30
|
|
|
// submitResponse is the JSON body returned to the client on a successful
// submission.
type submitResponse struct {
	// ReportURL is the URL of the issue created for the report, if any.
	ReportURL string `json:"report_url,omitempty"`
}
|
|
|
|
|
2017-04-05 23:01:34 +05:30
|
|
|
// ServeHTTP handles a report submission: it validates the method, sets
// CORS headers, creates a unique report directory, parses the upload into
// it, and forwards the result to the configured sinks via saveReport.
func (s *submitServer) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	// if we attempt to return a response without reading the request body,
	// apache gets upset and returns a 500. Let's try this.
	defer req.Body.Close()
	defer io.Copy(ioutil.Discard, req.Body)

	// only POST (submission) and OPTIONS (CORS preflight) are accepted
	if req.Method != "POST" && req.Method != "OPTIONS" {
		respond(405, w)
		return
	}

	// Set CORS
	w.Header().Set("Access-Control-Allow-Origin", "*")
	w.Header().Set("Access-Control-Allow-Methods", "POST, OPTIONS")
	w.Header().Set("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept")
	if req.Method == "OPTIONS" {
		respond(200, w)
		return
	}

	// create the report dir before parsing the request, so that we can dump
	// files straight in
	t := time.Now().UTC()
	prefix := t.Format("2006-01-02/150405")
	// append a random base32 suffix so reports landing in the same second
	// get distinct directories. (math/rand.Read never returns an error.)
	randBytes := make([]byte, 5)
	rand.Read(randBytes)
	prefix += "-" + base32.StdEncoding.EncodeToString(randBytes)
	reportDir := filepath.Join("bugs", prefix)
	if err := os.MkdirAll(reportDir, os.ModePerm); err != nil {
		log.Println("Unable to create report directory", err)
		http.Error(w, "Internal error", 500)
		return
	}

	listingURL := s.apiPrefix + "/listing/" + prefix
	log.Println("Handling report submission; listing URI will be", listingURL)

	p := parseRequest(w, req, reportDir)
	if p == nil {
		// parseRequest already wrote an error, but now let's delete the
		// useless report dir
		if err := os.RemoveAll(reportDir); err != nil {
			log.Printf("Unable to remove report dir %s after invalid upload: %v\n",
				reportDir, err)
		}
		return
	}

	// We use this prefix (eg, 2022-05-01/125223-abcde) as a unique identifier for this rageshake.
	// This is going to be used to uniquely identify rageshakes, even if they are not submitted to
	// an issue tracker for instance with automatic rageshakes that can be plentiful
	p.ID = prefix

	resp, err := s.saveReport(req.Context(), *p, reportDir, listingURL)
	if err != nil {
		log.Println("Error handling report submission:", err)
		http.Error(w, "Internal error", 500)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(200)
	json.NewEncoder(w).Encode(resp)
}
|
|
|
|
|
|
|
|
// parseRequest attempts to parse a received request as a bug report. If
|
|
|
|
// the request cannot be parsed, it responds with an error and returns nil.
|
2022-04-13 17:51:46 +05:30
|
|
|
func parseRequest(w http.ResponseWriter, req *http.Request, reportDir string) *payload {
|
2017-04-13 19:48:20 +05:30
|
|
|
length, err := strconv.Atoi(req.Header.Get("Content-Length"))
|
|
|
|
if err != nil {
|
|
|
|
log.Println("Couldn't parse content-length", err)
|
|
|
|
http.Error(w, "Bad content-length", 400)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
if length > maxPayloadSize {
|
|
|
|
log.Println("Content-length", length, "too large")
|
2018-03-06 15:53:24 +05:30
|
|
|
http.Error(w, fmt.Sprintf("Content too large (max %d)", maxPayloadSize), 413)
|
2017-04-13 19:48:20 +05:30
|
|
|
return nil
|
|
|
|
}
|
2017-04-18 20:13:04 +05:30
|
|
|
|
|
|
|
contentType := req.Header.Get("Content-Type")
|
|
|
|
if contentType != "" {
|
|
|
|
d, _, _ := mime.ParseMediaType(contentType)
|
|
|
|
if d == "multipart/form-data" {
|
2017-05-02 22:23:12 +05:30
|
|
|
p, err1 := parseMultipartRequest(w, req, reportDir)
|
2017-04-18 20:13:04 +05:30
|
|
|
if err1 != nil {
|
2017-05-04 20:24:25 +05:30
|
|
|
log.Println("Error parsing multipart data:", err1)
|
2017-04-18 20:13:04 +05:30
|
|
|
http.Error(w, "Bad multipart data", 400)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
return p
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-05-04 20:24:25 +05:30
|
|
|
p, err := parseJSONRequest(w, req, reportDir)
|
2017-04-18 20:13:04 +05:30
|
|
|
if err != nil {
|
|
|
|
log.Println("Error parsing JSON body", err)
|
2017-04-11 16:51:30 +05:30
|
|
|
http.Error(w, fmt.Sprintf("Could not decode payload: %s", err.Error()), 400)
|
2017-04-13 19:48:20 +05:30
|
|
|
return nil
|
|
|
|
}
|
2017-04-18 20:13:04 +05:30
|
|
|
return p
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
// parseJSONRequest decodes a JSON-format submission, saving the embedded
// logs into reportDir, and converts it to the internal payload form.
// Failures saving individual logs are recorded in LogErrors rather than
// aborting the whole request.
func parseJSONRequest(w http.ResponseWriter, req *http.Request, reportDir string) (*payload, error) {
	var p jsonPayload
	if err := json.NewDecoder(req.Body).Decode(&p); err != nil {
		return nil, err
	}

	parsed := payload{
		UserText: strings.TrimSpace(p.Text),
		Data:     make(map[string]string),
		Labels:   p.Labels,
	}

	// keep the client's metadata map if it sent one; otherwise keep the
	// empty (but non-nil) map created above
	if p.Data != nil {
		parsed.Data = p.Data
	}

	for i, logfile := range p.Logs {
		buf := bytes.NewBufferString(logfile.Lines)
		leafName, err := saveLogPart(i, logfile.ID, buf, reportDir)
		if err != nil {
			// non-fatal: record the failure and keep the rest of the report
			log.Printf("Error saving log %s: %v", leafName, err)
			parsed.LogErrors = append(parsed.LogErrors, fmt.Sprintf("Error saving log %s: %v", leafName, err))
		} else {
			parsed.Logs = append(parsed.Logs, leafName)
		}
	}

	// backwards-compatibility hack: current versions of riot-android
	// don't set 'app', so we don't correctly file github issues.
	if p.AppName == "" && p.UserAgent == "Android" {
		parsed.AppName = "riot-android"

		// they also shove lots of stuff into 'Version' which we don't really
		// want in the github report
		for _, line := range strings.Split(p.Version, "\n") {
			line = strings.TrimSpace(line)
			if line == "" {
				continue
			}
			// each line is a "key: value" pair; a missing value becomes ""
			parts := strings.SplitN(line, ":", 2)
			key := strings.TrimSpace(parts[0])
			val := ""
			if len(parts) > 1 {
				val = strings.TrimSpace(parts[1])
			}
			parsed.Data[key] = val
		}
	} else {
		parsed.AppName = p.AppName

		if p.UserAgent != "" {
			parsed.Data["User-Agent"] = p.UserAgent
		}
		if p.Version != "" {
			parsed.Data["Version"] = p.Version
		}
	}

	return &parsed, nil
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func parseMultipartRequest(w http.ResponseWriter, req *http.Request, reportDir string) (*payload, error) {
|
2017-04-18 20:13:04 +05:30
|
|
|
rdr, err := req.MultipartReader()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
p := payload{
|
2017-04-18 20:13:04 +05:30
|
|
|
Data: make(map[string]string),
|
|
|
|
}
|
|
|
|
|
|
|
|
for true {
|
|
|
|
part, err := rdr.NextPart()
|
|
|
|
if err == io.EOF {
|
|
|
|
break
|
|
|
|
} else if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2017-05-02 22:23:12 +05:30
|
|
|
if err = parseFormPart(part, &p, reportDir); err != nil {
|
2017-04-18 20:13:04 +05:30
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return &p, nil
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
// parseFormPart handles a single multipart form part, dispatching on the
// field name: "file" parts are saved as attachments, "log"/"compressed-log"
// parts are saved as logs (gunzipping compressed ones so their validity is
// checked at upload time), and anything else is treated as a plain
// name/value pair via formPartToPayload.
//
// Failures saving an individual part are recorded in the payload's error
// lists rather than failing the whole upload.
func parseFormPart(part *multipart.Part, p *payload, reportDir string) error {
	defer part.Close()
	field := part.FormName()
	partName := part.FileName()

	var partReader io.Reader
	if field == "compressed-log" {
		// decompress logs as we read them.
		//
		// we could save the log directly rather than unzipping and re-zipping,
		// but doing so conveys the benefit of checking the validity of the
		// gzip at upload time.
		zrdr, err := gzip.NewReader(part)
		if err != nil {
			// we don't reject the whole request if there is an
			// error reading one attachment.
			log.Printf("Error unzipping %s: %v", partName, err)

			p.LogErrors = append(p.LogErrors, fmt.Sprintf("Error unzipping %s: %v", partName, err))
			return nil
		}
		defer zrdr.Close()
		partReader = zrdr
	} else {
		// read the field data directly from the multipart part
		partReader = part
	}

	if field == "file" {
		leafName, err := saveFormPart(partName, partReader, reportDir)
		if err != nil {
			// non-fatal: record and carry on with the other parts
			log.Printf("Error saving %s %s: %v", field, partName, err)
			p.FileErrors = append(p.FileErrors, fmt.Sprintf("Error saving %s: %v", partName, err))
		} else {
			p.Files = append(p.Files, leafName)
		}
		return nil
	}

	if field == "log" || field == "compressed-log" {
		leafName, err := saveLogPart(len(p.Logs), partName, partReader, reportDir)
		if err != nil {
			// non-fatal: record and carry on with the other parts
			log.Printf("Error saving %s %s: %v", field, partName, err)
			p.LogErrors = append(p.LogErrors, fmt.Sprintf("Error saving %s: %v", partName, err))
		} else {
			p.Logs = append(p.Logs, leafName)
		}
		return nil
	}

	// a plain form field: read it fully and stash it in the payload
	b, err := ioutil.ReadAll(partReader)
	if err != nil {
		return err
	}
	data := string(b)
	formPartToPayload(field, data, p)
	return nil
}
|
|
|
|
|
|
|
|
// formPartToPayload updates the relevant part of *p from a name/value pair
|
|
|
|
// read from the form data.
|
2022-04-13 17:51:46 +05:30
|
|
|
func formPartToPayload(field, data string, p *payload) {
|
2017-04-18 20:13:04 +05:30
|
|
|
if field == "text" {
|
2017-05-04 20:24:25 +05:30
|
|
|
p.UserText = data
|
2017-04-18 20:13:04 +05:30
|
|
|
} else if field == "app" {
|
|
|
|
p.AppName = data
|
|
|
|
} else if field == "version" {
|
2017-05-04 20:24:25 +05:30
|
|
|
p.Data["Version"] = data
|
2017-04-18 20:13:04 +05:30
|
|
|
} else if field == "user_agent" {
|
2017-05-04 20:24:25 +05:30
|
|
|
p.Data["User-Agent"] = data
|
2017-05-03 15:03:27 +05:30
|
|
|
} else if field == "label" {
|
|
|
|
p.Labels = append(p.Labels, data)
|
2017-04-18 20:13:04 +05:30
|
|
|
} else {
|
|
|
|
p.Data[field] = data
|
|
|
|
}
|
2017-04-05 23:01:34 +05:30
|
|
|
}
|
|
|
|
|
2017-05-02 22:23:12 +05:30
|
|
|
// we use a quite restrictive regexp for the filenames; in particular:
|
|
|
|
//
|
|
|
|
// * a limited set of extensions. We are careful to limit the content-types
|
|
|
|
// we will serve the files with, but somebody might accidentally point an
|
|
|
|
// Apache or nginx at the upload directory, which would serve js files as
|
|
|
|
// application/javascript and open XSS vulnerabilities.
|
|
|
|
//
|
|
|
|
// * no silly characters (/, ctrl chars, etc)
|
|
|
|
//
|
|
|
|
// * nothing starting with '.'
|
2022-02-08 16:28:21 +05:30
|
|
|
var filenameRegexp = regexp.MustCompile(`^[a-zA-Z0-9_-]+\.(jpg|png|txt|json)$`)
|
2017-04-18 16:25:22 +05:30
|
|
|
|
2017-05-02 22:23:12 +05:30
|
|
|
// saveFormPart saves a file upload to the report directory.
|
|
|
|
//
|
|
|
|
// Returns the leafname of the saved file.
|
|
|
|
func saveFormPart(leafName string, reader io.Reader, reportDir string) (string, error) {
|
|
|
|
if !filenameRegexp.MatchString(leafName) {
|
|
|
|
return "", fmt.Errorf("Invalid upload filename")
|
|
|
|
}
|
|
|
|
|
|
|
|
fullName := filepath.Join(reportDir, leafName)
|
|
|
|
|
|
|
|
log.Println("Saving uploaded file", leafName, "to", fullName)
|
|
|
|
|
|
|
|
f, err := os.Create(fullName)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
|
|
|
|
_, err = io.Copy(f, reader)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
return leafName, nil
|
|
|
|
}
|
|
|
|
|
2017-05-04 20:28:23 +05:30
|
|
|
// we require a sensible extension, and don't allow the filename to start with
|
|
|
|
// '.'
|
2022-01-18 22:55:44 +05:30
|
|
|
var logRegexp = regexp.MustCompile(`^[a-zA-Z0-9_-][a-zA-Z0-9_.-]*\.(log|txt)(\.gz)?$`)
|
2017-05-04 20:28:23 +05:30
|
|
|
|
2017-05-04 20:24:25 +05:30
|
|
|
// saveLogPart saves a log upload to the report directory.
|
|
|
|
//
|
|
|
|
// Returns the leafname of the saved file.
|
2017-05-04 20:28:23 +05:30
|
|
|
func saveLogPart(logNum int, filename string, reader io.Reader, reportDir string) (string, error) {
|
|
|
|
// pick a name to save the log file with.
|
|
|
|
//
|
|
|
|
// some clients use sensible names (foo.N.log), which we preserve. For
|
|
|
|
// others, we just make up a filename.
|
|
|
|
//
|
2022-01-24 19:11:36 +05:30
|
|
|
// We append a ".gz" extension if not already present, as the final file we store on
|
|
|
|
// disk will be gzipped. The original filename may or may not contain a '.gz' depending
|
|
|
|
// on the client that uploaded it, and if it was uploaded already compressed.
|
|
|
|
|
2017-05-04 20:28:23 +05:30
|
|
|
var leafName string
|
|
|
|
if logRegexp.MatchString(filename) {
|
2022-01-24 19:11:36 +05:30
|
|
|
leafName = filename
|
2022-01-24 19:08:50 +05:30
|
|
|
if !strings.HasSuffix(filename, ".gz") {
|
|
|
|
leafName += ".gz"
|
2022-01-18 23:00:50 +05:30
|
|
|
}
|
2017-05-04 20:28:23 +05:30
|
|
|
} else {
|
|
|
|
leafName = fmt.Sprintf("logs-%04d.log.gz", logNum)
|
|
|
|
}
|
|
|
|
|
2017-05-04 20:24:25 +05:30
|
|
|
fullname := filepath.Join(reportDir, leafName)
|
|
|
|
|
|
|
|
f, err := os.Create(fullname)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
|
|
|
|
gz := gzip.NewWriter(f)
|
|
|
|
defer gz.Close()
|
|
|
|
|
|
|
|
_, err = io.Copy(gz, reader)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
return leafName, nil
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
// saveReport persists a gzipped summary of the report into reportDir,
// then pushes the report to each configured sink in turn: github, gitlab,
// slack, email, and finally the generic webhooks (last, because they are
// given the issue URL that the earlier sinks fill into resp.ReportURL).
// Returns the response to send back to the submitting client.
func (s *submitServer) saveReport(ctx context.Context, p payload, reportDir, listingURL string) (*submitResponse, error) {
	var summaryBuf bytes.Buffer
	resp := submitResponse{}
	p.WriteTo(&summaryBuf)
	if err := gzipAndSave(summaryBuf.Bytes(), reportDir, "details.log.gz"); err != nil {
		return nil, err
	}

	if err := s.submitGithubIssue(ctx, p, listingURL, &resp); err != nil {
		return nil, err
	}

	if err := s.submitGitlabIssue(p, listingURL, &resp); err != nil {
		return nil, err
	}

	if err := s.submitSlackNotification(p, listingURL); err != nil {
		return nil, err
	}

	if err := s.sendEmail(p, reportDir); err != nil {
		return nil, err
	}

	// must run after the issue submissions: it consumes resp.ReportURL
	if err := s.submitGenericWebhook(p, listingURL, resp.ReportURL); err != nil {
		return nil, err
	}

	return &resp, nil
}
|
2017-04-13 19:48:20 +05:30
|
|
|
|
2022-01-25 15:28:50 +05:30
|
|
|
// submitGenericWebhook submits a basic JSON body to an endpoint configured in the config
|
|
|
|
//
|
2022-04-13 17:51:46 +05:30
|
|
|
// The request does not include the log body, only the metadata in the payload,
|
2022-01-25 15:28:50 +05:30
|
|
|
// with the required listingURL to obtain the logs over http if required.
|
|
|
|
//
|
|
|
|
// If a github or gitlab issue was previously made, the reportURL will also be passed.
|
|
|
|
//
|
|
|
|
// Uses a goroutine to handle the http request asynchronously as by this point all critical
|
|
|
|
// information has been stored.
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func (s *submitServer) submitGenericWebhook(p payload, listingURL string, reportURL string) error {
|
2022-01-25 15:28:50 +05:30
|
|
|
if s.genericWebhookClient == nil {
|
|
|
|
return nil
|
|
|
|
}
|
2022-02-01 19:29:01 +05:30
|
|
|
genericHookPayload := genericWebhookPayload{
|
2022-04-13 17:51:46 +05:30
|
|
|
payload: p,
|
2022-04-01 18:47:48 +05:30
|
|
|
ReportURL: reportURL,
|
|
|
|
ListingURL: listingURL,
|
2022-02-01 19:29:01 +05:30
|
|
|
}
|
2022-02-01 18:43:37 +05:30
|
|
|
for _, url := range s.cfg.GenericWebhookURLs {
|
2022-04-13 17:51:46 +05:30
|
|
|
// Enrich the payload with a reportURL and listingURL, to convert a single struct
|
2022-02-01 18:43:37 +05:30
|
|
|
// to JSON easily
|
|
|
|
|
|
|
|
payloadBuffer := new(bytes.Buffer)
|
|
|
|
json.NewEncoder(payloadBuffer).Encode(genericHookPayload)
|
|
|
|
req, err := http.NewRequest("POST", url, payloadBuffer)
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
|
|
if err != nil {
|
|
|
|
log.Println("Unable to submit to URL ", url, " ", err)
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
log.Println("Making generic webhook request to URL ", url)
|
|
|
|
go s.sendGenericWebhook(req)
|
2022-01-25 15:28:50 +05:30
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *submitServer) sendGenericWebhook(req *http.Request) {
|
|
|
|
resp, err := s.genericWebhookClient.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
log.Println("Unable to submit notification", err)
|
|
|
|
} else {
|
|
|
|
defer resp.Body.Close()
|
|
|
|
log.Println("Got response", resp.Status)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func (s *submitServer) submitGithubIssue(ctx context.Context, p payload, listingURL string, resp *submitResponse) error {
|
2019-03-07 19:12:08 +05:30
|
|
|
if s.ghClient == nil {
|
2020-09-16 14:55:56 +05:30
|
|
|
return nil
|
|
|
|
}
|
2017-04-13 19:48:20 +05:30
|
|
|
|
2020-09-16 14:55:56 +05:30
|
|
|
// submit a github issue
|
|
|
|
ghProj := s.cfg.GithubProjectMappings[p.AppName]
|
|
|
|
if ghProj == "" {
|
|
|
|
log.Println("Not creating GH issue for unknown app", p.AppName)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
splits := strings.SplitN(ghProj, "/", 2)
|
|
|
|
if len(splits) < 2 {
|
|
|
|
log.Println("Can't create GH issue for invalid repo", ghProj)
|
|
|
|
}
|
|
|
|
owner, repo := splits[0], splits[1]
|
2017-04-18 16:25:22 +05:30
|
|
|
|
2020-09-16 14:55:56 +05:30
|
|
|
issueReq := buildGithubIssueRequest(p, listingURL)
|
2019-03-07 19:12:08 +05:30
|
|
|
|
2020-09-16 14:55:56 +05:30
|
|
|
issue, _, err := s.ghClient.Issues.Create(ctx, owner, repo, &issueReq)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
2019-03-07 19:12:08 +05:30
|
|
|
}
|
2020-09-16 14:55:56 +05:30
|
|
|
|
|
|
|
log.Println("Created issue:", *issue.HTMLURL)
|
|
|
|
|
|
|
|
resp.ReportURL = *issue.HTMLURL
|
|
|
|
|
2019-03-07 19:12:08 +05:30
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
// submitGitlabIssue files a GitLab issue for the report, if a gitlab
// client is configured. The target project, extra labels and
// confidentiality come from the config. On success the issue's URL is
// written into resp.ReportURL.
func (s *submitServer) submitGitlabIssue(p payload, listingURL string, resp *submitResponse) error {
	if s.glClient == nil {
		// gitlab reporting is disabled
		return nil
	}

	glProj := s.cfg.GitlabProjectMappings[p.AppName]
	glLabels := s.cfg.GitlabProjectLabels[p.AppName]

	issueReq := buildGitlabIssueRequest(p, listingURL, glLabels, s.cfg.GitlabIssueConfidential)

	issue, _, err := s.glClient.Issues.CreateIssue(glProj, issueReq)

	if err != nil {
		return err
	}

	log.Println("Created issue:", issue.WebURL)

	resp.ReportURL = issue.WebURL

	return nil
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func (s *submitServer) submitSlackNotification(p payload, listingURL string) error {
|
2019-03-07 19:12:08 +05:30
|
|
|
if s.slack == nil {
|
2020-09-16 14:55:56 +05:30
|
|
|
return nil
|
|
|
|
}
|
2019-03-07 19:12:08 +05:30
|
|
|
|
2020-09-16 14:55:56 +05:30
|
|
|
slackBuf := fmt.Sprintf(
|
|
|
|
"%s\nApplication: %s\nReport: %s",
|
|
|
|
p.UserText, p.AppName, listingURL,
|
|
|
|
)
|
|
|
|
|
|
|
|
err := s.slack.Notify(slackBuf)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
2019-03-07 19:12:08 +05:30
|
|
|
}
|
2020-09-16 14:55:56 +05:30
|
|
|
|
2019-03-07 19:12:08 +05:30
|
|
|
return nil
|
2017-04-13 19:48:20 +05:30
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func buildReportTitle(p payload) string {
|
2017-09-08 16:41:56 +05:30
|
|
|
// set the title to the first (non-empty) line of the user's report, if any
|
|
|
|
trimmedUserText := strings.TrimSpace(p.UserText)
|
|
|
|
if trimmedUserText == "" {
|
2020-09-16 14:55:56 +05:30
|
|
|
return "Untitled report"
|
2017-04-12 19:36:40 +05:30
|
|
|
}
|
|
|
|
|
2020-09-16 14:55:56 +05:30
|
|
|
if i := strings.IndexAny(trimmedUserText, "\r\n"); i >= 0 {
|
|
|
|
return trimmedUserText[0:i]
|
|
|
|
}
|
|
|
|
|
|
|
|
return trimmedUserText
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func buildReportBody(p payload, newline, quoteChar string) *bytes.Buffer {
|
2017-05-02 22:09:16 +05:30
|
|
|
var bodyBuf bytes.Buffer
|
2018-09-29 01:15:29 +05:30
|
|
|
fmt.Fprintf(&bodyBuf, "User message:\n\n%s\n\n", p.UserText)
|
2019-04-10 14:06:24 +05:30
|
|
|
var dataKeys []string
|
|
|
|
for k := range p.Data {
|
|
|
|
dataKeys = append(dataKeys, k)
|
|
|
|
}
|
|
|
|
sort.Strings(dataKeys)
|
|
|
|
for _, k := range dataKeys {
|
|
|
|
v := p.Data[k]
|
2021-08-10 22:34:58 +05:30
|
|
|
fmt.Fprintf(&bodyBuf, "%s: %s%s%s%s", k, quoteChar, v, quoteChar, newline)
|
2017-05-02 22:09:16 +05:30
|
|
|
}
|
2020-09-16 14:55:56 +05:30
|
|
|
|
|
|
|
return &bodyBuf
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func buildGenericIssueRequest(p payload, listingURL string) (title, body string) {
|
2021-08-10 22:34:58 +05:30
|
|
|
bodyBuf := buildReportBody(p, " \n", "`")
|
2020-09-16 14:55:56 +05:30
|
|
|
|
|
|
|
// Add log links to the body
|
2021-08-10 22:34:58 +05:30
|
|
|
fmt.Fprintf(bodyBuf, "\n[Logs](%s)", listingURL)
|
2017-05-02 22:23:12 +05:30
|
|
|
|
|
|
|
for _, file := range p.Files {
|
|
|
|
fmt.Fprintf(
|
2020-09-16 14:55:56 +05:30
|
|
|
bodyBuf,
|
2017-05-02 22:23:12 +05:30
|
|
|
" / [%s](%s)",
|
|
|
|
file,
|
|
|
|
listingURL+"/"+file,
|
|
|
|
)
|
|
|
|
}
|
2017-04-12 19:36:40 +05:30
|
|
|
|
2021-08-10 22:34:58 +05:30
|
|
|
title = buildReportTitle(p)
|
|
|
|
|
|
|
|
body = bodyBuf.String()
|
|
|
|
|
|
|
|
return
|
|
|
|
}
|
2020-09-16 14:55:56 +05:30
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func buildGithubIssueRequest(p payload, listingURL string) github.IssueRequest {
|
2021-08-10 22:34:58 +05:30
|
|
|
title, body := buildGenericIssueRequest(p, listingURL)
|
2017-05-04 20:51:09 +05:30
|
|
|
|
|
|
|
labels := p.Labels
|
|
|
|
// go-github doesn't like nils
|
|
|
|
if labels == nil {
|
|
|
|
labels = []string{}
|
|
|
|
}
|
2017-04-13 19:48:20 +05:30
|
|
|
return github.IssueRequest{
|
2017-05-03 15:03:27 +05:30
|
|
|
Title: &title,
|
|
|
|
Body: &body,
|
2017-05-04 20:51:09 +05:30
|
|
|
Labels: &labels,
|
2017-04-07 20:13:19 +05:30
|
|
|
}
|
2017-04-05 23:01:34 +05:30
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func buildGitlabIssueRequest(p payload, listingURL string, labels []string, confidential bool) *gitlab.CreateIssueOptions {
|
2021-08-10 22:34:58 +05:30
|
|
|
title, body := buildGenericIssueRequest(p, listingURL)
|
|
|
|
|
|
|
|
if p.Labels != nil {
|
|
|
|
labels = append(labels, p.Labels...)
|
|
|
|
}
|
|
|
|
|
|
|
|
return &gitlab.CreateIssueOptions{
|
|
|
|
Title: &title,
|
|
|
|
Description: &body,
|
|
|
|
Confidential: &confidential,
|
|
|
|
Labels: labels,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-13 17:51:46 +05:30
|
|
|
func (s *submitServer) sendEmail(p payload, reportDir string) error {
|
2020-09-16 14:55:56 +05:30
|
|
|
if len(s.cfg.EmailAddresses) == 0 {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
e := email.NewEmail()
|
|
|
|
|
|
|
|
e.From = "Rageshake <rageshake@matrix.org>"
|
|
|
|
if s.cfg.EmailFrom != "" {
|
|
|
|
e.From = s.cfg.EmailFrom
|
|
|
|
}
|
|
|
|
|
|
|
|
e.To = s.cfg.EmailAddresses
|
|
|
|
|
|
|
|
e.Subject = fmt.Sprintf("[%s] %s", p.AppName, buildReportTitle(p))
|
|
|
|
|
2021-08-10 22:34:58 +05:30
|
|
|
e.Text = buildReportBody(p, "\n", "\"").Bytes()
|
2020-09-16 14:55:56 +05:30
|
|
|
|
|
|
|
allFiles := append(p.Files, p.Logs...)
|
|
|
|
for _, file := range allFiles {
|
|
|
|
fullPath := filepath.Join(reportDir, file)
|
|
|
|
e.AttachFile(fullPath)
|
|
|
|
}
|
|
|
|
|
|
|
|
var auth smtp.Auth = nil
|
|
|
|
if s.cfg.SMTPPassword != "" || s.cfg.SMTPUsername != "" {
|
|
|
|
auth = smtp.PlainAuth("", s.cfg.SMTPUsername, s.cfg.SMTPPassword, s.cfg.SMTPServer)
|
|
|
|
}
|
|
|
|
err := e.Send(s.cfg.SMTPServer, auth)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-04-05 23:01:34 +05:30
|
|
|
func respond(code int, w http.ResponseWriter) {
|
|
|
|
w.WriteHeader(code)
|
|
|
|
w.Write([]byte("{}"))
|
|
|
|
}
|
|
|
|
|
|
|
|
func gzipAndSave(data []byte, dirname, fpath string) error {
|
2017-05-02 22:23:12 +05:30
|
|
|
fpath = filepath.Join(dirname, fpath)
|
2017-04-05 23:01:34 +05:30
|
|
|
|
|
|
|
if _, err := os.Stat(fpath); err == nil {
|
|
|
|
return fmt.Errorf("file already exists") // the user can just retry
|
|
|
|
}
|
|
|
|
var b bytes.Buffer
|
|
|
|
gz := gzip.NewWriter(&b)
|
|
|
|
if _, err := gz.Write(data); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if err := gz.Flush(); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if err := gz.Close(); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if err := ioutil.WriteFile(fpath, b.Bytes(), 0644); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|