Add support for attaching files to reports (#14)
* Create report dir before reading POST body ... so that we can stream uploaded files straight to disk, instead of reading them into memory first. * Add support for attaching files to reports If a 'file' parameter is provided, save it as a file on disk. Also include a link to it from the github issue.
This commit is contained in:
parent
9bbdf64e5f
commit
530fcd69db
4 changed files with 254 additions and 42 deletions
|
@ -64,6 +64,13 @@ logs.)
|
|||
|
||||
Compressed logs are not supported for the JSON upload encoding.
|
||||
|
||||
* `file`: an arbitrary file to attach to the report. Saved as-is to disk, and
|
||||
a link is added to the GitHub issue. The filename must be in the format
|
||||
`name.ext`, where `name` contains only alphanumerics, `-` or `_`, and `ext`
|
||||
is one of `jpg`, `png`, or `txt`.
|
||||
|
||||
Not supported for the JSON upload encoding.
|
||||
|
||||
* Any other form field names are interpreted as arbitrary name/value strings to
|
||||
include in the `details.log.gz` file.
|
||||
|
||||
|
|
|
@ -76,13 +76,53 @@ func serveFile(w http.ResponseWriter, r *http.Request, path string) {
|
|||
return
|
||||
}
|
||||
|
||||
// if it's a directory, or doesn't look like a gzip, serve as normal
|
||||
if d.IsDir() || !strings.HasSuffix(path, ".gz") {
|
||||
// for anti-XSS belt-and-braces, set a very restrictive CSP
|
||||
w.Header().Set("Content-Security-Policy", "default-src: none")
|
||||
|
||||
// if it's a directory, serve a listing
|
||||
if d.IsDir() {
|
||||
log.Println("Serving", path)
|
||||
http.ServeFile(w, r, path)
|
||||
return
|
||||
}
|
||||
|
||||
// if it's a gzipped log file, serve it as text
|
||||
if strings.HasSuffix(path, ".gz") {
|
||||
serveGzippedFile(w, r, path, d.Size())
|
||||
return
|
||||
}
|
||||
|
||||
// otherwise, limit ourselves to a number of known-safe content-types, to
|
||||
// guard against XSS vulnerabilities.
|
||||
// http.serveFile preserves the content-type header if one is already set.
|
||||
w.Header().Set("Content-Type", extensionToMimeType(path))
|
||||
|
||||
http.ServeFile(w, r, path)
|
||||
}
|
||||
|
||||
// extensionToMimeType returns a suitable mime type for the given filename
//
// Unlike mime.TypeByExtension, the results are limited to a set of types which
// should be safe to serve to a browser without introducing XSS vulnerabilities.
func extensionToMimeType(path string) string {
	switch {
	case strings.HasSuffix(path, ".txt"):
		// anyone uploading text in anything other than utf-8 needs to be
		// re-educated.
		return "text/plain; charset=utf-8"
	case strings.HasSuffix(path, ".png"):
		return "image/png"
	case strings.HasSuffix(path, ".jpg"):
		return "image/jpeg"
	default:
		// anything unrecognised is served as an opaque download, which a
		// browser will not try to render or execute
		return "application/octet-stream"
	}
}
|
||||
|
||||
func serveGzippedFile(w http.ResponseWriter, r *http.Request, path string, size int64) {
|
||||
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
|
||||
|
||||
acceptsGzip := false
|
||||
|
@ -97,7 +137,7 @@ func serveFile(w http.ResponseWriter, r *http.Request, path string) {
|
|||
}
|
||||
|
||||
if acceptsGzip {
|
||||
serveGzip(w, r, path, d.Size())
|
||||
serveGzip(w, r, path, size)
|
||||
} else {
|
||||
serveUngzipped(w, r, path)
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import (
|
|||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
@ -57,6 +58,7 @@ type payload struct {
|
|||
UserAgent string `json:"user_agent"`
|
||||
Logs []logEntry `json:"logs"`
|
||||
Data map[string]string `json:"data"`
|
||||
Files []string
|
||||
}
|
||||
|
||||
type logEntry struct {
|
||||
|
@ -83,13 +85,32 @@ func (s *submitServer) ServeHTTP(w http.ResponseWriter, req *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
p := parseRequest(w, req)
|
||||
if p == nil {
|
||||
// parseRequest already wrote an error
|
||||
// create the report dir before parsing the request, so that we can dump
|
||||
// files straight in
|
||||
t := time.Now().UTC()
|
||||
prefix := t.Format("2006-01-02/150405")
|
||||
reportDir := filepath.Join("bugs", prefix)
|
||||
if err := os.MkdirAll(reportDir, os.ModePerm); err != nil {
|
||||
log.Println("Unable to create report directory", err)
|
||||
http.Error(w, "Internal error", 500)
|
||||
return
|
||||
}
|
||||
|
||||
resp, err := s.saveReport(req.Context(), *p)
|
||||
listingURL := s.apiPrefix + "/listing/" + prefix
|
||||
log.Println("Handling report submission; listing URI will be", listingURL)
|
||||
|
||||
p := parseRequest(w, req, reportDir)
|
||||
if p == nil {
|
||||
// parseRequest already wrote an error, but now let's delete the
|
||||
// useless report dir
|
||||
if err := os.RemoveAll(reportDir); err != nil {
|
||||
log.Printf("Unable to remove report dir %s after invalid upload: %v\n",
|
||||
reportDir, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
resp, err := s.saveReport(req.Context(), *p, reportDir, listingURL)
|
||||
if err != nil {
|
||||
log.Println("Error handling report", err)
|
||||
http.Error(w, "Internal error", 500)
|
||||
|
@ -103,7 +124,7 @@ func (s *submitServer) ServeHTTP(w http.ResponseWriter, req *http.Request) {
|
|||
|
||||
// parseRequest attempts to parse a received request as a bug report. If
|
||||
// the request cannot be parsed, it responds with an error and returns nil.
|
||||
func parseRequest(w http.ResponseWriter, req *http.Request) *payload {
|
||||
func parseRequest(w http.ResponseWriter, req *http.Request, reportDir string) *payload {
|
||||
length, err := strconv.Atoi(req.Header.Get("Content-Length"))
|
||||
if err != nil {
|
||||
log.Println("Couldn't parse content-length", err)
|
||||
|
@ -120,7 +141,7 @@ func parseRequest(w http.ResponseWriter, req *http.Request) *payload {
|
|||
if contentType != "" {
|
||||
d, _, _ := mime.ParseMediaType(contentType)
|
||||
if d == "multipart/form-data" {
|
||||
p, err1 := parseMultipartRequest(w, req)
|
||||
p, err1 := parseMultipartRequest(w, req, reportDir)
|
||||
if err1 != nil {
|
||||
log.Println("Error parsing multipart data", err1)
|
||||
http.Error(w, "Bad multipart data", 400)
|
||||
|
@ -177,7 +198,7 @@ func parseJSONRequest(w http.ResponseWriter, req *http.Request) (*payload, error
|
|||
return &p, nil
|
||||
}
|
||||
|
||||
func parseMultipartRequest(w http.ResponseWriter, req *http.Request) (*payload, error) {
|
||||
func parseMultipartRequest(w http.ResponseWriter, req *http.Request, reportDir string) (*payload, error) {
|
||||
rdr, err := req.MultipartReader()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -196,14 +217,14 @@ func parseMultipartRequest(w http.ResponseWriter, req *http.Request) (*payload,
|
|||
return nil, err
|
||||
}
|
||||
|
||||
if err = parseFormPart(part, &p); err != nil {
|
||||
if err = parseFormPart(part, &p, reportDir); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return &p, nil
|
||||
}
|
||||
|
||||
func parseFormPart(part *multipart.Part, p *payload) error {
|
||||
func parseFormPart(part *multipart.Part, p *payload, reportDir string) error {
|
||||
defer part.Close()
|
||||
field := part.FormName()
|
||||
|
||||
|
@ -220,6 +241,16 @@ func parseFormPart(part *multipart.Part, p *payload) error {
|
|||
// read the field data directly from the multipart part
|
||||
partReader = part
|
||||
}
|
||||
|
||||
if field == "file" {
|
||||
leafName, err := saveFormPart(part.FileName(), partReader, reportDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
p.Files = append(p.Files, leafName)
|
||||
return nil
|
||||
}
|
||||
|
||||
b, err := ioutil.ReadAll(partReader)
|
||||
if err != nil {
|
||||
return err
|
||||
|
@ -235,6 +266,8 @@ func parseFormPart(part *multipart.Part, p *payload) error {
|
|||
} else if field == "user_agent" {
|
||||
p.UserAgent = data
|
||||
} else if field == "log" || field == "compressed-log" {
|
||||
// todo: we could save the log directly rather than pointlessly
|
||||
// unzipping and re-zipping.
|
||||
p.Logs = append(p.Logs, logEntry{
|
||||
ID: part.FileName(),
|
||||
Lines: data,
|
||||
|
@ -245,20 +278,48 @@ func parseFormPart(part *multipart.Part, p *payload) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (s *submitServer) saveReport(ctx context.Context, p payload) (*submitResponse, error) {
|
||||
// we use a quite restrictive regexp for the filenames; in particular:
|
||||
//
|
||||
// * a limited set of extensions. We are careful to limit the content-types
|
||||
// we will serve the files with, but somebody might accidentally point an
|
||||
// Apache or nginx at the upload directory, which would serve js files as
|
||||
// application/javascript and open XSS vulnerabilities.
|
||||
//
|
||||
// * no silly characters (/, ctrl chars, etc)
|
||||
//
|
||||
// * nothing starting with '.'
|
||||
var filenameRegexp = regexp.MustCompile(`^[a-zA-Z0-9_-]+\.(jpg|png|txt)$`)
|
||||
|
||||
// saveFormPart saves a file upload to the report directory.
|
||||
//
|
||||
// Returns the leafname of the saved file.
|
||||
func saveFormPart(leafName string, reader io.Reader, reportDir string) (string, error) {
|
||||
|
||||
if !filenameRegexp.MatchString(leafName) {
|
||||
return "", fmt.Errorf("Invalid upload filename")
|
||||
}
|
||||
|
||||
fullName := filepath.Join(reportDir, leafName)
|
||||
|
||||
log.Println("Saving uploaded file", leafName, "to", fullName)
|
||||
|
||||
f, err := os.Create(fullName)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
_, err = io.Copy(f, reader)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return leafName, nil
|
||||
}
|
||||
|
||||
func (s *submitServer) saveReport(ctx context.Context, p payload, reportDir, listingURL string) (*submitResponse, error) {
|
||||
resp := submitResponse{}
|
||||
|
||||
// Dump bug report to disk as form:
|
||||
// "bugreport-20170115-112233.log.gz" => user text, version, user agent, # logs
|
||||
// "bugreport-20170115-112233-0.log.gz" => most recent log
|
||||
// "bugreport-20170115-112233-1.log.gz" => ...
|
||||
// "bugreport-20170115-112233-N.log.gz" => oldest log
|
||||
t := time.Now().UTC()
|
||||
prefix := t.Format("2006-01-02/150405")
|
||||
listingURL := s.apiPrefix + "/listing/" + prefix
|
||||
|
||||
log.Println("Handling report submission; listing URI will be", listingURL)
|
||||
|
||||
var summaryBuf bytes.Buffer
|
||||
fmt.Fprintf(
|
||||
&summaryBuf,
|
||||
|
@ -268,12 +329,12 @@ func (s *submitServer) saveReport(ctx context.Context, p payload) (*submitRespon
|
|||
for k, v := range p.Data {
|
||||
fmt.Fprintf(&summaryBuf, "%s: %s\n", k, v)
|
||||
}
|
||||
if err := gzipAndSave(summaryBuf.Bytes(), prefix, "details.log.gz"); err != nil {
|
||||
if err := gzipAndSave(summaryBuf.Bytes(), reportDir, "details.log.gz"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for i, log := range p.Logs {
|
||||
if err := gzipAndSave([]byte(log.Lines), prefix, fmt.Sprintf("logs-%04d.log.gz", i)); err != nil {
|
||||
if err := gzipAndSave([]byte(log.Lines), reportDir, fmt.Sprintf("logs-%04d.log.gz", i)); err != nil {
|
||||
return nil, err // TODO: Rollback?
|
||||
}
|
||||
}
|
||||
|
@ -331,9 +392,18 @@ func buildGithubIssueRequest(p payload, listingURL string) github.IssueRequest {
|
|||
if p.Version != "" {
|
||||
fmt.Fprintf(&bodyBuf, "Version: `%s`\n", p.Version)
|
||||
}
|
||||
fmt.Fprintf(&bodyBuf, "[Logs](%s)\n", listingURL)
|
||||
body := bodyBuf.String()
|
||||
fmt.Fprintf(&bodyBuf, "[Logs](%s)", listingURL)
|
||||
|
||||
for _, file := range p.Files {
|
||||
fmt.Fprintf(
|
||||
&bodyBuf,
|
||||
" / [%s](%s)",
|
||||
file,
|
||||
listingURL+"/"+file,
|
||||
)
|
||||
}
|
||||
|
||||
body := bodyBuf.String()
|
||||
return github.IssueRequest{
|
||||
Title: &title,
|
||||
Body: &body,
|
||||
|
@ -346,8 +416,7 @@ func respond(code int, w http.ResponseWriter) {
|
|||
}
|
||||
|
||||
func gzipAndSave(data []byte, dirname, fpath string) error {
|
||||
_ = os.MkdirAll(filepath.Join("bugs", dirname), os.ModePerm)
|
||||
fpath = filepath.Join("bugs", dirname, fpath)
|
||||
fpath = filepath.Join(dirname, fpath)
|
||||
|
||||
if _, err := os.Stat(fpath); err == nil {
|
||||
return fmt.Errorf("file already exists") // the user can just retry
|
||||
|
|
|
@ -17,8 +17,11 @@ limitations under the License.
|
|||
package main
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
|
@ -26,7 +29,10 @@ import (
|
|||
|
||||
// testParsePayload builds a /submit request with the given body, and calls
|
||||
// parseRequest with it.
|
||||
func testParsePayload(t *testing.T, body, contentType string) (p *payload) {
|
||||
//
|
||||
// if tempDir is empty, a new temp dir is created, and deleted when the test
|
||||
// completes.
|
||||
func testParsePayload(t *testing.T, body, contentType string, tempDir string) (*payload, *http.Response) {
|
||||
req, err := http.NewRequest("POST", "/api/submit", strings.NewReader(body))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
|
@ -35,20 +41,26 @@ func testParsePayload(t *testing.T, body, contentType string) (p *payload) {
|
|||
if contentType != "" {
|
||||
req.Header.Set("Content-Type", contentType)
|
||||
}
|
||||
rr := httptest.NewRecorder()
|
||||
p = parseRequest(rr, req)
|
||||
|
||||
if p == nil {
|
||||
t.Error("parseRequest returned nil")
|
||||
// temporary dir for the uploaded files
|
||||
if tempDir == "" {
|
||||
tempDir = mkTempDir(t)
|
||||
defer os.RemoveAll(tempDir)
|
||||
}
|
||||
return
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
p := parseRequest(rr, req, tempDir)
|
||||
return p, rr.Result()
|
||||
}
|
||||
|
||||
func TestEmptyJson(t *testing.T) {
|
||||
body := "{}"
|
||||
|
||||
// we just test it is parsed without errors for now
|
||||
testParsePayload(t, body, "application/json")
|
||||
p, _ := testParsePayload(t, body, "application/json", "")
|
||||
if p == nil {
|
||||
t.Fatal("parseRequest returned nil")
|
||||
}
|
||||
}
|
||||
|
||||
// check that we can unpick the json submitted by the android clients
|
||||
|
@ -57,7 +69,10 @@ func TestUnpickAndroidMangling(t *testing.T) {
|
|||
"version": "User : @ylc8001:matrix.org\nPhone : Lenovo P2a42\nVector version: 0:6:9\n",
|
||||
"user_agent": "Android"
|
||||
}`
|
||||
p := testParsePayload(t, body, "")
|
||||
p, _ := testParsePayload(t, body, "", "")
|
||||
if p == nil {
|
||||
t.Fatal("parseRequest returned nil")
|
||||
}
|
||||
if p.Text != "test ylc 001" {
|
||||
t.Errorf("user text: got %s, want %s", p.Text, "test ylc 001")
|
||||
}
|
||||
|
@ -79,7 +94,35 @@ func TestUnpickAndroidMangling(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestMultipartUpload(t *testing.T) {
|
||||
body := `------WebKitFormBoundarySsdgl8Nq9voFyhdO
|
||||
reportDir := mkTempDir(t)
|
||||
defer os.RemoveAll(reportDir)
|
||||
|
||||
p, _ := testParsePayload(t, multipartBody(),
|
||||
"multipart/form-data; boundary=----WebKitFormBoundarySsdgl8Nq9voFyhdO",
|
||||
reportDir,
|
||||
)
|
||||
|
||||
if p == nil {
|
||||
t.Fatal("parseRequest returned nil")
|
||||
}
|
||||
|
||||
checkParsedMultipartUpload(t, p)
|
||||
|
||||
// check file uploaded correctly
|
||||
dat, err := ioutil.ReadFile(filepath.Join(reportDir, "passwd.txt"))
|
||||
if err != nil {
|
||||
t.Error("unable to read uploaded file", err)
|
||||
} else {
|
||||
datstr := string(dat)
|
||||
wanted := "bibblybobbly"
|
||||
if datstr != wanted {
|
||||
t.Errorf("File contents: got %s, want %s", datstr, wanted)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func multipartBody() (body string) {
|
||||
body = `------WebKitFormBoundarySsdgl8Nq9voFyhdO
|
||||
Content-Disposition: form-data; name="text"
|
||||
|
||||
test words.
|
||||
|
@ -121,10 +164,21 @@ Content-Type: application/octet-stream
|
|||
body += string([]byte{
|
||||
0x1f, 0x8b, 0x08, 0x00, 0xbf, 0xd8, 0xf5, 0x58, 0x00, 0x03,
|
||||
0x2b, 0x49, 0x2d, 0x2e, 0xe1, 0x02, 0x00,
|
||||
0xc6, 0x35, 0xb9, 0x3b, 0x05, 0x00, 0x00, 0x00})
|
||||
body += "\n------WebKitFormBoundarySsdgl8Nq9voFyhdO--\n"
|
||||
0xc6, 0x35, 0xb9, 0x3b, 0x05, 0x00, 0x00, 0x00,
|
||||
0x0a,
|
||||
})
|
||||
|
||||
p := testParsePayload(t, body, "multipart/form-data; boundary=----WebKitFormBoundarySsdgl8Nq9voFyhdO")
|
||||
body += `------WebKitFormBoundarySsdgl8Nq9voFyhdO
|
||||
Content-Disposition: form-data; name="file"; filename="passwd.txt"
|
||||
Content-Type: application/octet-stream
|
||||
|
||||
bibblybobbly
|
||||
`
|
||||
body += "------WebKitFormBoundarySsdgl8Nq9voFyhdO--\n"
|
||||
return
|
||||
}
|
||||
|
||||
func checkParsedMultipartUpload(t *testing.T, p *payload) {
|
||||
wanted := "test words."
|
||||
if p.Text != wanted {
|
||||
t.Errorf("User text: got %s, want %s", p.Text, wanted)
|
||||
|
@ -160,3 +214,45 @@ Content-Type: application/octet-stream
|
|||
t.Errorf("Log 2: got %s, want %s", p.Logs[2].Lines, wanted)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEmptyFilename(t *testing.T) {
|
||||
body := `------WebKitFormBoundarySsdgl8Nq9voFyhdO
|
||||
Content-Disposition: form-data; name="file"
|
||||
|
||||
file
|
||||
------WebKitFormBoundarySsdgl8Nq9voFyhdO--
|
||||
`
|
||||
p, resp := testParsePayload(t, body, "multipart/form-data; boundary=----WebKitFormBoundarySsdgl8Nq9voFyhdO", "")
|
||||
if p != nil {
|
||||
t.Error("parsePayload accepted upload with no filename")
|
||||
}
|
||||
|
||||
if resp.StatusCode != 400 {
|
||||
t.Errorf("response code: got %v, want %v", resp.StatusCode, 400)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBadFilename(t *testing.T) {
|
||||
body := `------WebKitFormBoundarySsdgl8Nq9voFyhdO
|
||||
Content-Disposition: form-data; name="file"; filename="etc/passwd"
|
||||
|
||||
file
|
||||
------WebKitFormBoundarySsdgl8Nq9voFyhdO--
|
||||
`
|
||||
p, resp := testParsePayload(t, body, "multipart/form-data; boundary=----WebKitFormBoundarySsdgl8Nq9voFyhdO", "")
|
||||
if p != nil {
|
||||
t.Error("parsePayload accepted upload with bad filename")
|
||||
}
|
||||
|
||||
if resp.StatusCode != 400 {
|
||||
t.Errorf("response code: got %v, want %v", resp.StatusCode, 400)
|
||||
}
|
||||
}
|
||||
|
||||
func mkTempDir(t *testing.T) string {
|
||||
td, err := ioutil.TempDir("", "rageshake_test")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return td
|
||||
}
|
||||
|
|
Reference in a new issue