sort data fields lexicographically (#25)

* sort data fields lexicographically

  Fixes #23

* remove redundant error check

  This doesn't look nice with the rest of the code. I think error handling on
  writes can be ignored here or addressed in a separate PR.
parent 10c36dc480
commit 6b3f2b4e5f
2 changed files with 100 additions and 26 deletions
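For context, the whole change boils down to one pattern: Go map iteration order is randomized, so the code now collects the Data keys, sorts them, and only then writes the key/value lines, giving deterministic output. Below is a minimal standalone sketch of that pattern; writeSorted and the sample values are illustrative only, not part of the patch.

package main

import (
    "fmt"
    "io"
    "os"
    "sort"
)

// writeSorted writes "key: value" lines in lexicographic key order.
// Without the sort, ranging over the map directly would print the
// lines in a different order on every run.
func writeSorted(out io.Writer, data map[string]string) {
    keys := make([]string, 0, len(data))
    for k := range data {
        keys = append(keys, k)
    }
    sort.Strings(keys) // byte-wise order: capitalised keys sort first
    for _, k := range keys {
        fmt.Fprintf(out, "%s: %s\n", k, data[k])
    }
}

func main() {
    writeSorted(os.Stdout, map[string]string{
        "user_id":    "id",
        "Version":    "1",
        "User-Agent": "xxx",
        "device_id":  "id",
    })
    // Always prints:
    // User-Agent: xxx
    // Version: 1
    // device_id: id
    // user_id: id
}

Because sort.Strings compares byte-wise, capitalised keys such as User-Agent and Version come before device_id and user_id, which is the order the new test in the diff expects.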
@@ -22,7 +22,6 @@ import (
     "context"
     "encoding/json"
     "fmt"
-    "github.com/google/go-github/github"
     "io"
     "io/ioutil"
     "log"
@@ -32,9 +31,12 @@ import (
     "os"
     "path/filepath"
     "regexp"
+    "sort"
     "strconv"
     "strings"
     "time"
+
+    "github.com/google/go-github/github"
 )

 var maxPayloadSize = 1024 * 1024 * 55 // 55 MB
@@ -79,6 +81,37 @@ type parsedPayload struct {
     FileErrors []string
 }

+func (p parsedPayload) WriteTo(out io.Writer) {
+    fmt.Fprintf(
+        out,
+        "%s\n\nNumber of logs: %d\nApplication: %s\n",
+        p.UserText, len(p.Logs), p.AppName,
+    )
+    fmt.Fprintf(out, "Labels: %s\n", strings.Join(p.Labels, ", "))
+
+    var dataKeys []string
+    for k := range p.Data {
+        dataKeys = append(dataKeys, k)
+    }
+    sort.Strings(dataKeys)
+    for _, k := range dataKeys {
+        v := p.Data[k]
+        fmt.Fprintf(out, "%s: %s\n", k, v)
+    }
+    if len(p.LogErrors) > 0 {
+        fmt.Fprint(out, "Log upload failures:\n")
+        for _, e := range p.LogErrors {
+            fmt.Fprintf(out, " %s\n", e)
+        }
+    }
+    if len(p.FileErrors) > 0 {
+        fmt.Fprint(out, "Attachment upload failures:\n")
+        for _, e := range p.FileErrors {
+            fmt.Fprintf(out, " %s\n", e)
+        }
+    }
+}
+
 type submitResponse struct {
     ReportURL string `json:"report_url,omitempty"`
 }
@@ -420,31 +453,9 @@ func saveLogPart(logNum int, filename string, reader io.Reader, reportDir string
 }

 func (s *submitServer) saveReport(ctx context.Context, p parsedPayload, reportDir, listingURL string) (*submitResponse, error) {
-    resp := submitResponse{}
-
     var summaryBuf bytes.Buffer
-    fmt.Fprintf(
-        &summaryBuf,
-        "%s\n\nNumber of logs: %d\nApplication: %s\n",
-        p.UserText, len(p.Logs), p.AppName,
-    )
-    fmt.Fprintf(&summaryBuf, "Labels: %s\n", strings.Join(p.Labels, ", "))
-    for k, v := range p.Data {
-        fmt.Fprintf(&summaryBuf, "%s: %s\n", k, v)
-    }
-    if len(p.LogErrors) > 0 {
-        fmt.Fprint(&summaryBuf, "Log upload failures:\n")
-        for _, e := range p.LogErrors {
-            fmt.Fprintf(&summaryBuf, " %s\n", e)
-        }
-    }
-    if len(p.FileErrors) > 0 {
-        fmt.Fprint(&summaryBuf, "Attachment upload failures:\n")
-        for _, e := range p.FileErrors {
-            fmt.Fprintf(&summaryBuf, " %s\n", e)
-        }
-    }
-
+    resp := submitResponse{}
+    p.WriteTo(&summaryBuf)
     if err := gzipAndSave(summaryBuf.Bytes(), reportDir, "details.log.gz"); err != nil {
         return nil, err
     }
@@ -497,7 +508,13 @@ func buildGithubIssueRequest(p parsedPayload, listingURL string) github.IssueReq

     var bodyBuf bytes.Buffer
     fmt.Fprintf(&bodyBuf, "User message:\n\n%s\n\n", p.UserText)
-    for k, v := range p.Data {
+    var dataKeys []string
+    for k := range p.Data {
+        dataKeys = append(dataKeys, k)
+    }
+    sort.Strings(dataKeys)
+    for _, k := range dataKeys {
+        v := p.Data[k]
         fmt.Fprintf(&bodyBuf, "%s: `%s`\n", k, v)
     }
     fmt.Fprintf(&bodyBuf, "[Logs](%s)", listingURL)
@@ -17,6 +17,7 @@ limitations under the License.
 package main

 import (
+    "bytes"
     "compress/gzip"
     "io"
     "io/ioutil"
@ -426,3 +427,59 @@ Content-Disposition: form-data; name="text"
|
||||||
t.Errorf("Body: got %s, want %s", *issueReq.Body, expectedBody)
|
t.Errorf("Body: got %s, want %s", *issueReq.Body, expectedBody)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTestSortDataKeys(t *testing.T) {
|
||||||
|
expect := `
|
||||||
|
Number of logs: 0
|
||||||
|
Application:
|
||||||
|
Labels:
|
||||||
|
User-Agent: xxx
|
||||||
|
Version: 1
|
||||||
|
device_id: id
|
||||||
|
user_id: id
|
||||||
|
`
|
||||||
|
expect = strings.TrimSpace(expect)
|
||||||
|
sample := []struct {
|
||||||
|
data map[string]string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
map[string]string{
|
||||||
|
"Version": "1",
|
||||||
|
"User-Agent": "xxx",
|
||||||
|
"user_id": "id",
|
||||||
|
"device_id": "id",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
map[string]string{
|
||||||
|
"user_id": "id",
|
||||||
|
"device_id": "id",
|
||||||
|
"Version": "1",
|
||||||
|
"User-Agent": "xxx",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
var buf bytes.Buffer
|
||||||
|
for _, v := range sample {
|
||||||
|
p := parsedPayload{Data: v.data}
|
||||||
|
buf.Reset()
|
||||||
|
p.WriteTo(&buf)
|
||||||
|
got := strings.TrimSpace(buf.String())
|
||||||
|
if got != expect {
|
||||||
|
t.Errorf("expected %s got %s", expect, got)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for k, v := range sample {
|
||||||
|
p := parsedPayload{Data: v.data}
|
||||||
|
res := buildGithubIssueRequest(p, "")
|
||||||
|
got := *res.Body
|
||||||
|
if k == 0 {
|
||||||
|
expect = got
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if got != expect {
|
||||||
|
t.Errorf("expected %s got %s", expect, got)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
Reference in a new issue