diff --git a/dashboard/app/app.yaml b/dashboard/app/app.yaml
index 389aa9017f8..8424cd0cdd9 100644
--- a/dashboard/app/app.yaml
+++ b/dashboard/app/app.yaml
@@ -13,8 +13,9 @@ handlers:
static_dir: static
- url: /(|gccgo/)log/.+
script: _go_app
-- url: /(|gccgo/)(|commit|packages|result|tag|todo)
+- url: /(|gccgo/)(|commit|packages|result|perf-result|tag|todo|perf|perfdetail|perfgraph|updatebenchmark)
script: _go_app
-- url: /(|gccgo/)(init|buildtest|key|_ah/queue/go/delay)
+- url: /(|gccgo/)(init|buildtest|key|perflearn|_ah/queue/go/delay)
script: _go_app
- login: admin
\ No newline at end of file
+ login: admin
+
diff --git a/dashboard/app/build/perf_changes.go b/dashboard/app/build/perf_changes.go
new file mode 100644
index 00000000000..701d07a343f
--- /dev/null
+++ b/dashboard/app/build/perf_changes.go
@@ -0,0 +1,251 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build appengine
+
+package build
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net/http"
+ "sort"
+ "strconv"
+
+ "appengine"
+ "appengine/datastore"
+)
+
+func init() {
+ http.HandleFunc("/perf", perfChangesHandler)
+}
+
+// perfChangesHandler draws the main benchmarking page.
+func perfChangesHandler(w http.ResponseWriter, r *http.Request) {
+ d := dashboardForRequest(r)
+ c := d.Context(appengine.NewContext(r))
+
+ page, _ := strconv.Atoi(r.FormValue("page"))
+ if page < 0 {
+ page = 0
+ }
+
+ pc, err := GetPerfConfig(c, r)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ commits, err := dashPerfCommits(c, page)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ // Fetch PerfResults for the commits.
+ var uiCommits []*perfChangesCommit
+ rc := MakePerfResultCache(c, commits[0], false)
+
+ // But first compare tip with the last release.
+ if page == 0 {
+ res0 := &PerfResult{CommitHash: knownTags[lastRelease]}
+ if err := datastore.Get(c, res0.Key(c), res0); err != nil && err != datastore.ErrNoSuchEntity {
+ logErr(w, r, fmt.Errorf("getting PerfResult: %v", err))
+ return
+ }
+ if err != datastore.ErrNoSuchEntity {
+ uiCom, err := handleOneCommit(pc, commits[0], rc, res0)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+ uiCom.IsSummary = true
+ uiCom.ParentHash = lastRelease
+ uiCommits = append(uiCommits, uiCom)
+ }
+ }
+
+ for _, com := range commits {
+ uiCom, err := handleOneCommit(pc, com, rc, nil)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+ uiCommits = append(uiCommits, uiCom)
+ }
+
+ p := &Pagination{}
+ if len(commits) == commitsPerPage {
+ p.Next = page + 1
+ }
+ if page > 0 {
+ p.Prev = page - 1
+ p.HasPrev = true
+ }
+
+ data := &perfChangesData{d, p, uiCommits}
+
+ var buf bytes.Buffer
+ if err := perfChangesTemplate.Execute(&buf, data); err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ buf.WriteTo(w)
+}
+
+func handleOneCommit(pc *PerfConfig, com *Commit, rc *PerfResultCache, baseRes *PerfResult) (*perfChangesCommit, error) {
+ uiCom := new(perfChangesCommit)
+ uiCom.Commit = com
+ res1 := rc.Get(com.Num)
+ for builder, benchmarks1 := range res1.ParseData() {
+ for benchmark, data1 := range benchmarks1 {
+ if benchmark != "meta-done" || !data1.OK {
+ uiCom.NumResults++
+ }
+ if !data1.OK {
+ v := new(perfChangesChange)
+ v.diff = 10000
+ v.Style = "fail"
+ v.Builder = builder
+ v.Link = fmt.Sprintf("log/%v", data1.Artifacts["log"])
+ v.Val = builder
+ v.Hint = builder
+ if benchmark != "meta-done" {
+ v.Hint += "/" + benchmark
+ }
+ m := findMetric(uiCom, "failure")
+ m.BadChanges = append(m.BadChanges, v)
+ }
+ }
+ res0 := baseRes
+ if res0 == nil {
+ var err error
+ res0, err = rc.NextForComparison(com.Num, builder)
+ if err != nil {
+ return nil, err
+ }
+ if res0 == nil {
+ continue
+ }
+ }
+ changes := significantPerfChanges(pc, builder, res0, res1)
+ for _, ch := range changes {
+ v := new(perfChangesChange)
+ v.Builder = builder
+ v.Benchmark, v.Procs = splitBench(ch.bench)
+ v.diff = ch.diff
+ v.Val = fmt.Sprintf("%+.2f%%", ch.diff)
+ v.Hint = fmt.Sprintf("%v/%v", builder, ch.bench)
+ v.Link = fmt.Sprintf("perfdetail?commit=%v&commit0=%v&builder=%v&benchmark=%v", com.Hash, res0.CommitHash, builder, v.Benchmark)
+ m := findMetric(uiCom, ch.metric)
+ if v.diff > 0 {
+ v.Style = "bad"
+ m.BadChanges = append(m.BadChanges, v)
+ } else {
+ v.Style = "good"
+ m.GoodChanges = append(m.GoodChanges, v)
+ }
+ }
+ }
+
+ // Sort metrics and changes.
+ for _, m := range uiCom.Metrics {
+ sort.Sort(m.GoodChanges)
+ sort.Sort(m.BadChanges)
+ }
+ sort.Sort(uiCom.Metrics)
+ // Need at least one metric for UI.
+ if len(uiCom.Metrics) == 0 {
+ uiCom.Metrics = append(uiCom.Metrics, &perfChangesMetric{})
+ }
+ uiCom.Metrics[0].First = true
+ return uiCom, nil
+}
+
+func findMetric(c *perfChangesCommit, metric string) *perfChangesMetric {
+ for _, m := range c.Metrics {
+ if m.Name == metric {
+ return m
+ }
+ }
+ m := new(perfChangesMetric)
+ m.Name = metric
+ c.Metrics = append(c.Metrics, m)
+ return m
+}
+
+type uiPerfConfig struct {
+ Builders []uiPerfConfigElem
+ Benchmarks []uiPerfConfigElem
+ Metrics []uiPerfConfigElem
+ Procs []uiPerfConfigElem
+}
+
+type uiPerfConfigElem struct {
+ Name string
+ Selected bool
+}
+
+var perfChangesTemplate = template.Must(
+ template.New("perf_changes.html").Funcs(tmplFuncs).ParseFiles("build/perf_changes.html"),
+)
+
+type perfChangesData struct {
+ Dashboard *Dashboard
+ Pagination *Pagination
+ Commits []*perfChangesCommit
+}
+
+type perfChangesCommit struct {
+ *Commit
+ IsSummary bool
+ NumResults int
+ Metrics perfChangesMetricSlice
+}
+
+type perfChangesMetric struct {
+ Name string
+ First bool
+ BadChanges perfChangesChangeSlice
+ GoodChanges perfChangesChangeSlice
+}
+
+type perfChangesChange struct {
+ Builder string
+ Benchmark string
+ Link string
+ Hint string
+ Style string
+ Val string
+ Procs int
+ diff float64
+}
+
+type perfChangesMetricSlice []*perfChangesMetric
+
+func (l perfChangesMetricSlice) Len() int { return len(l) }
+func (l perfChangesMetricSlice) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+func (l perfChangesMetricSlice) Less(i, j int) bool {
+ if l[i].Name == "failure" || l[j].Name == "failure" {
+ return l[i].Name == "failure"
+ }
+ return l[i].Name < l[j].Name
+}
+
+type perfChangesChangeSlice []*perfChangesChange
+
+func (l perfChangesChangeSlice) Len() int { return len(l) }
+func (l perfChangesChangeSlice) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+func (l perfChangesChangeSlice) Less(i, j int) bool {
+ vi, vj := l[i].diff, l[j].diff
+ if vi > 0 && vj > 0 {
+ return vi > vj
+ } else if vi < 0 && vj < 0 {
+ return vi < vj
+ } else {
+ panic("comparing positive and negative diff")
+ }
+}
diff --git a/dashboard/app/build/perf_changes.html b/dashboard/app/build/perf_changes.html
new file mode 100644
index 00000000000..941bf804a90
--- /dev/null
+++ b/dashboard/app/build/perf_changes.html
@@ -0,0 +1,88 @@
+
+
+
+ {{$.Dashboard.Name}} Dashboard
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{range $c := $.Commits}}
+ {{range $m := $c.Metrics}}
+ {{if $m.First}}
+
+ {{if $c.IsSummary}}
+ tip vs {{$c.ParentHash}} |
+ {{else}}
+ {{shortHash $c.Hash}} |
+ {{end}}
+ {{$c.NumResults}} |
+ {{else}}
+
+ |
+ |
+ {{end}}
+ {{$m.Name}} |
+
+ {{range $ch := $m.BadChanges}}
+ {{$ch.Val}}
+ {{end}}
+ |
+
+ {{range $ch := $m.GoodChanges}}
+ {{$ch.Val}}
+ {{end}}
+ |
+ {{if $m.First}}
+ {{shortUser $c.User}} |
+ {{$c.Time.Format "Mon 02 Jan 15:04"}} |
+ {{shortDesc $c.Desc}} |
+ {{else}}
+ |
+ |
+ |
+ {{end}}
+
+ {{end}}
+ {{if $c.IsSummary}}
+ --- |
+ {{end}}
+ {{end}}
+
+
+
+ {{with $.Pagination}}
+
+ {{end}}
+
+
+
+
+
+
diff --git a/dashboard/app/build/perf_detail.go b/dashboard/app/build/perf_detail.go
new file mode 100644
index 00000000000..f8d9bfda9ed
--- /dev/null
+++ b/dashboard/app/build/perf_detail.go
@@ -0,0 +1,221 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build appengine
+
+package build
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net/http"
+ "sort"
+ "strconv"
+ "strings"
+
+ "appengine"
+ "appengine/datastore"
+)
+
+func init() {
+ for _, d := range dashboards {
+ http.HandleFunc(d.RelPath+"perfdetail", perfDetailUIHandler)
+ }
+}
+
+func perfDetailUIHandler(w http.ResponseWriter, r *http.Request) {
+ d := dashboardForRequest(r)
+ c := d.Context(appengine.NewContext(r))
+ pc, err := GetPerfConfig(c, r)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ kind := r.FormValue("kind")
+ builder := r.FormValue("builder")
+ benchmark := r.FormValue("benchmark")
+ if kind == "" {
+ kind = "benchmark"
+ }
+ if kind != "benchmark" && kind != "builder" {
+ logErr(w, r, fmt.Errorf("unknown kind %s", kind))
+ return
+ }
+
+ // Fetch the new commit.
+ com1 := new(Commit)
+ com1.Hash = r.FormValue("commit")
+ if hash, ok := knownTags[com1.Hash]; ok {
+ com1.Hash = hash
+ }
+ if err := datastore.Get(c, com1.Key(c), com1); err != nil {
+ logErr(w, r, fmt.Errorf("failed to fetch commit %s: %v", com1.Hash, err))
+ return
+ }
+ // Fetch the associated perf result.
+ ress1 := &PerfResult{CommitHash: com1.Hash}
+ if err := datastore.Get(c, ress1.Key(c), ress1); err != nil {
+ logErr(w, r, fmt.Errorf("failed to fetch perf result %s: %v", com1.Hash, err))
+ return
+ }
+
+ // Fetch the old commit.
+ var ress0 *PerfResult
+ com0 := new(Commit)
+ com0.Hash = r.FormValue("commit0")
+ if hash, ok := knownTags[com0.Hash]; ok {
+ com0.Hash = hash
+ }
+ if com0.Hash != "" {
+ // Have an exact commit hash, fetch directly.
+ if err := datastore.Get(c, com0.Key(c), com0); err != nil {
+ logErr(w, r, fmt.Errorf("failed to fetch commit %s: %v", com0.Hash, err))
+ return
+ }
+ ress0 = &PerfResult{CommitHash: com0.Hash}
+ if err := datastore.Get(c, ress0.Key(c), ress0); err != nil {
+ logErr(w, r, fmt.Errorf("failed to fetch perf result for %s: %v", com0.Hash, err))
+ return
+ }
+ } else {
+ // Don't have the commit hash, find the previous commit to compare.
+ rc := MakePerfResultCache(c, com1, false)
+ ress0, err = rc.NextForComparison(com1.Num, "")
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+ if ress0 == nil {
+ logErr(w, r, fmt.Errorf("no previous commit with results"))
+ return
+ }
+ // Now that we know the right result, fetch the commit.
+ com0.Hash = ress0.CommitHash
+ if err := datastore.Get(c, com0.Key(c), com0); err != nil {
+ logErr(w, r, fmt.Errorf("failed to fetch commit %s: %v", com0.Hash, err))
+ return
+ }
+ }
+
+ res0 := ress0.ParseData()
+ res1 := ress1.ParseData()
+ var benchmarks []*uiPerfDetailBenchmark
+ var list []string
+ if kind == "builder" {
+ list = pc.BenchmarksForBuilder(builder)
+ } else {
+ list = pc.BuildersForBenchmark(benchmark)
+ }
+ for _, other := range list {
+ if kind == "builder" {
+ benchmark = other
+ } else {
+ builder = other
+ }
+ var procs []*uiPerfDetailProcs
+ allProcs := pc.ProcList(builder)
+ for _, p := range allProcs {
+ BenchProcs := fmt.Sprintf("%v-%v", benchmark, p)
+ if res0[builder] == nil || res0[builder][BenchProcs] == nil {
+ continue
+ }
+ pp := &uiPerfDetailProcs{Procs: p}
+ for metric, val := range res0[builder][BenchProcs].Metrics {
+ var pm uiPerfDetailMetric
+ pm.Name = metric
+ pm.Val0 = fmt.Sprintf("%v", val)
+ val1 := uint64(0)
+ if res1[builder] != nil && res1[builder][BenchProcs] != nil {
+ val1 = res1[builder][BenchProcs].Metrics[metric]
+ }
+ pm.Val1 = fmt.Sprintf("%v", val1)
+ v0 := val
+ v1 := val1
+ valf := perfDiff(v0, v1)
+ pm.Delta = fmt.Sprintf("%+.2f%%", valf)
+ pm.Style = perfChangeStyle(pc, valf, builder, BenchProcs, pm.Name)
+ pp.Metrics = append(pp.Metrics, pm)
+ }
+ sort.Sort(pp.Metrics)
+ for artifact, hash := range res0[builder][BenchProcs].Artifacts {
+ var pm uiPerfDetailMetric
+ pm.Val0 = fmt.Sprintf("%v", artifact)
+ pm.Link0 = fmt.Sprintf("log/%v", hash)
+ pm.Val1 = fmt.Sprintf("%v", artifact)
+ if res1[builder] != nil && res1[builder][BenchProcs] != nil && res1[builder][BenchProcs].Artifacts[artifact] != "" {
+ pm.Link1 = fmt.Sprintf("log/%v", res1[builder][BenchProcs].Artifacts[artifact])
+ }
+ pp.Metrics = append(pp.Metrics, pm)
+ }
+ procs = append(procs, pp)
+ }
+ benchmarks = append(benchmarks, &uiPerfDetailBenchmark{other, procs})
+ }
+
+ cfg := new(uiPerfConfig)
+ for _, v := range pc.BuildersForBenchmark("") {
+ cfg.Builders = append(cfg.Builders, uiPerfConfigElem{v, v == builder})
+ }
+ for _, v := range pc.BenchmarksForBuilder("") {
+ cfg.Benchmarks = append(cfg.Benchmarks, uiPerfConfigElem{v, v == benchmark})
+ }
+
+ data := &uiPerfDetailTemplateData{d, cfg, kind == "builder", com0, com1, benchmarks}
+
+ var buf bytes.Buffer
+ if err := uiPerfDetailTemplate.Execute(&buf, data); err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ buf.WriteTo(w)
+}
+
+func perfResultSplit(s string) (builder string, benchmark string, procs int) {
+ s1 := strings.Split(s, "|")
+ s2 := strings.Split(s1[1], "-")
+ procs, _ = strconv.Atoi(s2[1])
+ return s1[0], s2[0], procs
+}
+
+type uiPerfDetailTemplateData struct {
+ Dashboard *Dashboard
+ Config *uiPerfConfig
+ KindBuilder bool
+ Commit0 *Commit
+ Commit1 *Commit
+ Benchmarks []*uiPerfDetailBenchmark
+}
+
+type uiPerfDetailBenchmark struct {
+ Name string
+ Procs []*uiPerfDetailProcs
+}
+
+type uiPerfDetailProcs struct {
+ Procs int
+ Metrics uiPerfDetailMetrics
+}
+
+type uiPerfDetailMetric struct {
+ Name string
+ Val0 string
+ Val1 string
+ Link0 string
+ Link1 string
+ Delta string
+ Style string
+}
+
+type uiPerfDetailMetrics []uiPerfDetailMetric
+
+func (l uiPerfDetailMetrics) Len() int { return len(l) }
+func (l uiPerfDetailMetrics) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+func (l uiPerfDetailMetrics) Less(i, j int) bool { return l[i].Name < l[j].Name }
+
+var uiPerfDetailTemplate = template.Must(
+ template.New("perf_detail.html").Funcs(tmplFuncs).ParseFiles("build/perf_detail.html"),
+)
diff --git a/dashboard/app/build/perf_detail.html b/dashboard/app/build/perf_detail.html
new file mode 100644
index 00000000000..70cca3b0052
--- /dev/null
+++ b/dashboard/app/build/perf_detail.html
@@ -0,0 +1,100 @@
+
+
+
+ {{$.Dashboard.Name}} Dashboard
+
+
+
+
+
+
+
+
+
+
+
+ {{range $b := $.Benchmarks}}
+
+
{{$b.Name}}
+ {{range $p := $b.Procs}}
+
+
GOMAXPROCS={{$p.Procs}}
+
+
+
+ Metric |
+ old |
+ new |
+ delta |
+
+
+
+ {{range $m := $p.Metrics}}
+
+ {{$m.Name}} |
+ {{if $m.Link0}}
+ {{$m.Val0}} |
+ {{else}}
+ {{$m.Val0}} |
+ {{end}}
+ {{if $m.Link1}}
+ {{$m.Val1}} |
+ {{else}}
+ {{$m.Val1}} |
+ {{end}}
+ {{$m.Delta}} |
+
+ {{end}}
+
+
+
+ {{end}}
+
+ {{end}}
+
+
+
+
+
+
+
diff --git a/dashboard/app/build/perf_graph.go b/dashboard/app/build/perf_graph.go
new file mode 100644
index 00000000000..206ae2173d3
--- /dev/null
+++ b/dashboard/app/build/perf_graph.go
@@ -0,0 +1,291 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build appengine
+
+package build
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net/http"
+ "strconv"
+
+ "appengine"
+ "appengine/datastore"
+)
+
+func init() {
+ for _, d := range dashboards {
+ http.HandleFunc(d.RelPath+"perfgraph", perfGraphHandler)
+ }
+}
+
+func perfGraphHandler(w http.ResponseWriter, r *http.Request) {
+ d := dashboardForRequest(r)
+ c := d.Context(appengine.NewContext(r))
+ pc, err := GetPerfConfig(c, r)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+ allBuilders := pc.BuildersForBenchmark("")
+ allBenchmarks := pc.BenchmarksForBuilder("")
+ allMetrics := pc.MetricsForBenchmark("")
+ allProcs := pc.ProcList("")
+ r.ParseForm()
+ absolute := r.FormValue("absolute") != ""
+ selBuilders := r.Form["builder"]
+ selBenchmarks := r.Form["benchmark"]
+ selMetrics := r.Form["metric"]
+ selProcs := r.Form["procs"]
+ if len(selBuilders) == 0 {
+ selBuilders = append(selBuilders, allBuilders[0])
+ }
+ if len(selBenchmarks) == 0 {
+ selBenchmarks = append(selBenchmarks, "json")
+ }
+ if len(selMetrics) == 0 {
+ selMetrics = append(selMetrics, "time")
+ }
+ if len(selProcs) == 0 {
+ selProcs = append(selProcs, "1")
+ }
+ // TODO(dvyukov): validate input
+
+ present := func(set []string, s string) bool {
+ for _, s1 := range set {
+ if s1 == s {
+ return true
+ }
+ }
+ return false
+ }
+
+ cfg := &uiPerfConfig{}
+ for _, v := range allBuilders {
+ cfg.Builders = append(cfg.Builders, uiPerfConfigElem{v, present(selBuilders, v)})
+ }
+ for _, v := range allBenchmarks {
+ cfg.Benchmarks = append(cfg.Benchmarks, uiPerfConfigElem{v, present(selBenchmarks, v)})
+ }
+ for _, v := range allMetrics {
+ cfg.Metrics = append(cfg.Metrics, uiPerfConfigElem{v, present(selMetrics, v)})
+ }
+ for _, v := range allProcs {
+ cfg.Procs = append(cfg.Procs, uiPerfConfigElem{strconv.Itoa(v), present(selProcs, strconv.Itoa(v))})
+ }
+
+ // Select last commit.
+ startCommit := 0
+ commitsToDisplay := 100
+ if r.FormValue("startcommit") != "" {
+ startCommit, _ = strconv.Atoi(r.FormValue("startcommit"))
+ commitsToDisplay, _ = strconv.Atoi(r.FormValue("commitnum"))
+ } else {
+ var commits1 []*Commit
+ _, err = datastore.NewQuery("Commit").
+ Ancestor((&Package{}).Key(c)).
+ Order("-Num").
+ Filter("NeedsBenchmarking =", true).
+ Limit(1).
+ GetAll(c, &commits1)
+ if err != nil || len(commits1) != 1 {
+ logErr(w, r, err)
+ return
+ }
+ startCommit = commits1[0].Num
+ }
+
+ if r.FormValue("zoomin") != "" {
+ commitsToDisplay /= 2
+ } else if r.FormValue("zoomout") != "" {
+ commitsToDisplay *= 2
+ } else if r.FormValue("older") != "" {
+ startCommit -= commitsToDisplay / 2
+ } else if r.FormValue("newer") != "" {
+ startCommit += commitsToDisplay / 2
+ }
+
+ // TODO(dvyukov): limit number of lines on the graph?
+ startCommitNum := startCommit - commitsToDisplay + 1
+ if startCommitNum < 0 {
+ startCommitNum = 0
+ }
+ var vals [][]float64
+ var hints [][]string
+ var certainty [][]bool
+ var headers []string
+ commits2, err := GetCommits(c, startCommitNum, commitsToDisplay)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+ for _, builder := range selBuilders {
+ for _, metric := range selMetrics {
+ for _, benchmark := range selBenchmarks {
+ for _, procs := range selProcs {
+ benchProcs := fmt.Sprintf("%v-%v", benchmark, procs)
+ vv, err := GetPerfMetricsForCommits(c, builder, benchProcs, metric, startCommitNum, commitsToDisplay)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+ nonzero := false
+ min := ^uint64(0)
+ max := uint64(0)
+ for _, v := range vv {
+ if v == 0 {
+ continue
+ }
+ if max < v {
+ max = v
+ }
+ if min > v {
+ min = v
+ }
+ nonzero = true
+ }
+ if nonzero {
+ noise := pc.NoiseLevel(builder, benchProcs, metric)
+ diff := (float64(max) - float64(min)) / float64(max) * 100
+ // Scale graph passes through 2 points: (noise, minScale) and (growthFactor*noise, 100).
+ // Plus it's bottom capped at minScale and top capped at 100.
+ // Intention:
+ // Diffs below noise are scaled to minScale.
+ // Diffs above growthFactor*noise are scaled to 100.
+ // Between noise and growthFactor*noise scale grows linearly.
+ const minScale = 5
+ const growthFactor = 4
+ scale := diff*(100-minScale)/(noise*(growthFactor-1)) + (minScale*growthFactor-100)/(growthFactor-1)
+ if scale < minScale {
+ scale = minScale
+ }
+ if scale > 100 {
+ scale = 100
+ }
+ descBuilder := "/" + builder
+ descBenchmark := "/" + benchProcs
+ descMetric := "/" + metric
+ if len(selBuilders) == 1 {
+ descBuilder = ""
+ }
+ if len(selBenchmarks) == 1 && len(selProcs) == 1 {
+ descBenchmark = ""
+ }
+ if len(selMetrics) == 1 && (len(selBuilders) > 1 || len(selBenchmarks) > 1 || len(selProcs) > 1) {
+ descMetric = ""
+ }
+ desc := fmt.Sprintf("%v%v%v", descBuilder, descBenchmark, descMetric)[1:]
+ hh := make([]string, commitsToDisplay)
+ valf := make([]float64, commitsToDisplay)
+ cert := make([]bool, commitsToDisplay)
+ lastval := uint64(0)
+ lastval0 := uint64(0)
+ for i, v := range vv {
+ cert[i] = true
+ if v == 0 {
+ if lastval == 0 {
+ continue
+ }
+ nextval := uint64(0)
+ nextidx := 0
+ for i2, v2 := range vv[i+1:] {
+ if v2 != 0 {
+ nextval = v2
+ nextidx = i + i2 + 1
+ break
+ }
+ }
+ if nextval == 0 {
+ continue
+ }
+ cert[i] = false
+ v = lastval + uint64(int64(nextval-lastval)/int64(nextidx-i+1))
+ _, _ = nextval, nextidx
+ }
+ f := float64(v)
+ if !absolute {
+ f = (float64(v) - float64(min)) * 100 / (float64(max) - float64(min))
+ f = f*scale/100 + (100-scale)/2
+ f += 0.000001
+ }
+ valf[i] = f
+ com := commits2[i]
+ comLink := "https://code.google.com/p/go/source/detail?r=" + com.Hash
+ if cert[i] {
+ d := ""
+ if lastval0 != 0 {
+ d = fmt.Sprintf(" (%.02f%%)", perfDiff(lastval0, v))
+ }
+ cmpLink := fmt.Sprintf("/perfdetail?commit=%v&builder=%v&benchmark=%v", com.Hash, builder, benchmark)
+ hh[i] = fmt.Sprintf("%v: %v%v
%v
%v", desc, cmpLink, v, d, comLink, com.Desc, com.Time.Format("Jan 2, 2006 1:04"))
+ } else {
+ hh[i] = fmt.Sprintf("%v: NO DATA
%v
%v", desc, comLink, com.Desc, com.Time.Format("Jan 2, 2006 1:04"))
+ }
+ lastval = v
+ if cert[i] {
+ lastval0 = v
+ }
+ }
+ vals = append(vals, valf)
+ hints = append(hints, hh)
+ certainty = append(certainty, cert)
+ headers = append(headers, fmt.Sprintf("%s (%.2f%% [%.2f%%])", desc, diff, noise))
+ }
+ }
+ }
+ }
+ }
+
+ var commits []perfGraphCommit
+ if len(vals) != 0 && len(vals[0]) != 0 {
+ for i := range vals[0] {
+ if !commits2[i].NeedsBenchmarking {
+ continue
+ }
+ var c perfGraphCommit
+ for j := range vals {
+ c.Vals = append(c.Vals, perfGraphValue{float64(vals[j][i]), certainty[j][i], hints[j][i]})
+ }
+ commits = append(commits, c)
+ }
+ }
+
+ data := &perfGraphData{d, cfg, startCommit, commitsToDisplay, absolute, headers, commits}
+
+ var buf bytes.Buffer
+ if err := perfGraphTemplate.Execute(&buf, data); err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ buf.WriteTo(w)
+}
+
+var perfGraphTemplate = template.Must(
+ template.New("perf_graph.html").ParseFiles("build/perf_graph.html"),
+)
+
+type perfGraphData struct {
+ Dashboard *Dashboard
+ Config *uiPerfConfig
+ StartCommit int
+ CommitNum int
+ Absolute bool
+ Headers []string
+ Commits []perfGraphCommit
+}
+
+type perfGraphCommit struct {
+ Name string
+ Vals []perfGraphValue
+}
+
+type perfGraphValue struct {
+ Val float64
+ Certainty bool
+ Hint string
+}
diff --git a/dashboard/app/build/perf_graph.html b/dashboard/app/build/perf_graph.html
new file mode 100644
index 00000000000..c6397b68a13
--- /dev/null
+++ b/dashboard/app/build/perf_graph.html
@@ -0,0 +1,116 @@
+
+
+
+ {{$.Dashboard.Name}} Dashboard
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dashboard/app/build/perf_learn.go b/dashboard/app/build/perf_learn.go
new file mode 100644
index 00000000000..bd308ea8599
--- /dev/null
+++ b/dashboard/app/build/perf_learn.go
@@ -0,0 +1,186 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build appengine
+
+package build
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net/http"
+ "sort"
+
+ "appengine"
+ "appengine/datastore"
+)
+
+func init() {
+ http.HandleFunc("/perflearn", perfLearnHandler)
+}
+
+const (
+ learnPercentile = 0.95
+ learnSignalMultiplier = 1.2
+ learnMinSignal = 0.5
+)
+
+func perfLearnHandler(w http.ResponseWriter, r *http.Request) {
+ d := dashboardForRequest(r)
+ c := d.Context(appengine.NewContext(r))
+
+ pc, err := GetPerfConfig(c, r)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ p, err := GetPackage(c, "")
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ update := r.FormValue("update") != ""
+ noise := make(map[string]string)
+
+ data := &perfLearnData{}
+
+ commits, err := GetCommits(c, 0, p.NextNum)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ for _, builder := range pc.BuildersForBenchmark("") {
+ for _, benchmark := range pc.BenchmarksForBuilder(builder) {
+ for _, metric := range pc.MetricsForBenchmark(benchmark) {
+ for _, procs := range pc.ProcList(builder) {
+ values, err := GetPerfMetricsForCommits(c, builder, fmt.Sprintf("%v-%v", benchmark, procs), metric, 0, p.NextNum)
+ if err != nil {
+ logErr(w, r, err)
+ return
+ }
+ var dd []float64
+ last := uint64(0)
+ for i, v := range values {
+ if v == 0 {
+ if commits[i].NeedsBenchmarking {
+ last = 0
+ }
+ continue
+ }
+ if last != 0 {
+ v1 := v
+ if v1 < last {
+ v1, last = last, v1
+ }
+ diff := float64(v1)/float64(last)*100 - 100
+ dd = append(dd, diff)
+ }
+ last = v
+ }
+ if len(dd) == 0 {
+ continue
+ }
+ sort.Float64s(dd)
+
+ baseIdx := int(float64(len(dd)) * learnPercentile)
+ baseVal := dd[baseIdx]
+ signalVal := baseVal * learnSignalMultiplier
+ if signalVal < learnMinSignal {
+ signalVal = learnMinSignal
+ }
+ signalIdx := -1
+ noiseNum := 0
+ signalNum := 0
+
+ var diffs []*perfLearnDiff
+ for i, d := range dd {
+ if d > 3*signalVal {
+ d = 3 * signalVal
+ }
+ diffs = append(diffs, &perfLearnDiff{Num: i, Val: d})
+ if signalIdx == -1 && d >= signalVal {
+ signalIdx = i
+ }
+ if d < signalVal {
+ noiseNum++
+ } else {
+ signalNum++
+ }
+ }
+ diffs[baseIdx].Hint = "95%"
+ if signalIdx != -1 {
+ diffs[signalIdx].Hint = "signal"
+ }
+ diffs = diffs[len(diffs)*4/5:]
+ name := fmt.Sprintf("%v/%v-%v/%v", builder, benchmark, procs, metric)
+ data.Entries = append(data.Entries, &perfLearnEntry{len(data.Entries), name, baseVal, noiseNum, signalVal, signalNum, diffs})
+
+ if len(dd) >= 100 || r.FormValue("force") != "" {
+ nname := fmt.Sprintf("%v|%v-%v", builder, benchmark, procs)
+ n := noise[nname] + fmt.Sprintf("|%v=%.2f", metric, signalVal)
+ noise[nname] = n
+ }
+ }
+ }
+ }
+ }
+
+ if update {
+ var noiseLevels []string
+ for k, v := range noise {
+ noiseLevels = append(noiseLevels, k+v)
+ }
+ tx := func(c appengine.Context) error {
+ pc, err := GetPerfConfig(c, r)
+ if err != nil {
+ return err
+ }
+ pc.NoiseLevels = noiseLevels
+ if _, err := datastore.Put(c, PerfConfigKey(c), pc); err != nil {
+ return fmt.Errorf("putting PerfConfig: %v", err)
+ }
+ return nil
+ }
+ if err := datastore.RunInTransaction(c, tx, nil); err != nil {
+ logErr(w, r, err)
+ return
+ }
+ }
+
+ var buf bytes.Buffer
+ if err := perfLearnTemplate.Execute(&buf, data); err != nil {
+ logErr(w, r, err)
+ return
+ }
+
+ buf.WriteTo(w)
+}
+
+var perfLearnTemplate = template.Must(
+ template.New("perf_learn.html").Funcs(tmplFuncs).ParseFiles("build/perf_learn.html"),
+)
+
+type perfLearnData struct {
+ Entries []*perfLearnEntry
+}
+
+type perfLearnEntry struct {
+ Num int
+ Name string
+ BaseVal float64
+ NoiseNum int
+ SignalVal float64
+ SignalNum int
+ Diffs []*perfLearnDiff
+}
+
+type perfLearnDiff struct {
+ Num int
+ Val float64
+ Hint string
+}
diff --git a/dashboard/app/build/perf_learn.html b/dashboard/app/build/perf_learn.html
new file mode 100644
index 00000000000..294e957b6db
--- /dev/null
+++ b/dashboard/app/build/perf_learn.html
@@ -0,0 +1,45 @@
+
+
+
+
+
+
+
+
+ {{range $.Entries}}
+
+ {{.Name}}: base={{printf "%.2f[%d]" .BaseVal .NoiseNum}} signal={{printf "%.2f[%d]" .SignalVal .SignalNum}}
+
+
+ {{end}}
+
+
diff --git a/dashboard/app/build/perf_notify.txt b/dashboard/app/build/perf_notify.txt
new file mode 100644
index 00000000000..1e5e2bb7e8a
--- /dev/null
+++ b/dashboard/app/build/perf_notify.txt
@@ -0,0 +1,11 @@
+Change {{shortHash .Commit.Hash}} caused perf changes on {{.Builder}}:
+
+{{.Commit.Desc}}
+
+http://code.google.com/p/go/source/detail?r={{shortHash .Commit.Hash}}
+{{range $b := .Benchmarks}}
+{{printf "%-16s %12s %12s %10s" $b.Name "old" "new" "delta"}}
+{{range $m := $b.Metrics}}{{printf "%-16s %12v %12v %+10.2f" $m.Name $m.Old $m.New $m.Delta}}
+{{end}}{{end}}
+{{.Url}}
+
diff --git a/dashboard/app/build/ui.go b/dashboard/app/build/ui.go
index 0536dc0ba29..e02ae4a01e0 100644
--- a/dashboard/app/build/ui.go
+++ b/dashboard/app/build/ui.go
@@ -71,7 +71,7 @@ func uiHandler(w http.ResponseWriter, r *http.Request) {
builders := commitBuilders(commits)
var tipState *TagState
- if pkg.Kind == "" && page == 0 {
+ if pkg.Kind == "" && page == 0 && commits != nil {
// only show sub-repo state on first page of normal repo view
tipState, err = TagStateByName(c, "tip")
if err != nil {
@@ -354,7 +354,7 @@ func shortDesc(desc string) string {
if i := strings.Index(desc, "\n"); i != -1 {
desc = desc[:i]
}
- return desc
+ return limitStringLength(desc, 100)
}
// shortHash returns a short version of a hash.
diff --git a/dashboard/app/build/ui.html b/dashboard/app/build/ui.html
index 041847323f4..a3cd1a0231b 100644
--- a/dashboard/app/build/ui.html
+++ b/dashboard/app/build/ui.html
@@ -2,72 +2,7 @@
{{$.Dashboard.Name}} Build Dashboard
-
+
+
- {{$.Dashboard.Name}} Build Status
+
+
{{with $.Package.Name}}{{.}}
{{end}}
- {{if $.Commits}}
+
+
+ {{if $.Commits}}
@@ -256,5 +203,6 @@
{{end}}
{{end}}
+
diff --git a/dashboard/app/static/style.css b/dashboard/app/static/style.css
new file mode 100644
index 00000000000..858adea1f66
--- /dev/null
+++ b/dashboard/app/static/style.css
@@ -0,0 +1,309 @@
+* { box-sizing: border-box; }
+
+ .dashboards {
+ padding: 0.5em;
+ }
+ .dashboards a {
+ padding: 0.5em;
+ background: #eee;
+ color: blue;
+ }
+
+body {
+ margin: 0;
+ font-family: sans-serif;
+ padding: 0; margin: 0;
+ color: #222;
+}
+
+.container {
+ max-width: 900px;
+ margin: 0 auto;
+}
+
+p, pre, ul, ol { margin: 20px; }
+
+h1, h2, h3, h4 {
+ margin: 20px 0;
+ padding: 0;
+ color: #375EAB;
+ font-weight: bold;
+}
+
+h1 { font-size: 24px; }
+h2 { font-size: 20px; }
+h3 { font-size: 20px; }
+h4 { font-size: 16px; }
+
+h2 { background: #E0EBF5; padding: 2px 5px; }
+h3, h4 { margin: 20px 5px; }
+
+dl, dd { font-size: 14px; }
+dl { margin: 20px; }
+dd { margin: 2px 20px; }
+
+.clear {
+ clear: both;
+}
+
+.button {
+ padding: 10px;
+
+ color: #222;
+ border: 1px solid #375EAB;
+ background: #E0EBF5;
+
+ border-radius: 5px;
+
+ cursor: pointer;
+
+ margin-left: 60px;
+}
+
+/* navigation bar */
+
+#topbar {
+ padding: 10px 10px;
+ background: #E0EBF5;
+}
+
+#topbar a {
+ color: #222;
+}
+#topbar h1 {
+ float: left;
+ margin: 0;
+ padding-top: 5px;
+}
+
+#topbar nav {
+ float: left;
+ margin-left: 20px;
+}
+#topbar nav a {
+ display: inline-block;
+ padding: 10px;
+
+ margin: 0;
+ margin-right: 5px;
+
+ color: white;
+ background: #375EAB;
+
+ text-decoration: none;
+ font-size: 16px;
+
+ border: 1px solid #375EAB;
+ -webkit-border-radius: 5px;
+ -moz-border-radius: 5px;
+ border-radius: 5px;
+}
+
+.page {
+ margin-top: 20px;
+}
+
+/* settings panels */
+aside {
+ margin-top: 5px;
+}
+
+.panel {
+ border: 1px solid #aaa;
+ border-radius: 5px;
+ margin-bottom: 5px;
+}
+
+.panel h1 {
+ font-size: 16px;
+ margin: 0;
+ padding: 2px 8px;
+}
+
+.panel select {
+ padding: 5px;
+ border: 0;
+ width: 100%;
+}
+
+/* results table */
+
+table {
+ margin: 5px;
+ border-collapse: collapse;
+ font-size: 11px;
+}
+
+table td, table th, table td, table th {
+ vertical-align: top;
+ padding: 2px 6px;
+}
+
+table tr:nth-child(2n+1) {
+ background: #F4F4F4;
+}
+
+table thead tr {
+ background: #fff !important;
+}
+
+/* build results */
+
+.build td, .build th, .packages td, .packages th {
+ vertical-align: top;
+ padding: 2px 4px;
+ font-size: 10pt;
+}
+
+.build .hash {
+ font-family: monospace;
+ font-size: 9pt;
+}
+
+.build .result {
+ text-align: center;
+ width: 2em;
+}
+
+.build .col-hash, .build .col-result, .build .col-metric, .build .col-numresults {
+ border-right: 1px solid #ccc;
+}
+
+.build .row-commit {
+ border-top: 2px solid #ccc;
+}
+
+.build .arch {
+ font-size: 83%;
+ font-weight: normal;
+}
+
+.build .time {
+ color: #666;
+}
+
+.build .ok {
+ font-size: 83%;
+}
+
+.build .desc, .build .date, .build .user {
+ white-space: nowrap;
+}
+
+.build .desc {
+ text-align: left;
+ max-width: 470px;
+ overflow: hidden;
+ text-overflow: ellipsis;
+}
+
+.good { text-decoration: none; text-shadow: 1px 1px 0 #BBF8AB; color: #000000; background: #38FF38;}
+.bad { text-decoration: none; text-shadow: 1px 1px 0 #000000; color: #FFFFFF; background: #E70000;}
+.noise { text-decoration: none; color: #888; }
+.fail { color: #C00; }
+
+/* pagination */
+
+.paginate nav {
+ text-align: center;
+ padding: 0.5em;
+ margin: 10px 0;
+}
+
+.paginate nav a {
+ padding: 0.5em;
+ background: #E0EBF5;
+ color: blue;
+
+ -webkit-border-radius: 5px;
+ -moz-border-radius: 5px;
+ border-radius: 5px;
+}
+
+.paginate nav a.inactive {
+ color: #888;
+ cursor: default;
+ text-decoration: none;
+}
+
+/* diffs */
+
+.diff-meta {
+ font-family: monospace;
+ margin-bottom: 10px;
+}
+
+.diff-container {
+ padding: 10px;
+}
+
+.diff table .metric {
+ font-weight: bold;
+}
+
+.diff {
+ border: 1px solid #aaa;
+ border-radius: 5px;
+ margin-bottom: 5px;
+ margin-right: 10px;
+ float: left;
+}
+
+.diff h1 {
+ font-size: 16px;
+ margin: 0;
+ padding: 2px 8px;
+}
+
+.diff-benchmark {
+ clear: both;
+ padding-top: 5px;
+}
+
+/* positioning elements */
+
+.page {
+ position: relative;
+ width: 100%;
+}
+
+aside {
+ position: absolute;
+ top: 0;
+ left: 0;
+ bottom: 0;
+ width: 200px;
+}
+
+.main-content {
+ position: absolute;
+ top: 0;
+ left: 210px;
+ right: 5px;
+ min-height: 200px;
+ overflow: hidden;
+}
+
+@media only screen and (max-width: 900px) {
+ aside {
+ position: relative;
+ display: block;
+ width: auto;
+ }
+
+ .main-content {
+ position: static;
+ padding: 0;
+ }
+
+ aside .panel {
+ float: left;
+ width: auto;
+ margin-right: 5px;
+ }
+ aside .button {
+ float: left;
+ margin: 0;
+ }
+}
+