cmd/trace: move viewer data structs into cmd/internal/traceviewer
The ViewerEvent, ViewerData and ViewerFrame structs are moved into
cmd/internal/traceviewer and renamed Event, Data, and Frame. The structs
are otherwise the same, with three exceptions: a definition for the JSON
"bp" field, which is part of the trace format but was missing from the
structs, has been added; the Tid and Pid fields on Event have been renamed
TID and PID to better match Go style; and the footer field on ViewerData,
which hasn't been used for a while, has been removed.

This CL is in preparation for the use of these structs by cmd/go's tracing
functionality.

Updates #38714

Change-Id: I345f23617b96d4629b876ae717f89d56a67e05a3
Reviewed-on: https://go-review.googlesource.com/c/go/+/239098
Run-TryBot: Michael Matloob <matloob@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Bryan C. Mills <bcmills@google.com>
Reviewed-by: Jay Conrod <jayconrod@google.com>
parent b3de3e52c2
commit cc700bdc26

New file: src/cmd/internal/traceviewer/format.go (38 lines)
@@ -0,0 +1,38 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package traceviewer provides definitions of the JSON data structures
+// used by the Chrome trace viewer.
+//
+// The official description of the format is in this file:
+// https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview
+package traceviewer
+
+type Data struct {
+	Events   []*Event         `json:"traceEvents"`
+	Frames   map[string]Frame `json:"stackFrames"`
+	TimeUnit string           `json:"displayTimeUnit"`
+}
+
+type Event struct {
+	Name      string      `json:"name,omitempty"`
+	Phase     string      `json:"ph"`
+	Scope     string      `json:"s,omitempty"`
+	Time      float64     `json:"ts"`
+	Dur       float64     `json:"dur,omitempty"`
+	PID       uint64      `json:"pid"`
+	TID       uint64      `json:"tid"`
+	ID        uint64      `json:"id,omitempty"`
+	BindPoint string      `json:"bp,omitempty"`
+	Stack     int         `json:"sf,omitempty"`
+	EndStack  int         `json:"esf,omitempty"`
+	Arg       interface{} `json:"args,omitempty"`
+	Cname     string      `json:"cname,omitempty"`
+	Category  string      `json:"cat,omitempty"`
+}
+
+type Frame struct {
+	Name   string `json:"name"`
+	Parent int    `json:"parent,omitempty"`
+}
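For orientation, here is a standalone sketch (not part of the CL) of what the struct tags above produce on the wire. Because packages under cmd/internal are importable only from inside the cmd tree, the sketch mirrors a subset of the Data, Event, and Frame definitions locally instead of importing them; the event values and frame names are made up.

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirrors of a subset of the traceviewer structs shown above.
type Event struct {
	Name      string      `json:"name,omitempty"`
	Phase     string      `json:"ph"`
	Time      float64     `json:"ts"`
	Dur       float64     `json:"dur,omitempty"`
	PID       uint64      `json:"pid"`
	TID       uint64      `json:"tid"`
	BindPoint string      `json:"bp,omitempty"`
	Arg       interface{} `json:"args,omitempty"`
}

type Frame struct {
	Name   string `json:"name"`
	Parent int    `json:"parent,omitempty"`
}

type Data struct {
	Events   []*Event         `json:"traceEvents"`
	Frames   map[string]Frame `json:"stackFrames"`
	TimeUnit string           `json:"displayTimeUnit"`
}

func main() {
	// A made-up two-event trace: a metadata event naming a row, and a
	// complete ("X") slice on the same pid/tid.
	data := Data{
		Events: []*Event{
			{Name: "thread_name", Phase: "M", PID: 0, TID: 1, Arg: map[string]string{"name": "G1 main.worker"}},
			{Name: "work", Phase: "X", Time: 10, Dur: 25.5, PID: 0, TID: 1},
		},
		Frames:   map[string]Frame{"1": {Name: "main.worker:42"}},
		TimeUnit: "ns",
	}
	out, err := json.MarshalIndent(&data, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // keys come out as "traceEvents", "ph", "ts", "pid", "tid", "bp", ...
}

Encoding a Data value this way yields the "traceEvents", "stackFrames", and "displayTimeUnit" keys that the Chrome trace viewer expects, with each event carrying "ph", "ts", "pid", "tid", and, when set, the newly added "bp" field. The hunks that follow update the existing cmd/trace code and its tests to refer to the new package.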
@@ -5,6 +5,7 @@
 package main
 
 import (
+	"cmd/internal/traceviewer"
 	"encoding/json"
 	"fmt"
 	"internal/trace"
@@ -325,7 +326,7 @@ func splittingTraceConsumer(max int) (*splitter, traceConsumer) {
 	}
 
 	var (
-		data = ViewerData{Frames: make(map[string]ViewerFrame)}
+		data = traceviewer.Data{Frames: make(map[string]traceviewer.Frame)}
 
 		sizes []eventSz
 		cw    countingWriter
@@ -337,7 +338,7 @@ func splittingTraceConsumer(max int) (*splitter, traceConsumer) {
 		consumeTimeUnit: func(unit string) {
 			data.TimeUnit = unit
 		},
-		consumeViewerEvent: func(v *ViewerEvent, required bool) {
+		consumeViewerEvent: func(v *traceviewer.Event, required bool) {
 			if required {
 				// Store required events inside data
 				// so flush can include them in the required
@@ -350,7 +351,7 @@ func splittingTraceConsumer(max int) (*splitter, traceConsumer) {
 			sizes = append(sizes, eventSz{v.Time, cw.size + 1}) // +1 for ",".
 			cw.size = 0
 		},
-		consumeViewerFrame: func(k string, v ViewerFrame) {
+		consumeViewerFrame: func(k string, v traceviewer.Frame) {
 			data.Frames[k] = v
 		},
 		flush: func() {
@@ -478,36 +479,6 @@ type gInfo struct {
 	markAssist *trace.Event // if non-nil, the mark assist currently running.
 }
 
-type ViewerData struct {
-	Events   []*ViewerEvent         `json:"traceEvents"`
-	Frames   map[string]ViewerFrame `json:"stackFrames"`
-	TimeUnit string                 `json:"displayTimeUnit"`
-
-	// This is where mandatory part of the trace starts (e.g. thread names)
-	footer int
-}
-
-type ViewerEvent struct {
-	Name     string      `json:"name,omitempty"`
-	Phase    string      `json:"ph"`
-	Scope    string      `json:"s,omitempty"`
-	Time     float64     `json:"ts"`
-	Dur      float64     `json:"dur,omitempty"`
-	Pid      uint64      `json:"pid"`
-	Tid      uint64      `json:"tid"`
-	ID       uint64      `json:"id,omitempty"`
-	Stack    int         `json:"sf,omitempty"`
-	EndStack int         `json:"esf,omitempty"`
-	Arg      interface{} `json:"args,omitempty"`
-	Cname    string      `json:"cname,omitempty"`
-	Category string      `json:"cat,omitempty"`
-}
-
-type ViewerFrame struct {
-	Name   string `json:"name"`
-	Parent int    `json:"parent,omitempty"`
-}
-
 type NameArg struct {
 	Name string `json:"name"`
 }
@@ -528,8 +499,8 @@ type SortIndexArg struct {
 
 type traceConsumer struct {
 	consumeTimeUnit    func(unit string)
-	consumeViewerEvent func(v *ViewerEvent, required bool)
-	consumeViewerFrame func(key string, f ViewerFrame)
+	consumeViewerEvent func(v *traceviewer.Event, required bool)
+	consumeViewerFrame func(key string, f traceviewer.Frame)
 	flush              func()
 }
 
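As a hedged illustration of the callback shape above (editorial, not part of the CL): a consumer living in package main of cmd/trace, next to the traceConsumer definition, could do nothing but count emitted events. The countingConsumer name and its logic are invented for this sketch; only the field names and the *traceviewer.Event and traceviewer.Frame types come from the diff.

// Hypothetical sketch; assumes it compiles inside cmd/trace alongside the
// traceConsumer struct shown above.
func countingConsumer(total *int) traceConsumer {
	return traceConsumer{
		consumeTimeUnit: func(unit string) {}, // ignore the display time unit
		consumeViewerEvent: func(v *traceviewer.Event, required bool) {
			*total++ // count every event, required (metadata/footer) or not
		},
		consumeViewerFrame: func(key string, f traceviewer.Frame) {}, // ignore stack frames
		flush:              func() {},                                // nothing buffered, nothing to flush
	}
}

Passing such a value to generateTrace (whose signature appears in the hunks below) would report how many viewer events a parsed trace expands into, without serializing any of them.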
@@ -775,23 +746,23 @@ func generateTrace(params *traceParams, consumer traceConsumer) error {
 		ctx.emitSectionFooter(procsSection, "PROCS", 2)
 	}
 
-	ctx.emitFooter(&ViewerEvent{Name: "thread_name", Phase: "M", Pid: procsSection, Tid: trace.GCP, Arg: &NameArg{"GC"}})
-	ctx.emitFooter(&ViewerEvent{Name: "thread_sort_index", Phase: "M", Pid: procsSection, Tid: trace.GCP, Arg: &SortIndexArg{-6}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_name", Phase: "M", PID: procsSection, TID: trace.GCP, Arg: &NameArg{"GC"}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_sort_index", Phase: "M", PID: procsSection, TID: trace.GCP, Arg: &SortIndexArg{-6}})
 
-	ctx.emitFooter(&ViewerEvent{Name: "thread_name", Phase: "M", Pid: procsSection, Tid: trace.NetpollP, Arg: &NameArg{"Network"}})
-	ctx.emitFooter(&ViewerEvent{Name: "thread_sort_index", Phase: "M", Pid: procsSection, Tid: trace.NetpollP, Arg: &SortIndexArg{-5}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_name", Phase: "M", PID: procsSection, TID: trace.NetpollP, Arg: &NameArg{"Network"}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_sort_index", Phase: "M", PID: procsSection, TID: trace.NetpollP, Arg: &SortIndexArg{-5}})
 
-	ctx.emitFooter(&ViewerEvent{Name: "thread_name", Phase: "M", Pid: procsSection, Tid: trace.TimerP, Arg: &NameArg{"Timers"}})
-	ctx.emitFooter(&ViewerEvent{Name: "thread_sort_index", Phase: "M", Pid: procsSection, Tid: trace.TimerP, Arg: &SortIndexArg{-4}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_name", Phase: "M", PID: procsSection, TID: trace.TimerP, Arg: &NameArg{"Timers"}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_sort_index", Phase: "M", PID: procsSection, TID: trace.TimerP, Arg: &SortIndexArg{-4}})
 
-	ctx.emitFooter(&ViewerEvent{Name: "thread_name", Phase: "M", Pid: procsSection, Tid: trace.SyscallP, Arg: &NameArg{"Syscalls"}})
-	ctx.emitFooter(&ViewerEvent{Name: "thread_sort_index", Phase: "M", Pid: procsSection, Tid: trace.SyscallP, Arg: &SortIndexArg{-3}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_name", Phase: "M", PID: procsSection, TID: trace.SyscallP, Arg: &NameArg{"Syscalls"}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_sort_index", Phase: "M", PID: procsSection, TID: trace.SyscallP, Arg: &SortIndexArg{-3}})
 
 	// Display rows for Ps if we are in the default trace view mode (not goroutine-oriented presentation)
 	if ctx.mode&modeGoroutineOriented == 0 {
 		for i := 0; i <= maxProc; i++ {
-			ctx.emitFooter(&ViewerEvent{Name: "thread_name", Phase: "M", Pid: procsSection, Tid: uint64(i), Arg: &NameArg{fmt.Sprintf("Proc %v", i)}})
-			ctx.emitFooter(&ViewerEvent{Name: "thread_sort_index", Phase: "M", Pid: procsSection, Tid: uint64(i), Arg: &SortIndexArg{i}})
+			ctx.emitFooter(&traceviewer.Event{Name: "thread_name", Phase: "M", PID: procsSection, TID: uint64(i), Arg: &NameArg{fmt.Sprintf("Proc %v", i)}})
+			ctx.emitFooter(&traceviewer.Event{Name: "thread_sort_index", Phase: "M", PID: procsSection, TID: uint64(i), Arg: &SortIndexArg{i}})
 		}
 	}
 
@@ -829,27 +800,27 @@ func generateTrace(params *traceParams, consumer traceConsumer) error {
 			if !ctx.gs[k] {
 				continue
 			}
-			ctx.emitFooter(&ViewerEvent{Name: "thread_name", Phase: "M", Pid: procsSection, Tid: k, Arg: &NameArg{v.name}})
+			ctx.emitFooter(&traceviewer.Event{Name: "thread_name", Phase: "M", PID: procsSection, TID: k, Arg: &NameArg{v.name}})
 		}
 		// Row for the main goroutine (maing)
-		ctx.emitFooter(&ViewerEvent{Name: "thread_sort_index", Phase: "M", Pid: procsSection, Tid: ctx.maing, Arg: &SortIndexArg{-2}})
+		ctx.emitFooter(&traceviewer.Event{Name: "thread_sort_index", Phase: "M", PID: procsSection, TID: ctx.maing, Arg: &SortIndexArg{-2}})
 		// Row for GC or global state (specified with G=0)
-		ctx.emitFooter(&ViewerEvent{Name: "thread_sort_index", Phase: "M", Pid: procsSection, Tid: 0, Arg: &SortIndexArg{-1}})
+		ctx.emitFooter(&traceviewer.Event{Name: "thread_sort_index", Phase: "M", PID: procsSection, TID: 0, Arg: &SortIndexArg{-1}})
 	}
 
 	return nil
 }
 
-func (ctx *traceContext) emit(e *ViewerEvent) {
+func (ctx *traceContext) emit(e *traceviewer.Event) {
 	ctx.consumer.consumeViewerEvent(e, false)
 }
 
-func (ctx *traceContext) emitFooter(e *ViewerEvent) {
+func (ctx *traceContext) emitFooter(e *traceviewer.Event) {
 	ctx.consumer.consumeViewerEvent(e, true)
 }
 func (ctx *traceContext) emitSectionFooter(sectionID uint64, name string, priority int) {
-	ctx.emitFooter(&ViewerEvent{Name: "process_name", Phase: "M", Pid: sectionID, Arg: &NameArg{name}})
-	ctx.emitFooter(&ViewerEvent{Name: "process_sort_index", Phase: "M", Pid: sectionID, Arg: &SortIndexArg{priority}})
+	ctx.emitFooter(&traceviewer.Event{Name: "process_name", Phase: "M", PID: sectionID, Arg: &NameArg{name}})
+	ctx.emitFooter(&traceviewer.Event{Name: "process_sort_index", Phase: "M", PID: sectionID, Arg: &SortIndexArg{priority}})
 }
 
 func (ctx *traceContext) time(ev *trace.Event) float64 {
@@ -880,7 +851,7 @@ func (ctx *traceContext) emitSlice(ev *trace.Event, name string) {
 	ctx.emit(ctx.makeSlice(ev, name))
 }
 
-func (ctx *traceContext) makeSlice(ev *trace.Event, name string) *ViewerEvent {
+func (ctx *traceContext) makeSlice(ev *trace.Event, name string) *traceviewer.Event {
 	// If ViewerEvent.Dur is not a positive value,
 	// trace viewer handles it as a non-terminating time interval.
 	// Avoid it by setting the field with a small value.
@@ -888,12 +859,12 @@ func (ctx *traceContext) makeSlice(ev *trace.Event, name string) *ViewerEvent {
 	if ev.Link.Ts-ev.Ts <= 0 {
 		durationUsec = 0.0001 // 0.1 nanoseconds
 	}
-	sl := &ViewerEvent{
+	sl := &traceviewer.Event{
 		Name:     name,
 		Phase:    "X",
 		Time:     ctx.time(ev),
 		Dur:      durationUsec,
-		Tid:      ctx.proc(ev),
+		TID:      ctx.proc(ev),
 		Stack:    ctx.stack(ev.Stk),
 		EndStack: ctx.stack(ev.Link.Stk),
 	}
@@ -927,16 +898,16 @@ func (ctx *traceContext) emitTask(task *taskDesc, sortIndex int) {
 	taskName := task.name
 	durationUsec := float64(task.lastTimestamp()-task.firstTimestamp()) / 1e3
 
-	ctx.emitFooter(&ViewerEvent{Name: "thread_name", Phase: "M", Pid: tasksSection, Tid: taskRow, Arg: &NameArg{fmt.Sprintf("T%d %s", task.id, taskName)}})
-	ctx.emit(&ViewerEvent{Name: "thread_sort_index", Phase: "M", Pid: tasksSection, Tid: taskRow, Arg: &SortIndexArg{sortIndex}})
+	ctx.emitFooter(&traceviewer.Event{Name: "thread_name", Phase: "M", PID: tasksSection, TID: taskRow, Arg: &NameArg{fmt.Sprintf("T%d %s", task.id, taskName)}})
+	ctx.emit(&traceviewer.Event{Name: "thread_sort_index", Phase: "M", PID: tasksSection, TID: taskRow, Arg: &SortIndexArg{sortIndex}})
 	ts := float64(task.firstTimestamp()) / 1e3
-	sl := &ViewerEvent{
+	sl := &traceviewer.Event{
 		Name:  taskName,
 		Phase: "X",
 		Time:  ts,
 		Dur:   durationUsec,
-		Pid:   tasksSection,
-		Tid:   taskRow,
+		PID:   tasksSection,
+		TID:   taskRow,
 		Cname: pickTaskColor(task.id),
 	}
 	targ := TaskArg{ID: task.id}
@@ -953,8 +924,8 @@ func (ctx *traceContext) emitTask(task *taskDesc, sortIndex int) {
 
 	if task.create != nil && task.create.Type == trace.EvUserTaskCreate && task.create.Args[1] != 0 {
 		ctx.arrowSeq++
-		ctx.emit(&ViewerEvent{Name: "newTask", Phase: "s", Tid: task.create.Args[1], ID: ctx.arrowSeq, Time: ts, Pid: tasksSection})
-		ctx.emit(&ViewerEvent{Name: "newTask", Phase: "t", Tid: taskRow, ID: ctx.arrowSeq, Time: ts, Pid: tasksSection})
+		ctx.emit(&traceviewer.Event{Name: "newTask", Phase: "s", TID: task.create.Args[1], ID: ctx.arrowSeq, Time: ts, PID: tasksSection})
+		ctx.emit(&traceviewer.Event{Name: "newTask", Phase: "t", TID: taskRow, ID: ctx.arrowSeq, Time: ts, PID: tasksSection})
 	}
 }
 
@@ -975,12 +946,12 @@ func (ctx *traceContext) emitRegion(s regionDesc) {
 	scopeID := fmt.Sprintf("%x", id)
 	name := s.Name
 
-	sl0 := &ViewerEvent{
+	sl0 := &traceviewer.Event{
 		Category: "Region",
 		Name:     name,
 		Phase:    "b",
 		Time:     float64(s.firstTimestamp()) / 1e3,
-		Tid:      s.G, // only in goroutine-oriented view
+		TID:      s.G, // only in goroutine-oriented view
 		ID:       uint64(regionID),
 		Scope:    scopeID,
 		Cname:    pickTaskColor(s.TaskID),
@@ -990,12 +961,12 @@ func (ctx *traceContext) emitRegion(s regionDesc) {
 	}
 	ctx.emit(sl0)
 
-	sl1 := &ViewerEvent{
+	sl1 := &traceviewer.Event{
 		Category: "Region",
 		Name:     name,
 		Phase:    "e",
 		Time:     float64(s.lastTimestamp()) / 1e3,
-		Tid:      s.G,
+		TID:      s.G,
 		ID:       uint64(regionID),
 		Scope:    scopeID,
 		Cname:    pickTaskColor(s.TaskID),
@@ -1021,7 +992,7 @@ func (ctx *traceContext) emitHeapCounters(ev *trace.Event) {
 		diff = ctx.heapStats.nextGC - ctx.heapStats.heapAlloc
 	}
 	if tsWithinRange(ev.Ts, ctx.startTime, ctx.endTime) {
-		ctx.emit(&ViewerEvent{Name: "Heap", Phase: "C", Time: ctx.time(ev), Pid: 1, Arg: &heapCountersArg{ctx.heapStats.heapAlloc, diff}})
+		ctx.emit(&traceviewer.Event{Name: "Heap", Phase: "C", Time: ctx.time(ev), PID: 1, Arg: &heapCountersArg{ctx.heapStats.heapAlloc, diff}})
 	}
 	ctx.prevHeapStats = ctx.heapStats
 }
@@ -1037,7 +1008,7 @@ func (ctx *traceContext) emitGoroutineCounters(ev *trace.Event) {
 		return
 	}
 	if tsWithinRange(ev.Ts, ctx.startTime, ctx.endTime) {
-		ctx.emit(&ViewerEvent{Name: "Goroutines", Phase: "C", Time: ctx.time(ev), Pid: 1, Arg: &goroutineCountersArg{uint64(ctx.gstates[gRunning]), uint64(ctx.gstates[gRunnable]), uint64(ctx.gstates[gWaitingGC])}})
+		ctx.emit(&traceviewer.Event{Name: "Goroutines", Phase: "C", Time: ctx.time(ev), PID: 1, Arg: &goroutineCountersArg{uint64(ctx.gstates[gRunning]), uint64(ctx.gstates[gRunnable]), uint64(ctx.gstates[gWaitingGC])}})
 	}
 	ctx.prevGstates = ctx.gstates
 }
@@ -1052,7 +1023,7 @@ func (ctx *traceContext) emitThreadCounters(ev *trace.Event) {
 		return
 	}
 	if tsWithinRange(ev.Ts, ctx.startTime, ctx.endTime) {
-		ctx.emit(&ViewerEvent{Name: "Threads", Phase: "C", Time: ctx.time(ev), Pid: 1, Arg: &threadCountersArg{
+		ctx.emit(&traceviewer.Event{Name: "Threads", Phase: "C", Time: ctx.time(ev), PID: 1, Arg: &threadCountersArg{
 			Running:   ctx.threadStats.prunning,
 			InSyscall: ctx.threadStats.insyscall}})
 	}
@@ -1090,13 +1061,13 @@ func (ctx *traceContext) emitInstant(ev *trace.Event, name, category string) {
 		}
 		arg = &Arg{ev.Args[0]}
 	}
-	ctx.emit(&ViewerEvent{
+	ctx.emit(&traceviewer.Event{
 		Name:     name,
 		Category: category,
 		Phase:    "I",
 		Scope:    "t",
 		Time:     ctx.time(ev),
-		Tid:      ctx.proc(ev),
+		TID:      ctx.proc(ev),
 		Stack:    ctx.stack(ev.Stk),
 		Cname:    cname,
 		Arg:      arg})
@@ -1134,8 +1105,8 @@ func (ctx *traceContext) emitArrow(ev *trace.Event, name string) {
 	}
 
 	ctx.arrowSeq++
-	ctx.emit(&ViewerEvent{Name: name, Phase: "s", Tid: ctx.proc(ev), ID: ctx.arrowSeq, Time: ctx.time(ev), Stack: ctx.stack(ev.Stk), Cname: color})
-	ctx.emit(&ViewerEvent{Name: name, Phase: "t", Tid: ctx.proc(ev.Link), ID: ctx.arrowSeq, Time: ctx.time(ev.Link), Cname: color})
+	ctx.emit(&traceviewer.Event{Name: name, Phase: "s", TID: ctx.proc(ev), ID: ctx.arrowSeq, Time: ctx.time(ev), Stack: ctx.stack(ev.Stk), Cname: color})
+	ctx.emit(&traceviewer.Event{Name: name, Phase: "t", TID: ctx.proc(ev.Link), ID: ctx.arrowSeq, Time: ctx.time(ev.Link), Cname: color})
 }
 
 func (ctx *traceContext) stack(stk []*trace.Frame) int {
@@ -1157,7 +1128,7 @@ func (ctx *traceContext) buildBranch(parent frameNode, stk []*trace.Frame) int {
 		node.id = ctx.frameSeq
 		node.children = make(map[uint64]frameNode)
 		parent.children[frame.PC] = node
-		ctx.consumer.consumeViewerFrame(strconv.Itoa(node.id), ViewerFrame{fmt.Sprintf("%v:%v", frame.Fn, frame.Line), parent.id})
+		ctx.consumer.consumeViewerFrame(strconv.Itoa(node.id), traceviewer.Frame{Name: fmt.Sprintf("%v:%v", frame.Fn, frame.Line), Parent: parent.id})
 	}
 	return ctx.buildBranch(node, stk)
 }
@@ -1192,7 +1163,7 @@ type jsonWriter struct {
 }
 
 func viewerDataTraceConsumer(w io.Writer, start, end int64) traceConsumer {
-	frames := make(map[string]ViewerFrame)
+	frames := make(map[string]traceviewer.Frame)
 	enc := json.NewEncoder(w)
 	written := 0
 	index := int64(-1)
@@ -1204,7 +1175,7 @@ func viewerDataTraceConsumer(w io.Writer, start, end int64) traceConsumer {
 			enc.Encode(unit)
 			io.WriteString(w, ",")
 		},
-		consumeViewerEvent: func(v *ViewerEvent, required bool) {
+		consumeViewerEvent: func(v *traceviewer.Event, required bool) {
 			index++
 			if !required && (index < start || index > end) {
 				// not in the range. Skip!
@@ -1221,7 +1192,7 @@ func viewerDataTraceConsumer(w io.Writer, start, end int64) traceConsumer {
 			// Same should be applied to splittingTraceConsumer.
 			written++
 		},
-		consumeViewerFrame: func(k string, v ViewerFrame) {
+		consumeViewerFrame: func(k string, v traceviewer.Frame) {
 			frames[k] = v
 		},
 		flush: func() {
@@ -7,6 +7,7 @@
 package main
 
 import (
+	"cmd/internal/traceviewer"
 	"context"
 	"internal/trace"
 	"io/ioutil"
@@ -78,7 +79,7 @@ func TestGoroutineCount(t *testing.T) {
 	// Use the default viewerDataTraceConsumer but replace
 	// consumeViewerEvent to intercept the ViewerEvents for testing.
 	c := viewerDataTraceConsumer(ioutil.Discard, 0, 1<<63-1)
-	c.consumeViewerEvent = func(ev *ViewerEvent, _ bool) {
+	c.consumeViewerEvent = func(ev *traceviewer.Event, _ bool) {
 		if ev.Name == "Goroutines" {
 			cnt := ev.Arg.(*goroutineCountersArg)
 			if cnt.Runnable+cnt.Running > 2 {
@@ -165,7 +166,7 @@ func TestPreemptedMarkAssist(t *testing.T) {
 	c := viewerDataTraceConsumer(ioutil.Discard, 0, 1<<63-1)
 
 	marks := 0
-	c.consumeViewerEvent = func(ev *ViewerEvent, _ bool) {
+	c.consumeViewerEvent = func(ev *traceviewer.Event, _ bool) {
 		if strings.Contains(ev.Name, "MARK ASSIST") {
 			marks++
 		}
@@ -216,7 +217,7 @@ func TestFoo(t *testing.T) {
 	c := viewerDataTraceConsumer(ioutil.Discard, 0, 1<<63-1)
 
 	var logBeforeTaskEnd, logAfterTaskEnd bool
-	c.consumeViewerEvent = func(ev *ViewerEvent, _ bool) {
+	c.consumeViewerEvent = func(ev *traceviewer.Event, _ bool) {
 		if ev.Name == "log before task ends" {
 			logBeforeTaskEnd = true
 		}
@@ -8,6 +8,7 @@ package main
 
 import (
 	"bytes"
+	"cmd/internal/traceviewer"
 	traceparser "internal/trace"
 	"io/ioutil"
 	"runtime"
@@ -83,7 +84,7 @@ func TestGoroutineInSyscall(t *testing.T) {
 	// Check only one thread for the pipe read goroutine is
 	// considered in-syscall.
 	c := viewerDataTraceConsumer(ioutil.Discard, 0, 1<<63-1)
-	c.consumeViewerEvent = func(ev *ViewerEvent, _ bool) {
+	c.consumeViewerEvent = func(ev *traceviewer.Event, _ bool) {
 		if ev.Name == "Threads" {
 			arg := ev.Arg.(*threadCountersArg)
 			if arg.InSyscall > 1 {