[symbolize] Adopt 'as private as possible' interface
Change-Id: I7877250357486dc0fa93fc0eeb05a7a4647d78a8
diff --git a/symbolize/ast.go b/symbolize/ast.go
index 4ffa30e..483c4b6 100644
--- a/symbolize/ast.go
+++ b/symbolize/ast.go
@@ -69,7 +69,7 @@
type BacktraceElement struct {
vaddr uint64
num uint64
- info AddressInfo
+ info addressInfo
}
func (b *BacktraceElement) Accept(visitor NodeVisitor) {
@@ -79,7 +79,7 @@
-// PcElement is an AST node representing a pc element in the markup
+// PCElement is an AST node representing a pc element in the markup
type PCElement struct {
vaddr uint64
- info AddressInfo
+ info addressInfo
}
func (p *PCElement) Accept(visitor NodeVisitor) {
diff --git a/symbolize/demuxer_test.go b/symbolize/demuxer_test.go
index 21815b9..a78f650 100644
--- a/symbolize/demuxer_test.go
+++ b/symbolize/demuxer_test.go
@@ -65,7 +65,7 @@
// mock ids.txt
repo := NewRepo()
- repo.AddSource(NewMockSource("mock_source.txt", testBinaries))
+ repo.AddSource(testBinaries)
// make a demuxer
demuxer := NewDemuxer(repo, symbo)
@@ -110,7 +110,7 @@
-// mock ids.txt:q
+// mock ids.txt
repo := NewRepo()
- repo.AddSource(NewMockSource("mock_source.txt", testBinaries))
+ repo.AddSource(testBinaries)
// make a demuxer
demuxer := NewDemuxer(repo, symbo)
@@ -151,7 +151,7 @@
// mock ids.txt
repo := NewRepo()
- repo.AddSource(NewMockSource("mock_source.txt", testBinaries))
+ repo.AddSource(testBinaries)
// make a demuxer
demuxer := NewDemuxer(repo, symbo)
diff --git a/symbolize/filter.go b/symbolize/filter.go
index 057ff08..63fb4c7 100644
--- a/symbolize/filter.go
+++ b/symbolize/filter.go
@@ -52,7 +52,7 @@
modRelAddr uint64
}
-type AddressInfo struct {
+type addressInfo struct {
locs []SourceLocation
mod Module
seg Segment
@@ -85,7 +85,7 @@
// handles for llvm-symbolizer
symbolizer Symbolizer
// Symbolizer context
- symContext MappingStore
+ symContext mappingStore
modules map[uint64]Module
modNamesByBuildID map[string]string
// Symbolizer repository
@@ -93,9 +93,9 @@
}
-// FindInfoForAddress takes a process an in memory address and converts it to a source location.
+// findInfoForAddress takes an in-memory address and converts it to a source location.
-func (s *Filter) FindInfoForAddress(vaddr uint64) (AddressInfo, error) {
- info := AddressInfo{addr: vaddr}
- seg := s.symContext.Find(vaddr)
+func (s *Filter) findInfoForAddress(vaddr uint64) (addressInfo, error) {
+ info := addressInfo{addr: vaddr}
+ seg := s.symContext.find(vaddr)
if seg == nil {
return info, fmt.Errorf("could not find segment that covers 0x%x", vaddr)
}
@@ -134,13 +134,13 @@
}
-// Reset resets the filter so that it can work for a new process
+// reset resets the filter so that it can work for a new process
-func (s *Filter) Reset() {
+func (s *Filter) reset() {
s.modules = make(map[uint64]Module)
- s.symContext.Clear()
+ s.symContext.clear()
}
-// AddModule updates the filter state to inform it of a new module
+// addModule updates the filter state to inform it of a new module
-func (s *Filter) AddModule(m Module) error {
+func (s *Filter) addModule(m Module) error {
var err error
// Flag odd build IDs.
if modName, ok := s.modNamesByBuildID[m.build]; ok {
@@ -156,8 +156,8 @@
}
-// AddSegment updates the filter state to inform it of a new memory mapped location.
+// addSegment updates the filter state to inform it of a new memory mapped location.
-func (s *Filter) AddSegment(seg Segment) {
- s.symContext.Add(seg)
+func (s *Filter) addSegment(seg Segment) {
+ s.symContext.add(seg)
}
// Start tells the filter to start consuming input and produce output.
@@ -179,7 +179,7 @@
}
// Update AST with source locations.
for _, token := range res.line {
- token.Accept(&FilterVisitor{filter: f, lineno: elem.lineno, ctx: ctx})
+ token.Accept(&filterVisitor{filter: f, lineno: elem.lineno, ctx: ctx})
}
res.LogLine = elem.LogLine
out <- res
@@ -189,18 +189,18 @@
return out
}
-type FilterVisitor struct {
+type filterVisitor struct {
filter *Filter
lineno uint64
ctx context.Context
}
-func (f *FilterVisitor) warn(err error) {
+func (f *filterVisitor) warn(err error) {
logger.Warningf(f.ctx, "on line %d: %v", f.lineno, err)
}
-func (f *FilterVisitor) VisitBt(elem *BacktraceElement) {
- info, err := f.filter.FindInfoForAddress(elem.vaddr)
+func (f *filterVisitor) VisitBt(elem *BacktraceElement) {
+ info, err := f.filter.findInfoForAddress(elem.vaddr)
if err != nil {
// Don't be noisy about missing objects.
if _, ok := err.(*missingObjError); !ok {
@@ -210,8 +210,8 @@
elem.info = info
}
-func (f *FilterVisitor) VisitPc(elem *PCElement) {
- info, err := f.filter.FindInfoForAddress(elem.vaddr)
+func (f *filterVisitor) VisitPc(elem *PCElement) {
+ info, err := f.filter.findInfoForAddress(elem.vaddr)
if err != nil {
// Don't be noisy about missing objects.
if _, ok := err.(*missingObjError); !ok {
@@ -221,28 +221,28 @@
elem.info = info
}
-func (f *FilterVisitor) VisitColor(group *ColorCode) {
+func (f *filterVisitor) VisitColor(group *ColorCode) {
}
-func (f *FilterVisitor) VisitText(_ *Text) {
+func (f *filterVisitor) VisitText(_ *Text) {
// This must be implemented in order to meet the interface but it has no effect.
// This visitor is supposed to do all of the non-parsing parts of constructing the AST.
// There is nothing to do for Text however.
}
-func (f *FilterVisitor) VisitReset(elem *ResetElement) {
+func (f *filterVisitor) VisitReset(elem *ResetElement) {
-// TODO: Check if Reset had an effect and output that a pid reuse occured.
+// TODO: Check if reset had an effect and output that a pid reuse occurred.
- f.filter.Reset()
+ f.filter.reset()
}
-func (f *FilterVisitor) VisitModule(elem *ModuleElement) {
- err := f.filter.AddModule(elem.mod)
+func (f *filterVisitor) VisitModule(elem *ModuleElement) {
+ err := f.filter.addModule(elem.mod)
if err != nil {
f.warn(err)
}
}
-func (f *FilterVisitor) VisitMapping(elem *MappingElement) {
- f.filter.AddSegment(elem.seg)
+func (f *filterVisitor) VisitMapping(elem *MappingElement) {
+ f.filter.addSegment(elem.seg)
}
diff --git a/symbolize/filter_test.go b/symbolize/filter_test.go
index e3223e6..ebb47c6 100644
--- a/symbolize/filter_test.go
+++ b/symbolize/filter_test.go
@@ -67,26 +67,26 @@
// mock ids.txt
repo := NewRepo()
- repo.AddSource(NewMockSource("mock_source.txt", testBinaries))
+ repo.AddSource(testBinaries)
// make an actual filter using those two mock objects
filter := NewFilter(repo, symbo)
// parse some example lines
- err := filter.AddModule(Module{"libc.elf", "4fcb712aa6387724a9f465a32cd8c14b", 1})
+ err := filter.addModule(Module{"libc.elf", "4fcb712aa6387724a9f465a32cd8c14b", 1})
if err != nil {
t.Fatal(err)
}
- err = filter.AddModule(Module{"libcrypto.elf", "12ef5c50b3ed3599c07c02d4509311be", 2})
+ err = filter.addModule(Module{"libcrypto.elf", "12ef5c50b3ed3599c07c02d4509311be", 2})
if err != nil {
t.Fatal(err)
}
- filter.AddSegment(Segment{1, 0x12345000, 849596, "rx", 0x0})
- filter.AddSegment(Segment{2, 0x23456000, 539776, "rx", 0x80000})
+ filter.addSegment(Segment{1, 0x12345000, 849596, "rx", 0x0})
+ filter.addSegment(Segment{2, 0x23456000, 539776, "rx", 0x80000})
line := parseLine("\033[1m Error at {{{pc:0x123879c0}}}")
// print out a more precise form
for _, token := range line {
- token.Accept(&FilterVisitor{filter, 1, context.Background()})
+ token.Accept(&filterVisitor{filter, 1, context.Background()})
}
json, err := GetLineJson(line)
if err != nil {
@@ -145,7 +145,7 @@
})
// mock ids.txt
repo := NewRepo()
- err := repo.AddSource(NewMockSource("mock_source.txt", testBinaries))
+ err := repo.AddSource(testBinaries)
if err != nil {
t.Fatal(err)
}
@@ -154,13 +154,13 @@
filter := NewFilter(repo, symbo)
// add some context
- err = filter.AddModule(Module{"libc.so", "4fcb712aa6387724a9f465a32cd8c14b", 1})
+ err = filter.addModule(Module{"libc.so", "4fcb712aa6387724a9f465a32cd8c14b", 1})
if err != nil {
t.Fatal(err)
}
- filter.AddSegment(Segment{1, 0x12345000, 849596, "rx", 0x0})
+ filter.addSegment(Segment{1, 0x12345000, 849596, "rx", 0x0})
for _, token := range line {
- token.Accept(&FilterVisitor{filter, 1, context.Background()})
+ token.Accept(&filterVisitor{filter, 1, context.Background()})
}
json, err := GetLineJson(line)
@@ -203,7 +203,7 @@
})
// mock ids.txt
repo := NewRepo()
- err = repo.AddSource(NewMockSource("mock_source.txt", testBinaries))
+ err = repo.AddSource(testBinaries)
if err != nil {
t.Fatal(err)
@@ -214,16 +214,16 @@
// add some context
mod := Module{"libc.so", "4fcb712aa6387724a9f465a32cd8c14b", 1}
- err = filter.AddModule(mod)
+ err = filter.addModule(mod)
if err != nil {
t.Fatal(err)
}
seg := Segment{1, 0x12345000, 849596, "rx", 0x0}
- filter.AddSegment(seg)
+ filter.addSegment(seg)
addr := uint64(0x12389987)
- if info, err := filter.FindInfoForAddress(addr); err != nil {
+ if info, err := filter.findInfoForAddress(addr); err != nil {
t.Error("expected", nil, "got", err)
if len(info.locs) != 1 {
t.Error("expected", 1, "source location but got", len(info.locs))
@@ -246,10 +246,10 @@
// now forget the context
for _, token := range line {
- token.Accept(&FilterVisitor{filter, 1, context.Background()})
+ token.Accept(&filterVisitor{filter, 1, context.Background()})
}
- if _, err := filter.FindInfoForAddress(addr); err == nil {
+ if _, err := filter.findInfoForAddress(addr); err == nil {
t.Error("expected non-nil error but got", err)
}
}
diff --git a/symbolize/json.go b/symbolize/json.go
index 34a18c8..8f59028 100644
--- a/symbolize/json.go
+++ b/symbolize/json.go
@@ -8,23 +8,23 @@
"encoding/json"
)
-type JsonVisitor struct {
+type jsonVisitor struct {
stack []json.RawMessage
}
func GetLineJson(line []Node) ([]byte, error) {
- var j JsonVisitor
+ var j jsonVisitor
for _, token := range line {
token.Accept(&j)
}
return j.getJson()
}
-func (j *JsonVisitor) getJson() ([]byte, error) {
+func (j *jsonVisitor) getJson() ([]byte, error) {
return json.MarshalIndent(j.stack, "", "\t")
}
-func (j *JsonVisitor) VisitBt(elem *BacktraceElement) {
+func (j *jsonVisitor) VisitBt(elem *BacktraceElement) {
type loc struct {
File OptStr `json:"file"`
Line int `json:"line"`
@@ -52,7 +52,7 @@
j.stack = append(j.stack, msg)
}
-func (j *JsonVisitor) VisitPc(elem *PCElement) {
+func (j *jsonVisitor) VisitPc(elem *PCElement) {
loc := elem.info.locs[0]
msg, _ := json.Marshal(struct {
Tipe string `json:"type"`
@@ -70,7 +70,7 @@
j.stack = append(j.stack, msg)
}
-func (j *JsonVisitor) VisitColor(elem *ColorCode) {
+func (j *jsonVisitor) VisitColor(elem *ColorCode) {
out := j.stack
msg, _ := json.Marshal(struct {
Tipe string `json:"type"`
@@ -82,7 +82,7 @@
j.stack = append(out, msg)
}
-func (j *JsonVisitor) VisitText(elem *Text) {
+func (j *jsonVisitor) VisitText(elem *Text) {
msg, _ := json.Marshal(struct {
Tipe string `json:"type"`
Text string `json:"text"`
@@ -94,7 +94,7 @@
}
// TODO: update this for generalized modules
-func (j *JsonVisitor) VisitModule(elem *ModuleElement) {
+func (j *jsonVisitor) VisitModule(elem *ModuleElement) {
msg, _ := json.Marshal(struct {
Tipe string `json:"type"`
Name string `json:"name"`
@@ -109,7 +109,7 @@
j.stack = append(j.stack, msg)
}
-func (j *JsonVisitor) VisitReset(elem *ResetElement) {
+func (j *jsonVisitor) VisitReset(elem *ResetElement) {
msg, _ := json.Marshal(map[string]string{
"type": "reset",
})
@@ -117,7 +117,7 @@
}
// TODO: update this for generalized loads
-func (j *JsonVisitor) VisitMapping(elem *MappingElement) {
+func (j *jsonVisitor) VisitMapping(elem *MappingElement) {
msg, _ := json.Marshal(struct {
Tipe string `json:"type"`
Mod uint64 `json:"mod"`
diff --git a/symbolize/mapstore.go b/symbolize/mapstore.go
index 817788e..4dab53e 100644
--- a/symbolize/mapstore.go
+++ b/symbolize/mapstore.go
@@ -27,7 +27,7 @@
-// allows them to be efficentily looked up by an address within them.
-// MappingStore quasi-efficently indexes segments by their start address.
+// allows them to be efficiently looked up by an address within them.
+// mappingStore quasi-efficiently indexes segments by their start address.
-type MappingStore struct {
+type mappingStore struct {
segments []Segment
sorted int
}
@@ -54,7 +54,7 @@
// sortAndFind sorts the unsorted range of segments, finds the missing element
// and then merges the two sorted ranges so that sortAndFind won't have to be
// called again for the same element.
-func (m *MappingStore) sortAndFind(vaddr uint64) *Segment {
+func (m *mappingStore) sortAndFind(vaddr uint64) *Segment {
sort.Sort(byVAddr(m.segments[m.sorted:]))
seg := findSegment(m.segments[m.sorted:], vaddr)
newMods := merge(m.segments[m.sorted:], m.segments[:m.sorted])
@@ -77,7 +77,7 @@
-// Find first trys to find the desired segment in the sorted segment. If the segment
+// find first tries to find the desired segment in the sorted segments. If the segment
// can't be found we consult the unsorted part and update the structure.
-func (m *MappingStore) Find(vaddr uint64) *Segment {
+func (m *mappingStore) find(vaddr uint64) *Segment {
out := findSegment(m.segments[:m.sorted], vaddr)
if out == nil {
out = m.sortAndFind(vaddr)
@@ -86,12 +86,12 @@
}
-// Add adds a segment to the segment.
+// add adds a segment to the store.
-func (m *MappingStore) Add(seg Segment) {
+func (m *mappingStore) add(seg Segment) {
m.segments = append(m.segments, seg)
}
-// Clear clears the mapping store of all previous information
+// clear clears the mapping store of all previous information
-func (m *MappingStore) Clear() {
+func (m *mappingStore) clear() {
m.segments = nil
m.sorted = 0
}
diff --git a/symbolize/mock_elf.go b/symbolize/mock_elf.go
index c55d7c1..1bc0b32 100644
--- a/symbolize/mock_elf.go
+++ b/symbolize/mock_elf.go
@@ -8,29 +8,16 @@
"fuchsia.googlesource.com/tools/elflib"
)
+type mockSource []elflib.BinaryFileRef
+
// Common binaries used for tests in this package.
-var testBinaries = []elflib.BinaryFileRef{
+var testBinaries = mockSource{
{Filepath: "testdata/gobug.elf", BuildID: "5bf6a28a259b95b4f20ffbcea0cbb149"},
{Filepath: "testdata/libc.elf", BuildID: "4fcb712aa6387724a9f465a32cd8c14b"},
{Filepath: "testdata/libcrypto.elf", BuildID: "12ef5c50b3ed3599c07c02d4509311be"},
}
-type mockSource struct {
- name string
- binaries []elflib.BinaryFileRef
-}
-
-// NewMockSource creates a new Source for testing.
-func NewMockSource(name string, binaries []elflib.BinaryFileRef) BinaryFileSource {
- return &mockSource{name: name, binaries: binaries}
-}
-
-// Name implements Source
-func (m *mockSource) Name() string {
- return m.name
-}
-
-// GetBinaries implements Source.
+// getBinaries implements BinaryFileSource.
-func (m *mockSource) GetBinaries() ([]elflib.BinaryFileRef, error) {
- return m.binaries, nil
+func (m mockSource) getBinaries() ([]elflib.BinaryFileRef, error) {
+ return []elflib.BinaryFileRef(m), nil
}
diff --git a/symbolize/parser.go b/symbolize/parser.go
index b0d7d4d..89cdbc0 100644
--- a/symbolize/parser.go
+++ b/symbolize/parser.go
@@ -48,32 +48,32 @@
type ParseLineFunc func(msg string) []Node
func GetLineParser() ParseLineFunc {
- var b RegexpTokenizerBuilder
+ var b regexpTokenizerBuilder
out := []Node{}
dec := decRegexp
ptr := ptrRegexp
str := strRegexp
num := fmt.Sprintf("(?:%s|%s)", dec, ptr)
- b.AddRule(fmt.Sprintf("{{{bt:(%s):(%s)}}}", dec, ptr), func(args ...string) {
+ b.addRule(fmt.Sprintf("{{{bt:(%s):(%s)}}}", dec, ptr), func(args ...string) {
out = append(out, &BacktraceElement{
num: str2dec(args[1]),
vaddr: str2int(args[2]),
})
})
- b.AddRule(fmt.Sprintf("{{{pc:(%s)}}}", ptr), func(args ...string) {
+ b.addRule(fmt.Sprintf("{{{pc:(%s)}}}", ptr), func(args ...string) {
out = append(out, &PCElement{vaddr: str2int(args[1])})
})
- b.AddRule(fmt.Sprintf("\033\\[(%s)m", dec), func(args ...string) {
+ b.addRule(fmt.Sprintf("\033\\[(%s)m", dec), func(args ...string) {
out = append(out, &ColorCode{color: str2dec(args[1])})
})
- b.AddRule(fmt.Sprintf(`{{{module:(%s):(%s):elf:(%s)}}}`, num, str, str), func(args ...string) {
+ b.addRule(fmt.Sprintf(`{{{module:(%s):(%s):elf:(%s)}}}`, num, str, str), func(args ...string) {
out = append(out, &ModuleElement{mod: Module{
id: str2int(args[1]),
name: args[2],
build: args[3],
}})
})
- b.AddRule(fmt.Sprintf(`{{{mmap:(%s):(%s):load:(%s):(%s):(%s)}}}`, ptr, num, num, str, ptr), func(args ...string) {
+ b.addRule(fmt.Sprintf(`{{{mmap:(%s):(%s):load:(%s):(%s):(%s)}}}`, ptr, num, num, str, ptr), func(args ...string) {
out = append(out, &MappingElement{seg: Segment{
vaddr: str2int(args[1]),
size: str2int(args[2]),
@@ -82,10 +82,10 @@
modRelAddr: str2int(args[5]),
}})
})
- b.AddRule(`{{{reset}}}`, func(args ...string) {
+ b.addRule(`{{{reset}}}`, func(args ...string) {
out = append(out, &ResetElement{})
})
- tokenizer, err := b.Compile(func(text string) {
+ tokenizer, err := b.compile(func(text string) {
out = append(out, &Text{text: text})
})
if err != nil {
@@ -93,7 +93,7 @@
}
return func(msg string) []Node {
out = nil
- tokenizer.Run(msg)
+ tokenizer.run(msg)
return out
}
}
@@ -102,12 +102,12 @@
out := make(chan InputLine)
// This is not used for demuxing. It is a human readable line number.
var lineno uint64 = 1
- var b RegexpTokenizerBuilder
+ var b regexpTokenizerBuilder
space := spaceRegexp
float := floatRegexp
dec := decRegexp
tags := `[^\[\]]*`
- b.AddRule(fmt.Sprintf(`\[(%s)\]%s(%s)\.(%s)>%s(.*)$`, float, space, dec, dec, space), func(args ...string) {
+ b.addRule(fmt.Sprintf(`\[(%s)\]%s(%s)\.(%s)>%s(.*)$`, float, space, dec, dec, space), func(args ...string) {
var hdr logHeader
var line InputLine
hdr.time = str2float(args[1])
@@ -119,7 +119,7 @@
line.msg = args[4]
out <- line
})
- b.AddRule(fmt.Sprintf(`\[(%s)\]\[(%s)\]\[(%s)\]\[(%s)\]%s(.*)$`, float, dec, dec, tags, space), func(args ...string) {
+ b.addRule(fmt.Sprintf(`\[(%s)\]\[(%s)\]\[(%s)\]\[(%s)\]%s(.*)$`, float, dec, dec, tags, space), func(args ...string) {
var hdr sysLogHeader
var line InputLine
hdr.time = str2float(args[1])
@@ -132,7 +132,7 @@
line.msg = args[5]
out <- line
})
- tokenizer, err := b.Compile(func(text string) {
+ tokenizer, err := b.compile(func(text string) {
var line InputLine
line.source = dummySource{}
line.msg = text
@@ -150,7 +150,7 @@
case <-ctx.Done():
return
default:
- tokenizer.Run(scanner.Text())
+ tokenizer.run(scanner.Text())
}
}
}()
diff --git a/symbolize/parserstate.go b/symbolize/parserstate.go
deleted file mode 100644
index 2dfd2e4..0000000
--- a/symbolize/parserstate.go
+++ /dev/null
@@ -1,102 +0,0 @@
-// Copyright 2018 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package symbolize
-
-import (
- "fmt"
- "reflect"
- "strconv"
- "strings"
-)
-
-type ParserState string
-type Parser func(*ParserState) interface{}
-
-func (b *ParserState) peek(what string) bool {
- if len(*b) < len(what) {
- return false
- }
- return (*b)[:len(what)] == ParserState(what)
-}
-
-func (b *ParserState) expect(what string) bool {
- if b.peek(what) {
- *b = (*b)[len(what):]
- return true
- }
- return false
-}
-
-func (b *ParserState) before(what string) (string, error) {
- idx := strings.Index(string(*b), what)
- if idx == -1 {
- return "", fmt.Errorf("expected '%s'", what)
- }
- str := (*b)[:idx]
- *b = (*b)[idx+len(what):]
- return string(str), nil
-}
-
-func (b *ParserState) decBefore(what string) (uint64, error) {
- out, err := b.before(what)
- if err != nil {
- return 0, err
- }
- return strconv.ParseUint(out, 10, 64)
-}
-
-func (b *ParserState) intBefore(what string) (uint64, error) {
- out, err := b.before(what)
- if err != nil {
- return 0, err
- }
- return strconv.ParseUint(out, 0, 64)
-}
-
-func (b *ParserState) floatBefore(what string) (float64, error) {
- out, err := b.before(what)
- if err != nil {
- return 0.0, err
- }
- return strconv.ParseFloat(out, 64)
-}
-
-func (b *ParserState) try(p Parser) interface{} {
- save := *b
- out := p(b)
- if out == nil {
- *b = save
- }
- return out
-}
-
-func (b *ParserState) prefix(prefix string, p Parser) interface{} {
- return b.try(func(b *ParserState) interface{} {
- if b.expect(prefix) {
- return p(b)
- }
- return nil
- })
-}
-
-func (b *ParserState) choice(toTry ...Parser) interface{} {
- for _, parser := range toTry {
- if node := parser(b); node != nil {
- return node
- }
- }
- return nil
-}
-
-func (b *ParserState) many(out interface{}, p Parser) {
- iout := reflect.ValueOf(out).Elem()
- for n := p(b); n != nil; n = p(b) {
- iout.Set(reflect.Append(iout, reflect.ValueOf(n)))
- }
-}
-
-func (b *ParserState) whitespace() {
- *b = ParserState(strings.TrimLeft(string(*b), " \t\n\r\v\f"))
-}
diff --git a/symbolize/parserstate_test.go b/symbolize/parserstate_test.go
deleted file mode 100644
index f1a8728..0000000
--- a/symbolize/parserstate_test.go
+++ /dev/null
@@ -1,146 +0,0 @@
-package symbolize
-
-import "testing"
-
-func TestPeek(t *testing.T) {
- buf := ParserState("this is a test")
- if !buf.peek("this") {
- t.Error("'this' is at the start")
- }
- if buf.peek("bob") {
- t.Error("'bob' is not at the start")
- }
- if buf.peek("thistle") {
- t.Error("'thistle' is not at the start")
- }
-}
-
-func TestExpect(t *testing.T) {
- buf := ParserState("this is a test")
- if !buf.expect("this") {
- t.Error("'this' is at the start")
- }
- if string(buf) != " is a test" {
- t.Error("expected ", " is a test", "got", string(buf))
- }
- if buf.expect("is") {
- t.Error("a space is missing")
- }
- if !buf.expect(" is") || !buf.expect(" a ") {
- t.Error("something is wrong")
- }
- if string(buf) != "test" {
- t.Error("expected", "test", "got", string(buf))
- }
- buf.expect("blarg")
- if string(buf) != "test" {
- t.Error("input consumed when it should not have been")
- }
-}
-
-func TestBefore(t *testing.T) {
- buf := ParserState("this is a test")
- v1, err := buf.before(" ")
- if err != nil {
- t.Error(err)
- }
- if v1 != "this" {
- t.Error("expected", "this", "got", v1)
- }
- v2, err := buf.before(" ")
- if err != nil {
- t.Error(err)
- }
- if v2 != "is" {
- t.Error("expected", "is", "got", v2)
- }
- if string(buf) != "a test" {
- t.Error("expected", "a test", "got", string(buf))
- }
- _, err = buf.before("#")
- if err == nil {
- t.Error("expected an error but got none")
- }
- if string(buf) != "a test" {
- t.Error("input consumed when it should not have been")
- }
-}
-
-func TestDecBefore(t *testing.T) {
- buf := ParserState("10:020:0030;")
- v1, err1 := buf.decBefore(":")
- v2, err2 := buf.decBefore(":")
- v3, err3 := buf.decBefore(";")
- if err1 != nil || err2 != nil || err3 != nil {
- t.Error(err1, err2, err3)
- }
- if v1 != 10 || v2 != 20 || v3 != 30 {
- t.Error("expected", []uint64{10, 20, 30}, "got", []uint64{v1, v2, v3})
- }
- if len(buf) != 0 {
- t.Error("not all input was consumed")
- }
-}
-
-func TestIntBefore(t *testing.T) {
- buf := ParserState("10:0x20:30;")
- v1, err1 := buf.intBefore(":")
- v2, err2 := buf.intBefore(":")
- v3, err3 := buf.intBefore(";")
- if err1 != nil || err2 != nil || err3 != nil {
- t.Error(err1, err2, err3)
- }
- if v1 != 10 || v2 != 0x20 || v3 != 30 {
- t.Error("expected", []uint64{10, 0x20, 30}, "got", []uint64{v1, v2, v3})
- }
- if len(buf) != 0 {
- t.Error("not all input was consumed")
- }
-}
-
-func TestFloatBefore(t *testing.T) {
- // Note these floats are exactly represented.
- buf := ParserState("0.125:.25:300.50;")
- v1, err1 := buf.floatBefore(":")
- v2, err2 := buf.floatBefore(":")
- v3, err3 := buf.floatBefore(";")
- if err1 != nil || err2 != nil || err3 != nil {
- t.Error(err1, err2, err3)
- }
- if v1 != 0.125 || v2 != 0.25 || v3 != 300.5 {
- t.Error("expected", []float64{0.125, 0.25, 300.5}, "got", []float64{v1, v2, v3})
- }
- if len(buf) != 0 {
- t.Error("not all input was consumed")
- }
-}
-
-func TestTry(t *testing.T) {
- buf := ParserState("foo bar baz")
- buf.try(func(buf *ParserState) interface{} {
- buf.expect("foo")
- return nil
- })
- if string(buf) != "foo bar baz" {
- t.Error("try should not consume input on failure")
- }
- node := buf.try(func(buf *ParserState) interface{} {
- buf.expect("foo ")
- buf.before(" ")
- buf.before("z")
- return "dummy string"
- })
- if node == nil || string(buf) != "" {
- t.Error("try did not succeed when it should have")
- }
-}
-
-func TestPrefix(t *testing.T) {
- buf := ParserState("foo bar baz")
- node := buf.prefix("baz", func(buf *ParserState) interface{} {
- return ""
- })
- if node != nil || buf != "foo bar baz" {
- t.Error("prefix messed up")
- }
-}
diff --git a/symbolize/presenter.go b/symbolize/presenter.go
index ccdc98e..aec8ddb 100644
--- a/symbolize/presenter.go
+++ b/symbolize/presenter.go
@@ -50,7 +50,7 @@
}
}
-func printSrcLoc(out io.Writer, loc SourceLocation, info AddressInfo) {
+func printSrcLoc(out io.Writer, loc SourceLocation, info addressInfo) {
modRelAddr := info.addr - info.seg.vaddr + info.seg.modRelAddr
if !loc.function.IsEmpty() {
fmt.Fprintf(out, "%s at ", loc.function)
diff --git a/symbolize/regextokenizer.go b/symbolize/regextokenizer.go
index f61aa01..be0ca21 100644
--- a/symbolize/regextokenizer.go
+++ b/symbolize/regextokenizer.go
@@ -21,7 +21,7 @@
-// RegexpTokenizer allows for the splitting of input into tokens based on a list
+// regexpTokenizer allows for the splitting of input into tokens based on a list
// of regexs a la (f)lex.
-type RegexpTokenizer struct {
+type regexpTokenizer struct {
regexs []regexInfo
master *regexp.Regexp
defaultAction func(string)
@@ -33,20 +33,20 @@
}
-// RegexpTokenizerBuilder is the means by which a RegexpTokenizer can be constructed.
+// regexpTokenizerBuilder is the means by which a regexpTokenizer can be constructed.
-type RegexpTokenizerBuilder struct {
+type regexpTokenizerBuilder struct {
rules []rule
}
// TODO: Add a way to infer the automatic conversions that need to happen from
// a user supplied function's type via reflection.
-// Rule adds a new regex to the builder
+// addRule adds a new regex rule to the builder
-func (r *RegexpTokenizerBuilder) AddRule(regex string, action actionFunc) {
+func (r *regexpTokenizerBuilder) addRule(regex string, action actionFunc) {
r.rules = append(r.rules, rule{regex, action})
}
-// End compiles the list of regular expressions and actions into a RegexpTokenizer
+// compile compiles the list of regular expressions and actions into a regexpTokenizer
-func (r *RegexpTokenizerBuilder) Compile(defaultAction func(string)) (*RegexpTokenizer, error) {
- out := RegexpTokenizer{defaultAction: defaultAction}
+func (r *regexpTokenizerBuilder) compile(defaultAction func(string)) (*regexpTokenizer, error) {
+ out := regexpTokenizer{defaultAction: defaultAction}
// Start groupIndex at 1 to account for the master regexp
groupIndex := 1
regexStrs := []string{}
@@ -72,7 +72,7 @@
}
-// Run tokenizes 'input'
+// run tokenizes 'input'
-func (r *RegexpTokenizer) Run(input string) {
+func (r *regexpTokenizer) run(input string) {
for len(input) > 0 {
locs := r.master.FindStringSubmatchIndex(input)
if locs == nil {
diff --git a/symbolize/regextokenizer_test.go b/symbolize/regextokenizer_test.go
index baba2b6..0c6dca2 100644
--- a/symbolize/regextokenizer_test.go
+++ b/symbolize/regextokenizer_test.go
@@ -11,24 +11,24 @@
)
func TestRegexTokenize(t *testing.T) {
- var builder RegexpTokenizerBuilder
+ var builder regexpTokenizerBuilder
var out bytes.Buffer
- builder.AddRule("{a+}", func(args ...string) {
+ builder.addRule("{a+}", func(args ...string) {
fmt.Fprintf(&out, "a+ case: %v\n", args)
})
- builder.AddRule("{b+}", func(args ...string) {
+ builder.addRule("{b+}", func(args ...string) {
fmt.Fprintf(&out, "b+ case: %v\n", args)
})
- builder.AddRule("{(x)(y)(z)}", func(args ...string) {
+ builder.addRule("{(x)(y)(z)}", func(args ...string) {
fmt.Fprintf(&out, "xyz case: %v\n", args)
})
- tokenizer, err := builder.Compile(func(str string) {
+ tokenizer, err := builder.compile(func(str string) {
fmt.Fprintf(&out, "default case: %s\n", str)
})
if err != nil {
t.Fatal(err)
}
- tokenizer.Run("blarg{a}foo{bbbbb}{xyz}{aa}{aa}baz[test]rest")
+ tokenizer.run("blarg{a}foo{bbbbb}{xyz}{aa}{aa}baz[test]rest")
expected := `default case: blarg
a+ case: [{a}]
default case: foo
diff --git a/symbolize/repo.go b/symbolize/repo.go
index 5415e6a..060db8c 100644
--- a/symbolize/repo.go
+++ b/symbolize/repo.go
@@ -18,13 +18,11 @@
// assumes that all binaries from a source will be files on the same system.
-// Eventully we might relax this assumption.
+// Eventually we might relax this assumption.
type BinaryFileSource interface {
- // Name is deprecated. Do not use.
- Name() string
// Extracts the set of binaries from this source.
- GetBinaries() ([]elflib.BinaryFileRef, error)
+ getBinaries() ([]elflib.BinaryFileRef, error)
}
-// IDsSource is a BinaryFileSource parsed from ids.txt
+// idsSource is a BinaryFileSource parsed from ids.txt
type idsSource struct {
pathToIDs string
}
@@ -33,11 +31,11 @@
return &idsSource{pathToIDs}
}
-func (i *idsSource) Name() string {
+func (i *idsSource) name() string {
return i.pathToIDs
}
-func (i *idsSource) GetBinaries() ([]elflib.BinaryFileRef, error) {
+func (i *idsSource) getBinaries() ([]elflib.BinaryFileRef, error) {
file, err := os.Open(i.pathToIDs)
if err != nil {
return nil, err
@@ -74,7 +72,7 @@
}
func (s *SymbolizerRepo) loadSource(source BinaryFileSource) error {
- bins, err := source.GetBinaries()
+ bins, err := source.getBinaries()
if err != nil {
return err
}