
Merge branch 'release/0.4.1'

Anton Kaliaev committed 7 years ago
commit b854baa1fc
No known key found for this signature in database. GPG Key ID: 7B6881D965918214
15 changed files with 267 additions and 201 deletions
  1. +19   -0    .editorconfig
  2. +12   -0    CHANGELOG.md
  3. +4    -6    autofile/group.go
  4. +101  -157  autofile/group_test.go
  5. +18   -26   cli/setup_test.go
  6. +1    -1    clist/clist_test.go
  7. +11   -0    common/cmap.go
  8. +53   -0    common/cmap_test.go
  9. +8    -3    common/os.go
 10. +3    -0    db/mem_db.go
 11. +12   -3    log/tm_logger.go
 12. +14   -0    log/tm_logger_test.go
 13. +6    -2    log/tmfmt_logger.go
 14. +4    -2    log/tmfmt_logger_test.go
 15. +1    -1    version/version.go

+ 19
- 0
.editorconfig

@@ -0,0 +1,19 @@
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[Makefile]
indent_style = tab
[*.sh]
indent_style = tab
[*.proto]
indent_style = space
indent_size = 2

+ 12
- 0
CHANGELOG.md

@@ -1,5 +1,17 @@
# Changelog
## 0.4.1 (November 27, 2017)
FEATURES:
- [common] `Keys()` method on `CMap`
IMPROVEMENTS:
- [log] complex types now encoded as "%+v" by default if `String()` method is undefined (previously resulted in error)
- [log] logger logs its own errors
BUG FIXES:
- [common] fixed `Kill()` to build on Windows (Windows does not have `syscall.Kill`)
## 0.4.0 (October 26, 2017)
BREAKING:


+ 4
- 6
autofile/group.go

@@ -596,14 +596,12 @@ func (gr *GroupReader) Read(p []byte) (n int, err error) {
nn, err = gr.curReader.Read(p[n:])
n += nn
if err == io.EOF {
// Open the next file
if err1 := gr.openFile(gr.curIndex + 1); err1 != nil {
return n, err1
}
if n >= lenP {
return n, nil
} else {
continue
} else { // Open the next file
if err1 := gr.openFile(gr.curIndex + 1); err1 != nil {
return n, err1
}
}
} else if err != nil {
return n, err
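
The reshuffled io.EOF branch above opens the next chunk file only when the destination slice still has room, and returns as soon as the slice is full. Below is a minimal, self-contained sketch of that read-across-sources pattern; it uses a plain slice of io.Readers instead of the real Group/GroupReader types, and all names are illustrative only.

package sketch

import "io"

// chainReader mirrors the control flow of GroupReader.Read above: on io.EOF it
// either returns (destination full) or advances to the next underlying source,
// which in the real type corresponds to opening the next rolled file.
type chainReader struct {
    sources []io.Reader
    cur     int
}

func (c *chainReader) Read(p []byte) (int, error) {
    n := 0
    for {
        if c.cur >= len(c.sources) {
            return n, io.EOF // no sources left at all
        }
        nn, err := c.sources[c.cur].Read(p[n:])
        n += nn
        switch {
        case err == io.EOF:
            if n >= len(p) {
                return n, nil // slice satisfied; stay on the current source
            }
            c.cur++ // "open the next file" and keep filling p
        case err != nil:
            return n, err
        case n >= len(p):
            return n, nil
        }
    }
}

TestGroupReaderRead and TestGroupReaderRead2 further down exercise both halves of this contract: a full read returns n == len(p) with a nil error, while a short read over the whole group returns the short count together with io.EOF.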


+ 101
- 157
autofile/group_test.go

@@ -1,8 +1,8 @@
package autofile
import (
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"os"
@@ -10,51 +10,37 @@ import (
"strings"
"testing"
. "github.com/tendermint/tmlibs/common"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
cmn "github.com/tendermint/tmlibs/common"
)
// NOTE: Returned group has ticker stopped
func createTestGroup(t *testing.T, headSizeLimit int64) *Group {
testID := RandStr(12)
testID := cmn.RandStr(12)
testDir := "_test_" + testID
err := EnsureDir(testDir, 0700)
if err != nil {
t.Fatal("Error creating dir", err)
}
err := cmn.EnsureDir(testDir, 0700)
require.NoError(t, err, "Error creating dir")
headPath := testDir + "/myfile"
g, err := OpenGroup(headPath)
if err != nil {
t.Fatal("Error opening Group", err)
}
require.NoError(t, err, "Error opening Group")
g.SetHeadSizeLimit(headSizeLimit)
g.stopTicker()
if g == nil {
t.Fatal("Failed to create Group")
}
require.NotEqual(t, nil, g, "Failed to create Group")
return g
}
func destroyTestGroup(t *testing.T, g *Group) {
err := os.RemoveAll(g.Dir)
if err != nil {
t.Fatal("Error removing test Group directory", err)
}
require.NoError(t, err, "Error removing test Group directory")
}
func assertGroupInfo(t *testing.T, gInfo GroupInfo, minIndex, maxIndex int, totalSize, headSize int64) {
if gInfo.MinIndex != minIndex {
t.Errorf("GroupInfo MinIndex expected %v, got %v", minIndex, gInfo.MinIndex)
}
if gInfo.MaxIndex != maxIndex {
t.Errorf("GroupInfo MaxIndex expected %v, got %v", maxIndex, gInfo.MaxIndex)
}
if gInfo.TotalSize != totalSize {
t.Errorf("GroupInfo TotalSize expected %v, got %v", totalSize, gInfo.TotalSize)
}
if gInfo.HeadSize != headSize {
t.Errorf("GroupInfo HeadSize expected %v, got %v", headSize, gInfo.HeadSize)
}
assert.Equal(t, minIndex, gInfo.MinIndex)
assert.Equal(t, maxIndex, gInfo.MaxIndex)
assert.Equal(t, totalSize, gInfo.TotalSize)
assert.Equal(t, headSize, gInfo.HeadSize)
}
func TestCheckHeadSizeLimit(t *testing.T) {
@@ -65,10 +51,8 @@ func TestCheckHeadSizeLimit(t *testing.T) {
// Write 1000 bytes 999 times.
for i := 0; i < 999; i++ {
err := g.WriteLine(RandStr(999))
if err != nil {
t.Fatal("Error appending to head", err)
}
err := g.WriteLine(cmn.RandStr(999))
require.NoError(t, err, "Error appending to head")
}
g.Flush()
assertGroupInfo(t, g.ReadGroupInfo(), 0, 0, 999000, 999000)
@@ -78,9 +62,8 @@ func TestCheckHeadSizeLimit(t *testing.T) {
assertGroupInfo(t, g.ReadGroupInfo(), 0, 0, 999000, 999000)
// Write 1000 more bytes.
if err := g.WriteLine(RandStr(999)); err != nil {
t.Fatal("Error appending to head", err)
}
err := g.WriteLine(cmn.RandStr(999))
require.NoError(t, err, "Error appending to head")
g.Flush()
// Calling checkHeadSizeLimit this time rolls it.
@@ -88,9 +71,8 @@ func TestCheckHeadSizeLimit(t *testing.T) {
assertGroupInfo(t, g.ReadGroupInfo(), 0, 1, 1000000, 0)
// Write 1000 more bytes.
if err := g.WriteLine(RandStr(999)); err != nil {
t.Fatal("Error appending to head", err)
}
err = g.WriteLine(cmn.RandStr(999))
require.NoError(t, err, "Error appending to head")
g.Flush()
// Calling checkHeadSizeLimit does nothing.
@@ -99,9 +81,8 @@ func TestCheckHeadSizeLimit(t *testing.T) {
// Write 1000 bytes 999 times.
for i := 0; i < 999; i++ {
if err := g.WriteLine(RandStr(999)); err != nil {
t.Fatal("Error appending to head", err)
}
err = g.WriteLine(cmn.RandStr(999))
require.NoError(t, err, "Error appending to head")
}
g.Flush()
assertGroupInfo(t, g.ReadGroupInfo(), 0, 1, 2000000, 1000000)
@@ -111,10 +92,8 @@ func TestCheckHeadSizeLimit(t *testing.T) {
assertGroupInfo(t, g.ReadGroupInfo(), 0, 2, 2000000, 0)
// Write 1000 more bytes.
_, err := g.Head.Write([]byte(RandStr(999) + "\n"))
if err != nil {
t.Fatal("Error appending to head", err)
}
_, err = g.Head.Write([]byte(cmn.RandStr(999) + "\n"))
require.NoError(t, err, "Error appending to head")
g.Flush()
assertGroupInfo(t, g.ReadGroupInfo(), 0, 2, 2001000, 1000)
@@ -134,16 +113,12 @@ func TestSearch(t *testing.T) {
for i := 0; i < 100; i++ {
// The random junk at the end ensures that this INFO linen
// is equally likely to show up at the end.
_, err := g.Head.Write([]byte(Fmt("INFO %v %v\n", i, RandStr(123))))
if err != nil {
t.Error("Failed to write to head")
}
_, err := g.Head.Write([]byte(fmt.Sprintf("INFO %v %v\n", i, cmn.RandStr(123))))
require.NoError(t, err, "Failed to write to head")
g.checkHeadSizeLimit()
for j := 0; j < 10; j++ {
_, err := g.Head.Write([]byte(RandStr(123) + "\n"))
if err != nil {
t.Error("Failed to write to head")
}
_, err1 := g.Head.Write([]byte(cmn.RandStr(123) + "\n"))
require.NoError(t, err1, "Failed to write to head")
g.checkHeadSizeLimit()
}
}
@@ -173,17 +148,11 @@ func TestSearch(t *testing.T) {
for i := 0; i < 100; i++ {
t.Log("Testing for i", i)
gr, match, err := g.Search("INFO", makeSearchFunc(i))
if err != nil {
t.Fatal("Failed to search for line:", err)
}
if !match {
t.Error("Expected Search to return exact match")
}
require.NoError(t, err, "Failed to search for line")
assert.True(t, match, "Expected Search to return exact match")
line, err := gr.ReadLine()
if err != nil {
t.Fatal("Failed to read line after search", err)
}
if !strings.HasPrefix(line, Fmt("INFO %v ", i)) {
require.NoError(t, err, "Failed to read line after search")
if !strings.HasPrefix(line, fmt.Sprintf("INFO %v ", i)) {
t.Fatal("Failed to get correct line")
}
// Make sure we can continue to read from there.
@@ -203,7 +172,7 @@ func TestSearch(t *testing.T) {
if !strings.HasPrefix(line, "INFO ") {
continue
}
if !strings.HasPrefix(line, Fmt("INFO %v ", cur)) {
if !strings.HasPrefix(line, fmt.Sprintf("INFO %v ", cur)) {
t.Fatalf("Unexpected INFO #. Expected %v got:\n%v", cur, line)
}
cur += 1
@@ -215,35 +184,23 @@ func TestSearch(t *testing.T) {
// We should get the first available line.
{
gr, match, err := g.Search("INFO", makeSearchFunc(-999))
if err != nil {
t.Fatal("Failed to search for line:", err)
}
if match {
t.Error("Expected Search to not return exact match")
}
require.NoError(t, err, "Failed to search for line")
assert.False(t, match, "Expected Search to not return exact match")
line, err := gr.ReadLine()
if err != nil {
t.Fatal("Failed to read line after search", err)
}
require.NoError(t, err, "Failed to read line after search")
if !strings.HasPrefix(line, "INFO 0 ") {
t.Error("Failed to fetch correct line, which is the earliest INFO")
}
err = gr.Close()
if err != nil {
t.Error("Failed to close GroupReader", err)
}
require.NoError(t, err, "Failed to close GroupReader")
}
// Now search for something that is too large.
// We should get an EOF error.
{
gr, _, err := g.Search("INFO", makeSearchFunc(999))
if err != io.EOF {
t.Error("Expected to get an EOF error")
}
if gr != nil {
t.Error("Expected to get nil GroupReader")
}
assert.Equal(t, io.EOF, err)
assert.Nil(t, gr)
}
// Cleanup
@@ -264,18 +221,14 @@ func TestRotateFile(t *testing.T) {
// Read g.Head.Path+"000"
body1, err := ioutil.ReadFile(g.Head.Path + ".000")
if err != nil {
t.Error("Failed to read first rolled file")
}
assert.NoError(t, err, "Failed to read first rolled file")
if string(body1) != "Line 1\nLine 2\nLine 3\n" {
t.Errorf("Got unexpected contents: [%v]", string(body1))
}
// Read g.Head.Path
body2, err := ioutil.ReadFile(g.Head.Path)
if err != nil {
t.Error("Failed to read first rolled file")
}
assert.NoError(t, err, "Failed to read first rolled file")
if string(body2) != "Line 4\nLine 5\nLine 6\n" {
t.Errorf("Got unexpected contents: [%v]", string(body2))
}
@@ -300,15 +253,9 @@ func TestFindLast1(t *testing.T) {
g.Flush()
match, found, err := g.FindLast("#")
if err != nil {
t.Error("Unexpected error", err)
}
if !found {
t.Error("Expected found=True")
}
if match != "# b" {
t.Errorf("Unexpected match: [%v]", match)
}
assert.NoError(t, err)
assert.True(t, found)
assert.Equal(t, "# b", match)
// Cleanup
destroyTestGroup(t, g)
@@ -330,15 +277,9 @@ func TestFindLast2(t *testing.T) {
g.Flush()
match, found, err := g.FindLast("#")
if err != nil {
t.Error("Unexpected error", err)
}
if !found {
t.Error("Expected found=True")
}
if match != "# b" {
t.Errorf("Unexpected match: [%v]", match)
}
assert.NoError(t, err)
assert.True(t, found)
assert.Equal(t, "# b", match)
// Cleanup
destroyTestGroup(t, g)
@@ -360,15 +301,9 @@ func TestFindLast3(t *testing.T) {
g.Flush()
match, found, err := g.FindLast("#")
if err != nil {
t.Error("Unexpected error", err)
}
if !found {
t.Error("Expected found=True")
}
if match != "# b" {
t.Errorf("Unexpected match: [%v]", match)
}
assert.NoError(t, err)
assert.True(t, found)
assert.Equal(t, "# b", match)
// Cleanup
destroyTestGroup(t, g)
@@ -388,15 +323,9 @@ func TestFindLast4(t *testing.T) {
g.Flush()
match, found, err := g.FindLast("#")
if err != nil {
t.Error("Unexpected error", err)
}
if found {
t.Error("Expected found=False")
}
if match != "" {
t.Errorf("Unexpected match: [%v]", match)
}
assert.NoError(t, err)
assert.False(t, found)
assert.Empty(t, match)
// Cleanup
destroyTestGroup(t, g)
@@ -411,22 +340,18 @@ func TestWrite(t *testing.T) {
read := make([]byte, len(written))
gr, err := g.NewReader(0)
if err != nil {
t.Fatalf("Failed to create reader: %v", err)
}
_, err = gr.Read(read)
if err != nil {
t.Fatalf("Failed to read data: %v", err)
}
require.NoError(t, err, "failed to create reader")
if !bytes.Equal(written, read) {
t.Errorf("%s, %s should be equal", string(written), string(read))
}
_, err = gr.Read(read)
assert.NoError(t, err, "failed to read data")
assert.Equal(t, written, read)
// Cleanup
destroyTestGroup(t, g)
}
// test that Read reads the required amount of bytes from all the files in the
// group and returns no error if n == size of the given slice.
func TestGroupReaderRead(t *testing.T) {
g := createTestGroup(t, 0)
@@ -441,22 +366,47 @@ func TestGroupReaderRead(t *testing.T) {
totalWrittenLength := len(professor) + len(frankenstein)
read := make([]byte, totalWrittenLength)
gr, err := g.NewReader(0)
if err != nil {
t.Fatalf("Failed to create reader: %v", err)
}
n, err := gr.Read(read)
if err != nil {
t.Fatalf("Failed to read data: %v", err)
}
if n != totalWrittenLength {
t.Errorf("Failed to read enough bytes: wanted %d, but read %d", totalWrittenLength, n)
}
require.NoError(t, err, "failed to create reader")
n, err := gr.Read(read)
assert.NoError(t, err, "failed to read data")
assert.Equal(t, totalWrittenLength, n, "not enough bytes read")
professorPlusFrankenstein := professor
professorPlusFrankenstein = append(professorPlusFrankenstein, frankenstein...)
if !bytes.Equal(read, professorPlusFrankenstein) {
t.Errorf("%s, %s should be equal", string(professorPlusFrankenstein), string(read))
}
assert.Equal(t, professorPlusFrankenstein, read)
// Cleanup
destroyTestGroup(t, g)
}
// test that Read returns an error if number of bytes read < size of
// the given slice. Subsequent call should return 0, io.EOF.
func TestGroupReaderRead2(t *testing.T) {
g := createTestGroup(t, 0)
professor := []byte("Professor Monster")
g.Write(professor)
g.Flush()
g.RotateFile()
frankenstein := []byte("Frankenstein's Monster")
frankensteinPart := []byte("Frankenstein")
g.Write(frankensteinPart) // note writing only a part
g.Flush()
totalLength := len(professor) + len(frankenstein)
read := make([]byte, totalLength)
gr, err := g.NewReader(0)
require.NoError(t, err, "failed to create reader")
// 1) n < (size of the given slice), io.EOF
n, err := gr.Read(read)
assert.Equal(t, io.EOF, err)
assert.Equal(t, len(professor)+len(frankensteinPart), n, "Read more/less bytes than it is in the group")
// 2) 0, io.EOF
n, err = gr.Read([]byte("0"))
assert.Equal(t, io.EOF, err)
assert.Equal(t, 0, n)
// Cleanup
destroyTestGroup(t, g)
@@ -465,9 +415,7 @@ func TestGroupReaderRead(t *testing.T) {
func TestMinIndex(t *testing.T) {
g := createTestGroup(t, 0)
if g.MinIndex() != 0 {
t.Error("MinIndex should be zero at the beginning")
}
assert.Zero(t, g.MinIndex(), "MinIndex should be zero at the beginning")
// Cleanup
destroyTestGroup(t, g)
@@ -476,17 +424,13 @@ func TestMinIndex(t *testing.T) {
func TestMaxIndex(t *testing.T) {
g := createTestGroup(t, 0)
if g.MaxIndex() != 0 {
t.Error("MaxIndex should be zero at the beginning")
}
assert.Zero(t, g.MaxIndex(), "MaxIndex should be zero at the beginning")
g.WriteLine("Line 1")
g.Flush()
g.RotateFile()
if g.MaxIndex() != 1 {
t.Error("MaxIndex should point to the last file")
}
assert.Equal(t, 1, g.MaxIndex(), "MaxIndex should point to the last file")
// Cleanup
destroyTestGroup(t, g)


+ 18
- 26
cli/setup_test.go

@@ -14,8 +14,6 @@ import (
)
func TestSetupEnv(t *testing.T) {
assert, require := assert.New(t), require.New(t)
cases := []struct {
args []string
env map[string]string
@@ -51,22 +49,20 @@ func TestSetupEnv(t *testing.T) {
viper.Reset()
args := append([]string{cmd.Use}, tc.args...)
err := RunWithArgs(cmd, args, tc.env)
require.Nil(err, i)
assert.Equal(tc.expected, foo, i)
require.Nil(t, err, i)
assert.Equal(t, tc.expected, foo, i)
}
}
func TestSetupConfig(t *testing.T) {
assert, require := assert.New(t), require.New(t)
// we pre-create two config files we can refer to in the rest of
// the test cases.
cval1, cval2 := "fubble", "wubble"
conf1, err := WriteDemoConfig(map[string]string{"boo": cval1})
require.Nil(err)
require.Nil(t, err)
// make sure it handles dashed-words in the config, and ignores random info
conf2, err := WriteDemoConfig(map[string]string{"boo": cval2, "foo": "bar", "two-words": "WORD"})
require.Nil(err)
require.Nil(t, err)
cases := []struct {
args []string
@@ -110,9 +106,9 @@ func TestSetupConfig(t *testing.T) {
viper.Reset()
args := append([]string{cmd.Use}, tc.args...)
err := RunWithArgs(cmd, args, tc.env)
require.Nil(err, i)
assert.Equal(tc.expected, foo, i)
assert.Equal(tc.expectedTwo, two, i)
require.Nil(t, err, i)
assert.Equal(t, tc.expected, foo, i)
assert.Equal(t, tc.expectedTwo, two, i)
}
}
@@ -123,16 +119,14 @@ type DemoConfig struct {
}
func TestSetupUnmarshal(t *testing.T) {
assert, require := assert.New(t), require.New(t)
// we pre-create two config files we can refer to in the rest of
// the test cases.
cval1, cval2 := "someone", "else"
conf1, err := WriteDemoConfig(map[string]string{"name": cval1})
require.Nil(err)
require.Nil(t, err)
// even with some ignored fields, should be no problem
conf2, err := WriteDemoConfig(map[string]string{"name": cval2, "foo": "bar"})
require.Nil(err)
require.Nil(t, err)
// unused is not declared on a flag and remains from base
base := DemoConfig{
@@ -189,14 +183,12 @@ func TestSetupUnmarshal(t *testing.T) {
viper.Reset()
args := append([]string{cmd.Use}, tc.args...)
err := RunWithArgs(cmd, args, tc.env)
require.Nil(err, i)
assert.Equal(tc.expected, cfg, i)
require.Nil(t, err, i)
assert.Equal(t, tc.expected, cfg, i)
}
}
func TestSetupTrace(t *testing.T) {
assert, require := assert.New(t), require.New(t)
cases := []struct {
args []string
env map[string]string
@@ -224,16 +216,16 @@ func TestSetupTrace(t *testing.T) {
viper.Reset()
args := append([]string{cmd.Use}, tc.args...)
stdout, stderr, err := RunCaptureWithArgs(cmd, args, tc.env)
require.NotNil(err, i)
require.Equal("", stdout, i)
require.NotEqual("", stderr, i)
require.NotNil(t, err, i)
require.Equal(t, "", stdout, i)
require.NotEqual(t, "", stderr, i)
msg := strings.Split(stderr, "\n")
desired := fmt.Sprintf("ERROR: %s", tc.expected)
assert.Equal(desired, msg[0], i)
if tc.long && assert.True(len(msg) > 2, i) {
assert.Equal(t, desired, msg[0], i)
if tc.long && assert.True(t, len(msg) > 2, i) {
// the next line starts the stack trace...
assert.Contains(msg[1], "TestSetupTrace", i)
assert.Contains(msg[2], "setup_test.go", i)
assert.Contains(t, msg[1], "TestSetupTrace", i)
assert.Contains(t, msg[2], "setup_test.go", i)
}
}
}
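
The change in this file is mechanical: the per-test assert := assert.New(t) / require := require.New(t) wrappers are dropped in favour of testify's package-level helpers, which take *testing.T on every call. A small illustration of the two styles (the test names here are made up, not part of the diff):

package example_test

import (
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

// Old style: wrapper objects bound to t shadow the package names.
func TestOldStyle(t *testing.T) {
    assert, require := assert.New(t), require.New(t)
    require.NoError(nil)
    assert.Equal(1, 1)
}

// New style, as used in the rewritten setup_test.go: package-level helpers
// receive t explicitly, so there is no per-test setup line to forget.
func TestNewStyle(t *testing.T) {
    require.NoError(t, nil)
    assert.Equal(t, 1, 1)
}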

+ 1
- 1
clist/clist_test.go

@@ -149,7 +149,7 @@ func _TestGCRandom(t *testing.T) {
func TestScanRightDeleteRandom(t *testing.T) {
const numElements = 10000
const numTimes = 100000
const numTimes = 1000
const numScanners = 10
l := New()


+ 11
- 0
common/cmap.go

@@ -51,6 +51,17 @@ func (cm *CMap) Clear() {
cm.m = make(map[string]interface{})
}
func (cm *CMap) Keys() []string {
cm.l.Lock()
defer cm.l.Unlock()
keys := []string{}
for k := range cm.m {
keys = append(keys, k)
}
return keys
}
func (cm *CMap) Values() []interface{} {
cm.l.Lock()
defer cm.l.Unlock()
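
The new Keys() accessor rounds out the CMap API alongside the existing Get/Set/Has/Delete/Values. A short usage sketch, assuming the import alias used elsewhere in this changeset:

package main

import (
    "fmt"

    cmn "github.com/tendermint/tmlibs/common"
)

func main() {
    cm := cmn.NewCMap()
    cm.Set("alpha", 1)
    cm.Set("beta", 2)

    // Keys() copies the key set while holding the map's mutex, so the
    // returned slice is a stable snapshot even if other goroutines keep
    // writing to the map while we range over it.
    for _, k := range cm.Keys() {
        fmt.Println(k, cm.Get(k))
    }
}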


+ 53
- 0
common/cmap_test.go

@@ -0,0 +1,53 @@
package common
import (
"fmt"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
func TestIterateKeysWithValues(t *testing.T) {
cmap := NewCMap()
for i := 1; i <= 10; i++ {
cmap.Set(fmt.Sprintf("key%d", i), fmt.Sprintf("value%d", i))
}
// Testing size
assert.Equal(t, 10, cmap.Size())
assert.Equal(t, 10, len(cmap.Keys()))
assert.Equal(t, 10, len(cmap.Values()))
// Iterating Keys, checking for matching Value
for _, key := range cmap.Keys() {
val := strings.Replace(key, "key", "value", -1)
assert.Equal(t, val, cmap.Get(key))
}
// Test if all keys are within []Keys()
keys := cmap.Keys()
for i := 1; i <= 10; i++ {
assert.Contains(t, keys, fmt.Sprintf("key%d", i), "cmap.Keys() should contain key")
}
// Delete 1 Key
cmap.Delete("key1")
assert.NotEqual(t, len(keys), len(cmap.Keys()), "[]keys and []Keys() should not be equal, they are copies, one item was removed")
}
func TestContains(t *testing.T) {
cmap := NewCMap()
cmap.Set("key1", "value1")
// Test for known values
assert.True(t, cmap.Has("key1"))
assert.Equal(t, "value1", cmap.Get("key1"))
// Test for unknown values
assert.False(t, cmap.Has("key2"))
assert.Nil(t, cmap.Get("key2"))
}

+ 8
- 3
common/os.go

@@ -35,6 +35,8 @@ func GoPath() string {
return path
}
// TrapSignal catches the SIGTERM and executes cb function. After that it exits
// with code 1.
func TrapSignal(cb func()) {
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt, syscall.SIGTERM)
@@ -50,10 +52,13 @@ func TrapSignal(cb func()) {
select {}
}
// Kill the running process by sending itself SIGTERM
// Kill the running process by sending itself SIGTERM.
func Kill() error {
pid := os.Getpid()
return syscall.Kill(pid, syscall.SIGTERM)
p, err := os.FindProcess(os.Getpid())
if err != nil {
return err
}
return p.Signal(syscall.SIGTERM)
}
func Exit(s string) {
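
Kill() used to call syscall.Kill, which does not exist on Windows, so the package failed to build there; going through os.FindProcess and os.Process.Signal keeps the code portable (actually delivering SIGTERM remains a Unix notion). A hedged sketch of how Kill pairs with TrapSignal; the sleep only gives the handler time to register before the process signals itself:

package main

import (
    "fmt"
    "time"

    cmn "github.com/tendermint/tmlibs/common"
)

func main() {
    // Trigger our own shutdown shortly after the handler below is installed.
    go func() {
        time.Sleep(100 * time.Millisecond)
        if err := cmn.Kill(); err != nil {
            fmt.Println("kill:", err)
        }
    }()

    // TrapSignal blocks, runs the callback on SIGTERM, then exits with code 1.
    cmn.TrapSignal(func() {
        fmt.Println("shutting down")
    })
}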


+ 3
- 0
db/mem_db.go

@@ -2,6 +2,7 @@ package db
import (
"fmt"
"sort"
"strings"
"sync"
)
@@ -127,6 +128,8 @@ func (db *MemDB) IteratorPrefix(prefix []byte) Iterator {
it.keys = append(it.keys, key)
}
}
// and we need to sort them
sort.Strings(it.keys)
return it
}
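
With sort.Strings in place, IteratorPrefix yields keys in lexical order rather than Go's randomized map order. A small sketch, assuming the goleveldb-style Next/Key/Value iterator methods this version of the db package exposes:

package main

import (
    "fmt"

    dbm "github.com/tendermint/tmlibs/db"
)

func main() {
    db := dbm.NewMemDB()
    db.Set([]byte("user/bob"), []byte("1"))
    db.Set([]byte("user/alice"), []byte("2"))
    db.Set([]byte("order/7"), []byte("3"))

    // Keys under the prefix now come back sorted: user/alice before user/bob.
    it := db.IteratorPrefix([]byte("user/"))
    for it.Next() {
        fmt.Printf("%s = %s\n", it.Key(), it.Value())
    }
}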


+ 12
- 3
log/tm_logger.go

@@ -52,19 +52,28 @@ func NewTMLoggerWithColorFn(w io.Writer, colorFn func(keyvals ...interface{}) te
// Info logs a message at level Info.
func (l *tmLogger) Info(msg string, keyvals ...interface{}) {
lWithLevel := kitlevel.Info(l.srcLogger)
kitlog.With(lWithLevel, msgKey, msg).Log(keyvals...)
if err := kitlog.With(lWithLevel, msgKey, msg).Log(keyvals...); err != nil {
errLogger := kitlevel.Error(l.srcLogger)
kitlog.With(errLogger, msgKey, msg).Log("err", err)
}
}
// Debug logs a message at level Debug.
func (l *tmLogger) Debug(msg string, keyvals ...interface{}) {
lWithLevel := kitlevel.Debug(l.srcLogger)
kitlog.With(lWithLevel, msgKey, msg).Log(keyvals...)
if err := kitlog.With(lWithLevel, msgKey, msg).Log(keyvals...); err != nil {
errLogger := kitlevel.Error(l.srcLogger)
kitlog.With(errLogger, msgKey, msg).Log("err", err)
}
}
// Error logs a message at level Error.
func (l *tmLogger) Error(msg string, keyvals ...interface{}) {
lWithLevel := kitlevel.Error(l.srcLogger)
kitlog.With(lWithLevel, msgKey, msg).Log(keyvals...)
lWithMsg := kitlog.With(lWithLevel, msgKey, msg)
if err := lWithMsg.Log(keyvals...); err != nil {
lWithMsg.Log("err", err)
}
}
// With returns a new contextual logger with keyvals prepended to those passed
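
Previously the return value of Log() was discarded, so a malformed key/value list vanished silently; now Info and Debug re-log the failure at error level, and Error reports it on its own line. A small sketch of what triggers that path, mirroring the new test below:

package main

import (
    "os"

    "github.com/tendermint/tmlibs/log"
)

func main() {
    logger := log.NewTMLogger(os.Stdout)

    // A key containing a space is rejected by the logfmt encoder; instead of
    // dropping the line, the logger now emits an error record describing it.
    logger.Info("foo", "baz baz", "bar")

    // Well-formed key/value pairs are logged as before.
    logger.Info("starting service", "module", "main", "height", 10)
}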


+ 14
- 0
log/tm_logger_test.go

@@ -1,12 +1,26 @@
package log_test
import (
"bytes"
"io/ioutil"
"strings"
"testing"
"github.com/go-logfmt/logfmt"
"github.com/tendermint/tmlibs/log"
)
func TestLoggerLogsItsErrors(t *testing.T) {
var buf bytes.Buffer
logger := log.NewTMLogger(&buf)
logger.Info("foo", "baz baz", "bar")
msg := strings.TrimSpace(buf.String())
if !strings.Contains(msg, logfmt.ErrInvalidKey.Error()) {
t.Errorf("Expected logger msg to contain ErrInvalidKey, got %s", msg)
}
}
func BenchmarkTMLoggerSimple(b *testing.B) {
benchmarkRunner(b, log.NewTMLogger(ioutil.Discard), baseInfoMessage)
}


+ 6
- 2
log/tmfmt_logger.go

@@ -35,7 +35,8 @@ type tmfmtLogger struct {
}
// NewTMFmtLogger returns a logger that encodes keyvals to the Writer in
// Tendermint custom format.
// Tendermint custom format. Note complex types (structs, maps, slices)
// formatted as "%+v".
//
// Each log event produces no more than one call to w.Write.
// The passed Writer must be safe for concurrent use by multiple goroutines if
@@ -103,7 +104,10 @@ KeyvalueLoop:
}
}
if err := enc.EncodeKeyval(keyvals[i], keyvals[i+1]); err != nil {
err := enc.EncodeKeyval(keyvals[i], keyvals[i+1])
if err == logfmt.ErrUnsupportedValueType {
enc.EncodeKeyval(keyvals[i], fmt.Sprintf("%+v", keyvals[i+1]))
} else if err != nil {
return err
}
}
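
Together with the change above, values that logfmt cannot encode (structs, maps, slices without a String() method) are rendered via fmt.Sprintf("%+v") instead of failing the whole line. A hedged example of what that looks like through the public logger:

package main

import (
    "os"

    "github.com/tendermint/tmlibs/log"
)

type block struct {
    Height int
    Hash   string
}

func main() {
    logger := log.NewTMLogger(os.Stdout)

    // logfmt returns ErrUnsupportedValueType for the struct and the map, so
    // both fall back to "%+v", producing roughly:
    //   block="{Height:1 Hash:abc}" numbers=map[1:2]
    logger.Info("committed", "block", block{Height: 1, Hash: "abc"}, "numbers", map[int]int{1: 2})
}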


+ 4
- 2
log/tmfmt_logger_test.go

@@ -30,8 +30,10 @@ func TestTMFmtLogger(t *testing.T) {
assert.Regexp(t, regexp.MustCompile(`N\[.+\] unknown \s+ a=1 err=error\n$`), buf.String())
buf.Reset()
err := logger.Log("std_map", map[int]int{1: 2}, "my_map", mymap{0: 0})
assert.NotNil(t, err)
if err := logger.Log("std_map", map[int]int{1: 2}, "my_map", mymap{0: 0}); err != nil {
t.Fatal(err)
}
assert.Regexp(t, regexp.MustCompile(`N\[.+\] unknown \s+ std_map=map\[1:2\] my_map=special_behavior\n$`), buf.String())
buf.Reset()
if err := logger.Log("level", "error"); err != nil {


+ 1
- 1
version/version.go

@@ -1,3 +1,3 @@
package version
const Version = "0.4.0"
const Version = "0.4.1"
