This repository has been archived by the owner on Aug 13, 2019. It is now read-only.

Commit

Fix some stuff I broke during rebase.
Signed-off-by: Callum Styan <[email protected]>
cstyan committed Jun 20, 2019
1 parent 1be0c97 commit ca248e3
Showing 2 changed files with 22 additions and 35 deletions.
15 changes: 1 addition & 14 deletions head.go
@@ -347,20 +347,11 @@ func (h *Head) loadWAL(r *wal.Reader, multiRef map[uint64]uint64) (err error) {
 	}
 
 	var (
-<<<<<<< HEAD
-		dec       RecordDecoder
-		series    []RefSeries
-		samples   []RefSample
-		tstones   []Stone
-		allStones = newMemTombstones()
-=======
 		dec       record.RecordDecoder
 		series    []record.RefSeries
 		samples   []record.RefSample
 		tstones   []tombstones.Stone
 		allStones = tombstones.NewMemTombstones()
-		err       error
->>>>>>> Move tombstones to it's own package.
 	)
 	defer func() {
 		if err := allStones.Close(); err != nil {
@@ -474,15 +465,11 @@ func (h *Head) loadWAL(r *wal.Reader, multiRef map[uint64]uint64) (err error) {
 	}
 	wg.Wait()
 
-<<<<<<< HEAD
 	if r.Err() != nil {
 		return errors.Wrap(r.Err(), "read records")
 	}
 
-	if err := allStones.Iter(func(ref uint64, dranges Intervals) error {
-=======
 	if err := allStones.Iter(func(ref uint64, dranges tombstones.Intervals) error {
->>>>>>> Move tombstones to it's own package.
 		return h.chunkRewrite(ref, dranges)
 	}); err != nil {
 		return errors.Wrap(r.Err(), "deleting samples from tombstones")
@@ -1368,7 +1355,7 @@ func (h *Head) getOrCreateWithID(id, hash uint64, lset labels.Labels) (*record.M
 	return s, true
 }
 
-// seriesHashmap is a simple hashmap for memSeries by their label set. It is built
+// seriesHashmap is a simple hashmap for MemSeries by their label set. It is built
 // on top of a regular hashmap and holds a slice of series to resolve hash collisions.
 // Its methods require the hash to be submitted with it to avoid re-computations throughout
 // the code.
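
The seriesHashmap comment above describes a lookup that resolves hash collisions by keeping a slice of series per bucket and comparing label sets. As a rough illustration only (simplified, hypothetical names; not the actual tsdb implementation), such a map keyed by a caller-supplied hash looks like this:

package main

import "fmt"

// series is a stand-in for the memSeries/MemSeries type referenced above.
type series struct {
	labels string // stand-in for a full label set
}

// hashmap buckets series by a caller-supplied hash; each bucket holds a slice
// so that label sets which hash to the same value can still be told apart.
type hashmap map[uint64][]*series

func (m hashmap) get(hash uint64, labels string) *series {
	for _, s := range m[hash] {
		if s.labels == labels {
			return s
		}
	}
	return nil
}

func (m hashmap) set(hash uint64, s *series) {
	for i, old := range m[hash] {
		if old.labels == s.labels {
			m[hash][i] = s // replace an existing series with the same label set
			return
		}
	}
	m[hash] = append(m[hash], s) // collision or new entry: append to the bucket
}

func main() {
	m := hashmap{}
	m.set(7, &series{labels: `{a="1"}`})
	m.set(7, &series{labels: `{a="2"}`}) // simulated hash collision
	fmt.Println(m.get(7, `{a="2"}`).labels)
}

Passing the hash in from the caller, as the real comment notes, avoids recomputing it on every lookup.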
42 changes: 21 additions & 21 deletions head_test.go
@@ -102,28 +102,28 @@ func TestHead_ReadWAL(t *testing.T) {
 	for _, compress := range []bool{false, true} {
 		t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
 			entries := []interface{}{
-				[]RefSeries{
+				[]record.RefSeries{
 					{Ref: 10, Labels: labels.FromStrings("a", "1")},
 					{Ref: 11, Labels: labels.FromStrings("a", "2")},
 					{Ref: 100, Labels: labels.FromStrings("a", "3")},
 				},
-				[]RefSample{
+				[]record.RefSample{
 					{Ref: 0, T: 99, V: 1},
 					{Ref: 10, T: 100, V: 2},
 					{Ref: 100, T: 100, V: 3},
 				},
-				[]RefSeries{
+				[]record.RefSeries{
 					{Ref: 50, Labels: labels.FromStrings("a", "4")},
 					// This series has two refs pointing to it.
 					{Ref: 101, Labels: labels.FromStrings("a", "3")},
 				},
-				[]RefSample{
+				[]record.RefSample{
 					{Ref: 10, T: 101, V: 5},
 					{Ref: 50, T: 101, V: 6},
 					{Ref: 101, T: 101, V: 7},
 				},
-				[]Stone{
-					{ref: 0, intervals: []Interval{{Mint: 99, Maxt: 101}}},
+				[]tombstones.Stone{
+					{Ref: 0, Intervals: []tombstones.Interval{{Mint: 99, Maxt: 101}}},
 				},
 			}
 			dir, err := ioutil.TempDir("", "test_read_wal")
@@ -148,10 +148,10 @@ func TestHead_ReadWAL(t *testing.T) {
 			s50 := head.series.getByID(50)
 			s100 := head.series.getByID(100)
 
-			testutil.Equals(t, labels.FromStrings("a", "1"), s10.lset)
-			testutil.Equals(t, (*memSeries)(nil), s11) // Series without samples should be garbage colected at head.Init().
-			testutil.Equals(t, labels.FromStrings("a", "4"), s50.lset)
-			testutil.Equals(t, labels.FromStrings("a", "3"), s100.lset)
+			testutil.Equals(t, labels.FromStrings("a", "1"), s10.Lset)
+			testutil.Equals(t, (*record.MemSeries)(nil), s11) // Series without samples should be garbage colected at head.Init().
+			testutil.Equals(t, labels.FromStrings("a", "4"), s50.Lset)
+			testutil.Equals(t, labels.FromStrings("a", "3"), s100.Lset)
 
 			expandChunk := func(c chunkenc.Iterator) (x []sample) {
 				for c.Next() {
@@ -161,9 +161,9 @@ func TestHead_ReadWAL(t *testing.T) {
 				testutil.Ok(t, c.Err())
 				return x
 			}
-			testutil.Equals(t, []sample{{100, 2}, {101, 5}}, expandChunk(s10.iterator(0)))
-			testutil.Equals(t, []sample{{101, 6}}, expandChunk(s50.iterator(0)))
-			testutil.Equals(t, []sample{{100, 3}, {101, 7}}, expandChunk(s100.iterator(0)))
+			testutil.Equals(t, []sample{{100, 2}, {101, 5}}, expandChunk(s10.Iterator(0)))
+			testutil.Equals(t, []sample{{101, 6}}, expandChunk(s50.Iterator(0)))
+			testutil.Equals(t, []sample{{100, 3}, {101, 7}}, expandChunk(s100.Iterator(0)))
 		})
 	}
 }
@@ -328,14 +328,14 @@ func TestHeadDeleteSeriesWithoutSamples(t *testing.T) {
 	for _, compress := range []bool{false, true} {
 		t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
 			entries := []interface{}{
-				[]RefSeries{
+				[]record.RefSeries{
 					{Ref: 10, Labels: labels.FromStrings("a", "1")},
 				},
-				[]RefSample{},
-				[]RefSeries{
+				[]record.RefSample{},
+				[]record.RefSeries{
 					{Ref: 50, Labels: labels.FromStrings("a", "2")},
 				},
-				[]RefSample{
+				[]record.RefSample{
 					{Ref: 50, T: 80, V: 1},
 					{Ref: 50, T: 90, V: 1},
 				},
@@ -1056,17 +1056,17 @@ func TestHead_LogRollback(t *testing.T) {
 
 			testutil.Equals(t, 1, len(recs))
 
-			series, ok := recs[0].([]RefSeries)
+			series, ok := recs[0].([]record.RefSeries)
 			testutil.Assert(t, ok, "expected series record but got %+v", recs[0])
-			testutil.Equals(t, []RefSeries{{Ref: 1, Labels: labels.FromStrings("a", "b")}}, series)
+			testutil.Equals(t, []record.RefSeries{{Ref: 1, Labels: labels.FromStrings("a", "b")}}, series)
 		})
 	}
 }
 
 // TestWalRepair_DecodingError ensures that a repair is run for an error
 // when decoding a record.
 func TestWalRepair_DecodingError(t *testing.T) {
-	var enc RecordEncoder
+	var enc record.RecordEncoder
 	for name, test := range map[string]struct {
 		corrFunc func(rec []byte) []byte // Func that applies the corruption to a record.
 		rec      []byte
@@ -1078,7 +1078,7 @@ func TestWalRepair_DecodingError(t *testing.T) {
 				// Do not modify the base record because it is Logged multiple times.
 				res := make([]byte, len(rec))
 				copy(res, rec)
-				res[0] = byte(RecordInvalid)
+				res[0] = byte(record.RecordInvalid)
 				return res
 			},
 			enc.Series([]record.RefSeries{{Ref: 1, Labels: labels.FromStrings("a", "b")}}, []byte{}),
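
The corruption helper in the hunk above copies the record before overwriting its first byte, because the base record is logged several times and the first byte is what carries the record type. A minimal, self-contained sketch of that copy-then-corrupt pattern (the invalid type value is passed in here rather than taken from the record package; treat the concrete value as an assumption of this illustration):

package main

import "fmt"

// corrupt returns a copy of rec whose first byte no longer encodes a valid
// WAL record type, so a decoder should report an error for it.
func corrupt(rec []byte, invalidType byte) []byte {
	res := make([]byte, len(rec)) // copy so the original record stays intact
	copy(res, rec)
	if len(res) > 0 {
		res[0] = invalidType
	}
	return res
}

func main() {
	rec := []byte{1, 0xde, 0xad} // hypothetical record: type byte followed by payload
	fmt.Printf("%x -> %x\n", rec, corrupt(rec, 0xff))
}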
