plumbing/format/packfile/delta_selector_test.go (+53 -49)
···
 package packfile
 
 import (
+	"testing"
+
 	"github.com/go-git/go-git/v5/plumbing"
 	"github.com/go-git/go-git/v5/storage/memory"
-
-	. "gopkg.in/check.v1"
+	"github.com/stretchr/testify/suite"
 )
 
 type DeltaSelectorSuite struct {
+	suite.Suite
 	ds     *deltaSelector
 	store  *memory.Storage
 	hashes map[string]plumbing.Hash
 }
 
-var _ = Suite(&DeltaSelectorSuite{})
+func TestDeltaSelectorSuite(t *testing.T) {
+	suite.Run(t, new(DeltaSelectorSuite))
+}
 
-func (s *DeltaSelectorSuite) SetUpTest(c *C) {
+func (s *DeltaSelectorSuite) SetupTest() {
 	s.store = memory.NewStorage()
 	s.createTestObjects()
 	s.ds = newDeltaSelector(s.store)
 }
 
-func (s *DeltaSelectorSuite) TestSort(c *C) {
+func (s *DeltaSelectorSuite) TestSort() {
 	var o1 = newObjectToPack(newObject(plumbing.BlobObject, []byte("00000")))
 	var o4 = newObjectToPack(newObject(plumbing.BlobObject, []byte("0000")))
 	var o6 = newObjectToPack(newObject(plumbing.BlobObject, []byte("00")))
···
 	toSort := []*ObjectToPack{o1, o2, o3, o4, o5, o6, o7, o8, o9}
 	s.ds.sort(toSort)
 	expected := []*ObjectToPack{o1, o4, o6, o9, o8, o2, o3, o5, o7}
-	c.Assert(toSort, DeepEquals, expected)
+	s.Equal(expected, toSort)
 }
 
 type testObject struct {
···
 	}
 }
 
-func (s *DeltaSelectorSuite) TestObjectsToPack(c *C) {
+func (s *DeltaSelectorSuite) TestObjectsToPack() {
 	// Different type
 	hashes := []plumbing.Hash{s.hashes["base"], s.hashes["treeType"]}
 	deltaWindowSize := uint(10)
 	otp, err := s.ds.ObjectsToPack(hashes, deltaWindowSize)
-	c.Assert(err, IsNil)
-	c.Assert(len(otp), Equals, 2)
-	c.Assert(otp[0].Object, Equals, s.store.Objects[s.hashes["base"]])
-	c.Assert(otp[1].Object, Equals, s.store.Objects[s.hashes["treeType"]])
+	s.NoError(err)
+	s.Len(otp, 2)
+	s.Equal(s.store.Objects[s.hashes["base"]], otp[0].Object)
+	s.Equal(s.store.Objects[s.hashes["treeType"]], otp[1].Object)
 
 	// Size radically different
 	hashes = []plumbing.Hash{s.hashes["bigBase"], s.hashes["target"]}
 	otp, err = s.ds.ObjectsToPack(hashes, deltaWindowSize)
-	c.Assert(err, IsNil)
-	c.Assert(len(otp), Equals, 2)
-	c.Assert(otp[0].Object, Equals, s.store.Objects[s.hashes["bigBase"]])
-	c.Assert(otp[1].Object, Equals, s.store.Objects[s.hashes["target"]])
+	s.NoError(err)
+	s.Len(otp, 2)
+	s.Equal(s.store.Objects[s.hashes["bigBase"]], otp[0].Object)
+	s.Equal(s.store.Objects[s.hashes["target"]], otp[1].Object)
 
 	// Delta Size Limit with no best delta yet
 	hashes = []plumbing.Hash{s.hashes["smallBase"], s.hashes["smallTarget"]}
 	otp, err = s.ds.ObjectsToPack(hashes, deltaWindowSize)
-	c.Assert(err, IsNil)
-	c.Assert(len(otp), Equals, 2)
-	c.Assert(otp[0].Object, Equals, s.store.Objects[s.hashes["smallBase"]])
-	c.Assert(otp[1].Object, Equals, s.store.Objects[s.hashes["smallTarget"]])
+	s.NoError(err)
+	s.Len(otp, 2)
+	s.Equal(s.store.Objects[s.hashes["smallBase"]], otp[0].Object)
+	s.Equal(s.store.Objects[s.hashes["smallTarget"]], otp[1].Object)
 
 	// It will create the delta
 	hashes = []plumbing.Hash{s.hashes["base"], s.hashes["target"]}
 	otp, err = s.ds.ObjectsToPack(hashes, deltaWindowSize)
-	c.Assert(err, IsNil)
-	c.Assert(len(otp), Equals, 2)
-	c.Assert(otp[0].Object, Equals, s.store.Objects[s.hashes["target"]])
-	c.Assert(otp[0].IsDelta(), Equals, false)
-	c.Assert(otp[1].Original, Equals, s.store.Objects[s.hashes["base"]])
-	c.Assert(otp[1].IsDelta(), Equals, true)
-	c.Assert(otp[1].Depth, Equals, 1)
+	s.NoError(err)
+	s.Len(otp, 2)
+	s.Equal(s.store.Objects[s.hashes["target"]], otp[0].Object)
+	s.False(otp[0].IsDelta())
+	s.Equal(s.store.Objects[s.hashes["base"]], otp[1].Original)
+	s.True(otp[1].IsDelta())
+	s.Equal(1, otp[1].Depth)
 
 	// If our base is another delta, the depth will increase by one
 	hashes = []plumbing.Hash{
···
 		s.hashes["o3"],
 	}
 	otp, err = s.ds.ObjectsToPack(hashes, deltaWindowSize)
-	c.Assert(err, IsNil)
-	c.Assert(len(otp), Equals, 3)
-	c.Assert(otp[0].Object, Equals, s.store.Objects[s.hashes["o1"]])
-	c.Assert(otp[0].IsDelta(), Equals, false)
-	c.Assert(otp[1].Original, Equals, s.store.Objects[s.hashes["o2"]])
-	c.Assert(otp[1].IsDelta(), Equals, true)
-	c.Assert(otp[1].Depth, Equals, 1)
-	c.Assert(otp[2].Original, Equals, s.store.Objects[s.hashes["o3"]])
-	c.Assert(otp[2].IsDelta(), Equals, true)
-	c.Assert(otp[2].Depth, Equals, 2)
+	s.NoError(err)
+	s.Len(otp, 3)
+	s.Equal(s.store.Objects[s.hashes["o1"]], otp[0].Object)
+	s.False(otp[0].IsDelta())
+	s.Equal(s.store.Objects[s.hashes["o2"]], otp[1].Original)
+	s.True(otp[1].IsDelta())
+	s.Equal(1, otp[1].Depth)
+	s.Equal(s.store.Objects[s.hashes["o3"]], otp[2].Original)
+	s.True(otp[2].IsDelta())
+	s.Equal(2, otp[2].Depth)
 
 	// Check that objects outside of the sliding window don't produce
 	// a delta.
···
 	// Don't sort so we can easily check the sliding window without
 	// creating a bunch of new objects.
 	otp, err = s.ds.objectsToPack(hashes, deltaWindowSize)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 	err = s.ds.walk(otp, deltaWindowSize)
-	c.Assert(err, IsNil)
-	c.Assert(len(otp), Equals, int(deltaWindowSize)+2)
+	s.NoError(err)
+	s.Len(otp, int(deltaWindowSize)+2)
 	targetIdx := len(otp) - 1
-	c.Assert(otp[targetIdx].IsDelta(), Equals, false)
+	s.False(otp[targetIdx].IsDelta())
 
 	// Check that no deltas are created, and the objects are unsorted,
 	// if compression is off.
 	hashes = []plumbing.Hash{s.hashes["base"], s.hashes["target"]}
 	otp, err = s.ds.ObjectsToPack(hashes, 0)
-	c.Assert(err, IsNil)
-	c.Assert(len(otp), Equals, 2)
-	c.Assert(otp[0].Object, Equals, s.store.Objects[s.hashes["base"]])
-	c.Assert(otp[0].IsDelta(), Equals, false)
-	c.Assert(otp[1].Original, Equals, s.store.Objects[s.hashes["target"]])
-	c.Assert(otp[1].IsDelta(), Equals, false)
-	c.Assert(otp[1].Depth, Equals, 0)
+	s.NoError(err)
+	s.Len(otp, 2)
+	s.Equal(s.store.Objects[s.hashes["base"]], otp[0].Object)
+	s.False(otp[0].IsDelta())
+	s.Equal(s.store.Objects[s.hashes["target"]], otp[1].Original)
+	s.False(otp[1].IsDelta())
+	s.Equal(0, otp[1].Depth)
 }
 
-func (s *DeltaSelectorSuite) TestMaxDepth(c *C) {
+func (s *DeltaSelectorSuite) TestMaxDepth() {
 	dsl := s.ds.deltaSizeLimit(0, 0, int(maxDepth), true)
-	c.Assert(dsl, Equals, int64(0))
+	s.Equal(int64(0), dsl)
 }
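
The remaining files in this diff apply the same gocheck-to-testify migration. For reference, a minimal, hypothetical sketch of the testify/suite pattern being adopted (illustrative names only, not part of go-git):

package example_test

import (
	"testing"

	"github.com/stretchr/testify/suite"
)

// ExampleSuite embeds suite.Suite, which provides the s.Equal, s.NoError,
// s.Len, ... assertion helpers used throughout the migrated tests.
type ExampleSuite struct {
	suite.Suite
}

// TestExampleSuite wires the suite into the standard `go test` runner,
// replacing gocheck's `var _ = Suite(&ExampleSuite{})` registration.
func TestExampleSuite(t *testing.T) {
	suite.Run(t, new(ExampleSuite))
}

// SetupTest replaces gocheck's SetUpTest(c *C); it runs before every test.
func (s *ExampleSuite) SetupTest() {}

// Test methods drop the *check.C argument; the underlying *testing.T is
// still reachable via s.T() for Log, Skip and Errorf.
func (s *ExampleSuite) TestSomething() {
	var err error
	s.NoError(err)  // c.Assert(err, IsNil)
	s.Equal(4, 2+2) // c.Assert(got, Equals, expected), expected value first
}

Lifecycle hooks are renamed (SetUpTest becomes SetupTest, SetUpSuite becomes SetupSuite), and gocheck matchers map onto suite assertions (IsNil to NoError, Equals/DeepEquals to Equal).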
plumbing/format/packfile/delta_test.go (+30 -27)
···
 	"testing"
 
 	"github.com/go-git/go-git/v5/plumbing"
-	. "gopkg.in/check.v1"
+	"github.com/stretchr/testify/suite"
 )
 
 type DeltaSuite struct {
+	suite.Suite
 	testCases []deltaTest
 }
 
-var _ = Suite(&DeltaSuite{})
+func TestDeltaSuite(t *testing.T) {
+	suite.Run(t, new(DeltaSuite))
+}
 
 type deltaTest struct {
 	description string
···
 	target      []piece
 }
 
-func (s *DeltaSuite) SetUpSuite(c *C) {
+func (s *DeltaSuite) SetupSuite() {
 	s.testCases = []deltaTest{{
 		description: "distinct file",
 		base:        []piece{{"0", 300}},
···
 	return string(randBytes(n))
 }
 
-func (s *DeltaSuite) TestAddDelta(c *C) {
+func (s *DeltaSuite) TestAddDelta() {
 	for _, t := range s.testCases {
 		baseBuf := genBytes(t.base)
 		targetBuf := genBytes(t.target)
 		delta := DiffDelta(baseBuf, targetBuf)
 		result, err := PatchDelta(baseBuf, delta)
 
-		c.Log("Executing test case:", t.description)
-		c.Assert(err, IsNil)
-		c.Assert(result, DeepEquals, targetBuf)
+		s.T().Log("Executing test case:", t.description)
+		s.NoError(err)
+		s.Equal(targetBuf, result)
 	}
 }
 
-func (s *DeltaSuite) TestAddDeltaReader(c *C) {
+func (s *DeltaSuite) TestAddDeltaReader() {
 	for _, t := range s.testCases {
 		baseBuf := genBytes(t.base)
 		baseObj := &plumbing.MemoryObject{}
···
 		delta := DiffDelta(baseBuf, targetBuf)
 		deltaRC := io.NopCloser(bytes.NewReader(delta))
 
-		c.Log("Executing test case:", t.description)
+		s.T().Log("Executing test case:", t.description)
 
 		resultRC, err := ReaderFromDelta(baseObj, deltaRC)
-		c.Assert(err, IsNil)
+		s.NoError(err)
 
 		result, err := io.ReadAll(resultRC)
-		c.Assert(err, IsNil)
+		s.NoError(err)
 
 		err = resultRC.Close()
-		c.Assert(err, IsNil)
+		s.NoError(err)
 
-		c.Assert(result, DeepEquals, targetBuf)
+		s.Equal(targetBuf, result)
 	}
 }
 
-func (s *DeltaSuite) TestIncompleteDelta(c *C) {
+func (s *DeltaSuite) TestIncompleteDelta() {
 	for _, t := range s.testCases {
-		c.Log("Incomplete delta on:", t.description)
+		s.T().Log("Incomplete delta on:", t.description)
 		baseBuf := genBytes(t.base)
 		targetBuf := genBytes(t.target)
 		delta := DiffDelta(baseBuf, targetBuf)
 		delta = delta[:len(delta)-2]
 		result, err := PatchDelta(baseBuf, delta)
-		c.Assert(err, NotNil)
-		c.Assert(result, IsNil)
+		s.NotNil(err)
+		s.Nil(result)
 	}
 
 	// check nil input too
 	result, err := PatchDelta(nil, nil)
-	c.Assert(err, NotNil)
-	c.Assert(result, IsNil)
+	s.NotNil(err)
+	s.Nil(result)
 }
 
-func (s *DeltaSuite) TestMaxCopySizeDelta(c *C) {
+func (s *DeltaSuite) TestMaxCopySizeDelta() {
 	baseBuf := randBytes(maxCopySize)
 	targetBuf := baseBuf[0:]
 	targetBuf = append(targetBuf, byte(1))
 
 	delta := DiffDelta(baseBuf, targetBuf)
 	result, err := PatchDelta(baseBuf, delta)
-	c.Assert(err, IsNil)
-	c.Assert(result, DeepEquals, targetBuf)
+	s.NoError(err)
+	s.Equal(targetBuf, result)
 }
 
-func (s *DeltaSuite) TestMaxCopySizeDeltaReader(c *C) {
+func (s *DeltaSuite) TestMaxCopySizeDeltaReader() {
 	baseBuf := randBytes(maxCopySize)
 	baseObj := &plumbing.MemoryObject{}
 	baseObj.Write(baseBuf)
···
 	deltaRC := io.NopCloser(bytes.NewReader(delta))
 
 	resultRC, err := ReaderFromDelta(baseObj, deltaRC)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	result, err := io.ReadAll(resultRC)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	err = resultRC.Close()
-	c.Assert(err, IsNil)
-	c.Assert(result, DeepEquals, targetBuf)
+	s.NoError(err)
+	s.Equal(targetBuf, result)
 }
 
 func FuzzPatchDelta(f *testing.F) {
plumbing/format/packfile/encoder_advanced_test.go (+38 -30)
···
 	. "github.com/go-git/go-git/v5/plumbing/format/packfile"
 	"github.com/go-git/go-git/v5/plumbing/storer"
 	"github.com/go-git/go-git/v5/storage/filesystem"
+	"github.com/stretchr/testify/suite"
 
 	"github.com/go-git/go-billy/v5/memfs"
 	fixtures "github.com/go-git/go-git-fixtures/v4"
-	. "gopkg.in/check.v1"
 )
 
+type EncoderAdvancedFixtureSuite struct {
+	fixtures.Suite
+}
+
 type EncoderAdvancedSuite struct {
-	fixtures.Suite
+	suite.Suite
+	EncoderAdvancedFixtureSuite
 }
 
-var _ = Suite(&EncoderAdvancedSuite{})
+func TestEncoderAdvancedSuite(t *testing.T) {
+	suite.Run(t, new(EncoderAdvancedSuite))
+}
 
-func (s *EncoderAdvancedSuite) TestEncodeDecode(c *C) {
+func (s *EncoderAdvancedSuite) TestEncodeDecode() {
 	if testing.Short() {
-		c.Skip("skipping test in short mode.")
+		s.T().Skip("skipping test in short mode.")
 	}
 
 	fixs := fixtures.Basic().ByTag("packfile").ByTag(".git")
 	fixs = append(fixs, fixtures.ByURL("https://github.com/src-d/go-git.git").
 		ByTag("packfile").ByTag(".git").One())
-	fixs.Test(c, func(f *fixtures.Fixture) {
+
+	for _, f := range fixs {
 		storage := filesystem.NewStorage(f.DotGit(), cache.NewObjectLRUDefault())
-		s.testEncodeDecode(c, storage, 10)
-	})
+		s.testEncodeDecode(storage, 10)
+	}
 }
 
-func (s *EncoderAdvancedSuite) TestEncodeDecodeNoDeltaCompression(c *C) {
+func (s *EncoderAdvancedSuite) TestEncodeDecodeNoDeltaCompression() {
 	if testing.Short() {
-		c.Skip("skipping test in short mode.")
+		s.T().Skip("skipping test in short mode.")
 	}
 
 	fixs := fixtures.Basic().ByTag("packfile").ByTag(".git")
 	fixs = append(fixs, fixtures.ByURL("https://github.com/src-d/go-git.git").
 		ByTag("packfile").ByTag(".git").One())
-	fixs.Test(c, func(f *fixtures.Fixture) {
+
+	for _, f := range fixs {
 		storage := filesystem.NewStorage(f.DotGit(), cache.NewObjectLRUDefault())
-		s.testEncodeDecode(c, storage, 0)
-	})
+		s.testEncodeDecode(storage, 0)
+	}
 }
 
 func (s *EncoderAdvancedSuite) testEncodeDecode(
-	c *C,
 	storage storer.Storer,
 	packWindow uint,
 ) {
 	objIter, err := storage.IterEncodedObjects(plumbing.AnyObject)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	expectedObjects := map[plumbing.Hash]bool{}
 	var hashes []plumbing.Hash
···
 		return err
 
 	})
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	// Shuffle hashes to avoid delta selector getting order right just because
 	// the initial order is correct.
···
 	buf := bytes.NewBuffer(nil)
 	enc := NewEncoder(buf, storage, false)
 	encodeHash, err := enc.Encode(hashes, packWindow)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	fs := memfs.New()
 	f, err := fs.Create("packfile")
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	_, err = f.Write(buf.Bytes())
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	_, err = f.Seek(0, io.SeekStart)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	w := new(idxfile.Writer)
 	parser := NewParser(NewScanner(f), WithScannerObservers(w))
 
 	_, err = parser.Parse()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 	index, err := w.Index()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	_, err = f.Seek(0, io.SeekStart)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	p := NewPackfile(f, WithIdx(index), WithFs(fs))
 
 	decodeHash, err := p.ID()
-	c.Assert(err, IsNil)
-	c.Assert(encodeHash, Equals, decodeHash)
+	s.NoError(err)
+	s.Equal(decodeHash, encodeHash)
 
 	objIter, err = p.GetAll()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 	obtainedObjects := map[plumbing.Hash]bool{}
 	err = objIter.ForEach(func(o plumbing.EncodedObject) error {
 		obtainedObjects[o.Hash()] = true
 		return nil
 	})
-	c.Assert(err, IsNil)
-	c.Assert(obtainedObjects, DeepEquals, expectedObjects)
+	s.NoError(err)
+	s.Equal(expectedObjects, obtainedObjects)
 
 	for h := range obtainedObjects {
 		if !expectedObjects[h] {
-			c.Errorf("obtained unexpected object: %s", h)
+			s.T().Errorf("obtained unexpected object: %s", h)
 		}
 	}
 
 	for h := range expectedObjects {
 		if !obtainedObjects[h] {
-			c.Errorf("missing object: %s", h)
+			s.T().Errorf("missing object: %s", h)
 		}
 	}
 }
plumbing/format/packfile/encoder_test.go (+105 -94)
···
 import (
 	"bytes"
 	"io"
+	"testing"
 
 	"github.com/go-git/go-git/v5/plumbing"
 	"github.com/go-git/go-git/v5/plumbing/format/idxfile"
 	"github.com/go-git/go-git/v5/plumbing/hash"
 	"github.com/go-git/go-git/v5/storage/memory"
+	"github.com/stretchr/testify/suite"
 
 	"github.com/go-git/go-billy/v5/memfs"
 	fixtures "github.com/go-git/go-git-fixtures/v4"
-	. "gopkg.in/check.v1"
 )
+
+type EncoderFixtureSuite struct {
+	fixtures.Suite
+}
 
 type EncoderSuite struct {
-	fixtures.Suite
+	suite.Suite
+	EncoderFixtureSuite
 	buf   *bytes.Buffer
 	store *memory.Storage
 	enc   *Encoder
 }
 
-var _ = Suite(&EncoderSuite{})
+func TestEncoderSuite(t *testing.T) {
+	suite.Run(t, new(EncoderSuite))
+}
 
-func (s *EncoderSuite) SetUpTest(c *C) {
+func (s *EncoderSuite) SetupTest() {
 	s.buf = bytes.NewBuffer(nil)
 	s.store = memory.NewStorage()
 	s.enc = NewEncoder(s.buf, s.store, false)
 }
 
-func (s *EncoderSuite) TestCorrectPackHeader(c *C) {
+func (s *EncoderSuite) TestCorrectPackHeader() {
 	h, err := s.enc.Encode([]plumbing.Hash{}, 10)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	hb := [hash.Size]byte(h)
 
···
 
 	result := s.buf.Bytes()
 
-	c.Assert(result, DeepEquals, expectedResult)
+	s.Equal(expectedResult, result)
 }
 
-func (s *EncoderSuite) TestCorrectPackWithOneEmptyObject(c *C) {
+func (s *EncoderSuite) TestCorrectPackWithOneEmptyObject() {
 	o := &plumbing.MemoryObject{}
 	o.SetType(plumbing.CommitObject)
 	o.SetSize(0)
 	_, err := s.store.SetEncodedObject(o)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	h, err := s.enc.Encode([]plumbing.Hash{o.Hash()}, 10)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	// PACK + VERSION(2) + OBJECT NUMBER(1)
 	expectedResult := []byte{'P', 'A', 'C', 'K', 0, 0, 0, 2, 0, 0, 0, 1}
···
 
 	result := s.buf.Bytes()
 
-	c.Assert(result, DeepEquals, expectedResult)
+	s.Equal(expectedResult, result)
 }
 
-func (s *EncoderSuite) TestMaxObjectSize(c *C) {
+func (s *EncoderSuite) TestMaxObjectSize() {
 	o := s.store.NewEncodedObject()
 	o.SetSize(9223372036854775807)
 	o.SetType(plumbing.CommitObject)
 	_, err := s.store.SetEncodedObject(o)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 	hash, err := s.enc.Encode([]plumbing.Hash{o.Hash()}, 10)
-	c.Assert(err, IsNil)
-	c.Assert(hash.IsZero(), Not(Equals), true)
+	s.NoError(err)
+	s.NotEqual(true, hash.IsZero())
 }
 
-func (s *EncoderSuite) TestHashNotFound(c *C) {
+func (s *EncoderSuite) TestHashNotFound() {
 	h, err := s.enc.Encode([]plumbing.Hash{plumbing.NewHash("BAD")}, 10)
-	c.Assert(h, Equals, plumbing.ZeroHash)
-	c.Assert(err, NotNil)
-	c.Assert(err, Equals, plumbing.ErrObjectNotFound)
+	s.Equal(plumbing.ZeroHash, h)
+	s.NotNil(err)
+	s.ErrorIs(err, plumbing.ErrObjectNotFound)
 }
 
-func (s *EncoderSuite) TestDecodeEncodeWithDeltaDecodeREF(c *C) {
+func (s *EncoderSuite) TestDecodeEncodeWithDeltaDecodeREF() {
 	s.enc = NewEncoder(s.buf, s.store, true)
-	s.simpleDeltaTest(c)
+	s.simpleDeltaTest()
 }
 
-func (s *EncoderSuite) TestDecodeEncodeWithDeltaDecodeOFS(c *C) {
+func (s *EncoderSuite) TestDecodeEncodeWithDeltaDecodeOFS() {
 	s.enc = NewEncoder(s.buf, s.store, false)
-	s.simpleDeltaTest(c)
+	s.simpleDeltaTest()
 }
 
-func (s *EncoderSuite) TestDecodeEncodeWithDeltasDecodeREF(c *C) {
+func (s *EncoderSuite) TestDecodeEncodeWithDeltasDecodeREF() {
 	s.enc = NewEncoder(s.buf, s.store, true)
-	s.deltaOverDeltaTest(c)
+	s.deltaOverDeltaTest()
 }
 
-func (s *EncoderSuite) TestDecodeEncodeWithDeltasDecodeOFS(c *C) {
+func (s *EncoderSuite) TestDecodeEncodeWithDeltasDecodeOFS() {
 	s.enc = NewEncoder(s.buf, s.store, false)
-	s.deltaOverDeltaTest(c)
+	s.deltaOverDeltaTest()
 }
 
-func (s *EncoderSuite) TestDecodeEncodeWithCycleREF(c *C) {
+func (s *EncoderSuite) TestDecodeEncodeWithCycleREF() {
 	s.enc = NewEncoder(s.buf, s.store, true)
-	s.deltaOverDeltaCyclicTest(c)
+	s.deltaOverDeltaCyclicTest()
 }
 
-func (s *EncoderSuite) TestDecodeEncodeWithCycleOFS(c *C) {
+func (s *EncoderSuite) TestDecodeEncodeWithCycleOFS() {
 	s.enc = NewEncoder(s.buf, s.store, false)
-	s.deltaOverDeltaCyclicTest(c)
+	s.deltaOverDeltaCyclicTest()
 }
 
-func (s *EncoderSuite) simpleDeltaTest(c *C) {
+func (s *EncoderSuite) simpleDeltaTest() {
 	srcObject := newObject(plumbing.BlobObject, []byte("0"))
 	targetObject := newObject(plumbing.BlobObject, []byte("01"))
 
 	deltaObject, err := GetDelta(srcObject, targetObject)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	srcToPack := newObjectToPack(srcObject)
 	encHash, err := s.enc.encode([]*ObjectToPack{
 		srcToPack,
 		newDeltaObjectToPack(srcToPack, targetObject, deltaObject),
 	})
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
-	p, cleanup := packfileFromReader(c, s.buf)
+	p, cleanup := packfileFromReader(s, s.buf)
 	defer cleanup()
 	decHash, err := p.ID()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
-	c.Assert(encHash, Equals, decHash)
+	s.Equal(decHash, encHash)
 
 	decSrc, err := p.Get(srcObject.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decSrc, srcObject)
+	s.NoError(err)
+	objectsEqual(s, decSrc, srcObject)
 
 	decTarget, err := p.Get(targetObject.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decTarget, targetObject)
+	s.NoError(err)
+	objectsEqual(s, decTarget, targetObject)
 }
 
-func (s *EncoderSuite) deltaOverDeltaTest(c *C) {
+func (s *EncoderSuite) deltaOverDeltaTest() {
 	srcObject := newObject(plumbing.BlobObject, []byte("0"))
 	targetObject := newObject(plumbing.BlobObject, []byte("01"))
 	otherTargetObject := newObject(plumbing.BlobObject, []byte("011111"))
 
 	deltaObject, err := GetDelta(srcObject, targetObject)
-	c.Assert(err, IsNil)
-	c.Assert(deltaObject.Hash(), Not(Equals), plumbing.ZeroHash)
+	s.NoError(err)
+	s.NotEqual(plumbing.ZeroHash, deltaObject.Hash())
 
 	otherDeltaObject, err := GetDelta(targetObject, otherTargetObject)
-	c.Assert(err, IsNil)
-	c.Assert(otherDeltaObject.Hash(), Not(Equals), plumbing.ZeroHash)
+	s.NoError(err)
+	s.NotEqual(plumbing.ZeroHash, otherDeltaObject.Hash())
 
 	srcToPack := newObjectToPack(srcObject)
 	targetToPack := newObjectToPack(targetObject)
···
 		newDeltaObjectToPack(srcToPack, targetObject, deltaObject),
 		newDeltaObjectToPack(targetToPack, otherTargetObject, otherDeltaObject),
 	})
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
-	p, cleanup := packfileFromReader(c, s.buf)
+	p, cleanup := packfileFromReader(s, s.buf)
 	defer cleanup()
 	decHash, err := p.ID()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
-	c.Assert(encHash, Equals, decHash)
+	s.Equal(decHash, encHash)
 
 	decSrc, err := p.Get(srcObject.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decSrc, srcObject)
+	s.NoError(err)
+	objectsEqual(s, decSrc, srcObject)
 
 	decTarget, err := p.Get(targetObject.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decTarget, targetObject)
+	s.NoError(err)
+	objectsEqual(s, decTarget, targetObject)
 
 	decOtherTarget, err := p.Get(otherTargetObject.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decOtherTarget, otherTargetObject)
+	s.NoError(err)
+	objectsEqual(s, decOtherTarget, otherTargetObject)
 }
 
-func (s *EncoderSuite) deltaOverDeltaCyclicTest(c *C) {
+func (s *EncoderSuite) deltaOverDeltaCyclicTest() {
 	o1 := newObject(plumbing.BlobObject, []byte("0"))
 	o2 := newObject(plumbing.BlobObject, []byte("01"))
 	o3 := newObject(plumbing.BlobObject, []byte("011111"))
 	o4 := newObject(plumbing.BlobObject, []byte("01111100000"))
 
 	_, err := s.store.SetEncodedObject(o1)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 	_, err = s.store.SetEncodedObject(o2)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 	_, err = s.store.SetEncodedObject(o3)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 	_, err = s.store.SetEncodedObject(o4)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	d2, err := GetDelta(o1, o2)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	d3, err := GetDelta(o4, o3)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	d4, err := GetDelta(o3, o4)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	po1 := newObjectToPack(o1)
 	pd2 := newDeltaObjectToPack(po1, o2, d2)
···
 		pd3,
 		pd4,
 	})
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
-	p, cleanup := packfileFromReader(c, s.buf)
+	p, cleanup := packfileFromReader(s, s.buf)
 	defer cleanup()
 	decHash, err := p.ID()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
-	c.Assert(encHash, Equals, decHash)
+	s.Equal(decHash, encHash)
 
 	decSrc, err := p.Get(o1.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decSrc, o1)
+	s.NoError(err)
+	objectsEqual(s, decSrc, o1)
 
 	decTarget, err := p.Get(o2.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decTarget, o2)
+	s.NoError(err)
+	objectsEqual(s, decTarget, o2)
 
 	decOtherTarget, err := p.Get(o3.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decOtherTarget, o3)
+	s.NoError(err)
+	objectsEqual(s, decOtherTarget, o3)
 
 	decAnotherTarget, err := p.Get(o4.Hash())
-	c.Assert(err, IsNil)
-	objectsEqual(c, decAnotherTarget, o4)
+	s.NoError(err)
+	objectsEqual(s, decAnotherTarget, o4)
 }
 
-func objectsEqual(c *C, o1, o2 plumbing.EncodedObject) {
-	c.Assert(o1.Type(), Equals, o2.Type())
-	c.Assert(o1.Hash(), Equals, o2.Hash())
-	c.Assert(o1.Size(), Equals, o2.Size())
+func objectsEqual(s *EncoderSuite, o1, o2 plumbing.EncodedObject) {
+	s.Equal(o2.Type(), o1.Type())
+	s.Equal(o2.Hash(), o1.Hash())
+	s.Equal(o2.Size(), o1.Size())
 
 	r1, err := o1.Reader()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	b1, err := io.ReadAll(r1)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	r2, err := o2.Reader()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	b2, err := io.ReadAll(r2)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
-	c.Assert(bytes.Compare(b1, b2), Equals, 0)
+	s.Equal(0, bytes.Compare(b1, b2))
 
 	err = r2.Close()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	err = r1.Close()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 }
 
-func packfileFromReader(c *C, buf *bytes.Buffer) (*Packfile, func()) {
+func packfileFromReader(s *EncoderSuite, buf *bytes.Buffer) (*Packfile, func()) {
 	fs := memfs.New()
 	file, err := fs.Create("packfile")
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	_, err = file.Write(buf.Bytes())
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	_, err = file.Seek(0, io.SeekStart)
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	scanner := NewScanner(file)
 
···
 	p := NewParser(scanner, WithScannerObservers(w))
 
 	_, err = p.Parse()
-	c.Assert(err, IsNil)
+	s.NoError(err)
 
 	index, err := w.Index()
-	c.Assert(err, IsNil)
+	s.NoError(err)
+
+	_, err = file.Seek(0, io.SeekStart)
+	s.NoError(err)
 
 	return NewPackfile(file, WithIdx(index), WithFs(fs)), func() {
-		c.Assert(file.Close(), IsNil)
+		s.NoError(file.Close())
 	}
 }
plumbing/format/packfile/object_pack_test.go (+17 -13)
···
 
 import (
 	"io"
+	"testing"
 
 	"github.com/go-git/go-git/v5/plumbing"
-
-	. "gopkg.in/check.v1"
+	"github.com/stretchr/testify/suite"
 )
 
-type ObjectToPackSuite struct{}
+type ObjectToPackSuite struct {
+	suite.Suite
+}
 
-var _ = Suite(&ObjectToPackSuite{})
+func TestObjectToPackSuite(t *testing.T) {
+	suite.Run(t, new(ObjectToPackSuite))
+}
 
-func (s *ObjectToPackSuite) TestObjectToPack(c *C) {
+func (s *ObjectToPackSuite) TestObjectToPack() {
 	obj := &dummyObject{}
 	otp := newObjectToPack(obj)
-	c.Assert(obj, Equals, otp.Object)
-	c.Assert(obj, Equals, otp.Original)
-	c.Assert(otp.Base, IsNil)
-	c.Assert(otp.IsDelta(), Equals, false)
+	s.Equal(otp.Object, obj)
+	s.Equal(otp.Original, obj)
+	s.Nil(otp.Base)
+	s.False(otp.IsDelta())
 
 	original := &dummyObject{}
 	delta := &dummyObject{}
 	deltaToPack := newDeltaObjectToPack(otp, original, delta)
-	c.Assert(obj, Equals, deltaToPack.Object)
-	c.Assert(original, Equals, deltaToPack.Original)
-	c.Assert(otp, Equals, deltaToPack.Base)
-	c.Assert(deltaToPack.IsDelta(), Equals, true)
+	s.Equal(deltaToPack.Object, obj)
+	s.Equal(deltaToPack.Original, original)
+	s.Equal(deltaToPack.Base, otp)
+	s.True(deltaToPack.IsDelta())
 }
 
 type dummyObject struct{}
plumbing/memory.go (-2)
···
 import (
 	"bytes"
 	"io"
-	"slices"
 )
 
 // MemoryObject on memory Object implementation
···
 // SetSize set the object size, a content of the given size should be written
 // afterwards
 func (o *MemoryObject) SetSize(s int64) {
-	o.cont = slices.Grow(o.cont, int(s))
 	o.sz = s
 }