
Add AuthorMissing tests

Signed-off-by: Vadim Markovtsev <vadim@sourced.tech>
Vadim Markovtsev 6 years ago
parent
commit
1aa99dcb7a
2 changed files with 185 additions and 3 deletions
  1. leaves/burndown_test.go (+86, -3)
  2. leaves/couples_test.go (+99, -0)
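
Both new tests exercise the identity.AuthorMissing sentinel: the burndown serialization is expected to emit all-zero rows for the unidentified author, and couples must not credit such commits to any person while still coupling the files they touch. Below is a minimal, self-contained sketch of that behaviour; `authorMissing`, `recordChange`, and the placeholder value are illustrative stand-ins, not the hercules API.

```go
package main

import "fmt"

// authorMissing stands in for identity.AuthorMissing, the sentinel hercules
// uses for commits whose author could not be identified. The value here is a
// placeholder, not the library's actual constant.
const authorMissing = -1

// recordChange sketches the behaviour the new tests assert: file-level
// statistics always count the change, while per-author statistics are updated
// only when the author was actually identified.
func recordChange(author int, file string, fileStats map[string]int, peopleStats []map[string]int) {
	fileStats[file]++
	if author == authorMissing {
		return // unidentified authors contribute nothing to per-person counters
	}
	peopleStats[author][file]++
}

func main() {
	fileStats := map[string]int{}
	peopleStats := []map[string]int{{}, {}}

	recordChange(0, "burndown.go", fileStats, peopleStats)
	recordChange(authorMissing, "burndown.go", fileStats, peopleStats)

	fmt.Println(fileStats)   // map[burndown.go:2]
	fmt.Println(peopleStats) // [map[burndown.go:1] map[]]
}
```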

leaves/burndown_test.go (+86, -3)

@@ -342,7 +342,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 	}
 }
 
-func TestBurndownSerialize(t *testing.T) {
+func bakeBurndownForSerialization(t *testing.T, firstAuthor, secondAuthor int) BurndownResult {
 	burndown := BurndownAnalysis{
 		Granularity:  30,
 		Sampling:     30,
@@ -352,7 +352,7 @@ func TestBurndownSerialize(t *testing.T) {
 	burndown.Initialize(test.Repository)
 	deps := map[string]interface{}{}
 	// stage 1
-	deps[identity.DependencyAuthor] = 0
+	deps[identity.DependencyAuthor] = firstAuthor
 	deps[items.DependencyDay] = 0
 	cache := map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
@@ -413,7 +413,7 @@ func TestBurndownSerialize(t *testing.T) {
 
 	// stage 2
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
-	deps[identity.DependencyAuthor] = 1
+	deps[identity.DependencyAuthor] = secondAuthor
 	deps[items.DependencyDay] = 30
 	cache = map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
@@ -481,6 +481,12 @@ func TestBurndownSerialize(t *testing.T) {
 	burndown.reversedPeopleDict = people[:]
 	burndown.Consume(deps)
 	out := burndown.Finalize().(BurndownResult)
+	return out
+}
+
+func TestBurndownSerialize(t *testing.T) {
+	out := bakeBurndownForSerialization(t, 0, 1)
+	burndown := &BurndownAnalysis{}
 
 	buffer := &bytes.Buffer{}
 	burndown.Serialize(out, false, buffer)
@@ -557,6 +563,83 @@ func TestBurndownSerialize(t *testing.T) {
 	assert.Equal(t, msg.PeopleInteraction.Indptr, indptr[:])
 }
 
+func TestBurndownSerializeAuthorMissing(t *testing.T) {
+	out := bakeBurndownForSerialization(t, 0, identity.AuthorMissing)
+	burndown := &BurndownAnalysis{}
+
+	buffer := &bytes.Buffer{}
+	burndown.Serialize(out, false, buffer)
+	assert.Equal(t, buffer.String(), `  granularity: 30
+  sampling: 30
+  "project": |-
+    1145    0
+     464  369
+  files:
+    "burndown.go": |-
+      926   0
+      293 250
+    "cmd/hercules/main.go": |-
+      207   0
+      171 119
+  people_sequence:
+    - "one@srcd"
+    - "two@srcd"
+  people:
+    "one@srcd": |-
+      1145    0
+       464    0
+    "two@srcd": |-
+      0 0
+      0 0
+  people_interaction: |-
+    1145 -681    0    0
+       0    0    0    0
+`)
+	buffer = &bytes.Buffer{}
+	burndown.Serialize(out, true, buffer)
+	msg := pb.BurndownAnalysisResults{}
+	proto.Unmarshal(buffer.Bytes(), &msg)
+	assert.Equal(t, msg.Granularity, int32(30))
+	assert.Equal(t, msg.Sampling, int32(30))
+	assert.Equal(t, msg.Project.Name, "project")
+	assert.Equal(t, msg.Project.NumberOfRows, int32(2))
+	assert.Equal(t, msg.Project.NumberOfColumns, int32(2))
+	assert.Len(t, msg.Project.Rows, 2)
+	assert.Len(t, msg.Project.Rows[0].Columns, 1)
+	assert.Equal(t, msg.Project.Rows[0].Columns[0], uint32(1145))
+	assert.Len(t, msg.Project.Rows[1].Columns, 2)
+	assert.Equal(t, msg.Project.Rows[1].Columns[0], uint32(464))
+	assert.Equal(t, msg.Project.Rows[1].Columns[1], uint32(369))
+	assert.Len(t, msg.Files, 2)
+	assert.Equal(t, msg.Files[0].Name, "burndown.go")
+	assert.Equal(t, msg.Files[1].Name, "cmd/hercules/main.go")
+	assert.Len(t, msg.Files[0].Rows, 2)
+	assert.Len(t, msg.Files[0].Rows[0].Columns, 1)
+	assert.Equal(t, msg.Files[0].Rows[0].Columns[0], uint32(926))
+	assert.Len(t, msg.Files[0].Rows[1].Columns, 2)
+	assert.Equal(t, msg.Files[0].Rows[1].Columns[0], uint32(293))
+	assert.Equal(t, msg.Files[0].Rows[1].Columns[1], uint32(250))
+	assert.Len(t, msg.People, 2)
+	assert.Equal(t, msg.People[0].Name, "one@srcd")
+	assert.Equal(t, msg.People[1].Name, "two@srcd")
+	assert.Len(t, msg.People[0].Rows, 2)
+	assert.Len(t, msg.People[0].Rows[0].Columns, 1)
+	assert.Len(t, msg.People[0].Rows[1].Columns, 1)
+	assert.Equal(t, msg.People[0].Rows[0].Columns[0], uint32(1145))
+	assert.Equal(t, msg.People[0].Rows[1].Columns[0], uint32(464))
+	assert.Len(t, msg.People[1].Rows, 2)
+	assert.Len(t, msg.People[1].Rows[0].Columns, 0)
+	assert.Len(t, msg.People[1].Rows[1].Columns, 0)
+	assert.Equal(t, msg.PeopleInteraction.NumberOfRows, int32(2))
+	assert.Equal(t, msg.PeopleInteraction.NumberOfColumns, int32(4))
+	data := [...]int64{1145, -681}
+	assert.Equal(t, msg.PeopleInteraction.Data, data[:])
+	indices := [...]int32{0, 1}
+	assert.Equal(t, msg.PeopleInteraction.Indices, indices[:])
+	indptr := [...]int64{0, 2, 2}
+	assert.Equal(t, msg.PeopleInteraction.Indptr, indptr[:])
+}
+
 type panickingCloser struct {
 }
 

leaves/couples_test.go (+99, -0)

@@ -189,6 +189,105 @@ func TestCouplesConsumeFinalize(t *testing.T) {
 	assert.Equal(t, cr.FilesMatrix[2][2], int64(3))
 }
 
+func TestCouplesConsumeFinalizeAuthorMissing(t *testing.T) {
+	c := fixtureCouples()
+	deps := map[string]interface{}{}
+	deps[identity.DependencyAuthor] = 0
+	deps[core.DependencyCommit], _ = test.Repository.CommitObject(gitplumbing.NewHash(
+		"a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3"))
+	deps[core.DependencyIsMerge] = false
+	deps[plumbing.DependencyTreeChanges] = generateChanges("+LICENSE2", "+file2.go", "+rbtree2.go")
+	c.Consume(deps)
+	deps[plumbing.DependencyTreeChanges] = generateChanges("+README.md", "-LICENSE2", "=analyser.go", ">file2.go>file_test.go")
+	c.Consume(deps)
+	deps[identity.DependencyAuthor] = 1
+	deps[plumbing.DependencyTreeChanges] = generateChanges("=README.md", "=analyser.go", "-rbtree2.go")
+	c.Consume(deps)
+	deps[identity.DependencyAuthor] = identity.AuthorMissing
+	deps[plumbing.DependencyTreeChanges] = generateChanges("=file_test.go")
+	c.Consume(deps)
+	assert.Equal(t, len(c.people[0]), 6)
+	assert.Equal(t, c.people[0]["README.md"], 1)
+	assert.Equal(t, c.people[0]["LICENSE2"], 2)
+	assert.Equal(t, c.people[0]["analyser.go"], 1)
+	assert.Equal(t, c.people[0]["file2.go"], 1)
+	assert.Equal(t, c.people[0]["file_test.go"], 1)
+	assert.Equal(t, c.people[0]["rbtree2.go"], 1)
+	assert.Equal(t, len(c.people[1]), 3)
+	assert.Equal(t, c.people[1]["README.md"], 1)
+	assert.Equal(t, c.people[1]["analyser.go"], 1)
+	assert.Equal(t, c.people[1]["rbtree2.go"], 1)
+	assert.Equal(t, len(c.people[2]), 0)
+	assert.Equal(t, len(c.files["README.md"]), 3)
+	assert.Equal(t, c.files["README.md"], map[string]int{
+		"README.md":    2,
+		"analyser.go":  2,
+		"file_test.go": 1,
+	})
+	assert.Equal(t, c.files["LICENSE2"], map[string]int{
+		"LICENSE2":   1,
+		"file2.go":   1,
+		"rbtree2.go": 1,
+	})
+	assert.Equal(t, c.files["file2.go"], map[string]int{
+		"LICENSE2":   1,
+		"file2.go":   1,
+		"rbtree2.go": 1,
+	})
+	assert.Equal(t, c.files["rbtree2.go"], map[string]int{
+		"LICENSE2":   1,
+		"file2.go":   1,
+		"rbtree2.go": 1,
+	})
+	assert.Equal(t, c.files["analyser.go"], map[string]int{
+		"analyser.go":  2,
+		"README.md":    2,
+		"file_test.go": 1,
+	})
+	assert.Equal(t, c.files["file_test.go"], map[string]int{
+		"file_test.go": 2,
+		"README.md":    1,
+		"analyser.go":  1,
+	})
+	assert.Equal(t, c.peopleCommits[0], 2)
+	assert.Equal(t, c.peopleCommits[1], 1)
+	assert.Equal(t, c.peopleCommits[2], 0)
+	cr := c.Finalize().(CouplesResult)
+	assert.Equal(t, len(cr.Files), 3)
+	assert.Equal(t, cr.Files[0], "README.md")
+	assert.Equal(t, cr.Files[1], "analyser.go")
+	assert.Equal(t, cr.Files[2], "file_test.go")
+	assert.Equal(t, len(cr.PeopleFiles[0]), 3)
+	assert.Equal(t, cr.PeopleFiles[0][0], 0)
+	assert.Equal(t, cr.PeopleFiles[0][1], 1)
+	assert.Equal(t, cr.PeopleFiles[0][2], 2)
+	assert.Equal(t, len(cr.PeopleFiles[1]), 2)
+	assert.Equal(t, cr.PeopleFiles[1][0], 0)
+	assert.Equal(t, cr.PeopleFiles[1][1], 1)
+	assert.Equal(t, len(cr.PeopleFiles[2]), 0)
+	assert.Equal(t, len(cr.PeopleMatrix[0]), 3)
+	assert.Equal(t, cr.PeopleMatrix[0][0], int64(7))
+	assert.Equal(t, cr.PeopleMatrix[0][1], int64(3))
+	assert.Equal(t, cr.PeopleMatrix[0][2], int64(0))
+	assert.Equal(t, len(cr.PeopleMatrix[1]), 2)
+	assert.Equal(t, cr.PeopleMatrix[1][0], int64(3))
+	assert.Equal(t, cr.PeopleMatrix[1][1], int64(3))
+	assert.Equal(t, len(cr.PeopleMatrix[2]), 0)
+	assert.Equal(t, len(cr.FilesMatrix), 3)
+	assert.Equal(t, len(cr.FilesMatrix[0]), 3)
+	assert.Equal(t, cr.FilesMatrix[0][2], int64(1))
+	assert.Equal(t, cr.FilesMatrix[0][0], int64(2))
+	assert.Equal(t, cr.FilesMatrix[0][1], int64(2))
+	assert.Equal(t, len(cr.FilesMatrix[1]), 3)
+	assert.Equal(t, cr.FilesMatrix[1][2], int64(1))
+	assert.Equal(t, cr.FilesMatrix[1][0], int64(2))
+	assert.Equal(t, cr.FilesMatrix[1][1], int64(2))
+	assert.Equal(t, len(cr.FilesMatrix[2]), 3)
+	assert.Equal(t, cr.FilesMatrix[2][0], int64(1))
+	assert.Equal(t, cr.FilesMatrix[2][1], int64(1))
+	assert.Equal(t, cr.FilesMatrix[2][2], int64(3))
+}
+
 func TestCouplesFork(t *testing.T) {
 	couples1 := fixtureCouples()
 	clones := couples1.Fork(1)