burndown_test.go 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090
  1. package leaves
  2. import (
  3. "bytes"
  4. "io"
  5. "io/ioutil"
  6. "path"
  7. "testing"
  8. "gopkg.in/src-d/hercules.v5/internal/core"
  9. "gopkg.in/src-d/hercules.v5/internal/test/fixtures"
  10. "github.com/gogo/protobuf/proto"
  11. "github.com/stretchr/testify/assert"
  12. "gopkg.in/src-d/go-git.v4/plumbing"
  13. "gopkg.in/src-d/go-git.v4/plumbing/object"
  14. "gopkg.in/src-d/hercules.v5/internal/pb"
  15. items "gopkg.in/src-d/hercules.v5/internal/plumbing"
  16. "gopkg.in/src-d/hercules.v5/internal/plumbing/identity"
  17. "gopkg.in/src-d/hercules.v5/internal/test"
  18. )
  19. func AddHash(t *testing.T, cache map[plumbing.Hash]*items.CachedBlob, hash string) {
  20. objhash := plumbing.NewHash(hash)
  21. blob, err := test.Repository.BlobObject(objhash)
  22. assert.Nil(t, err)
  23. cb := &items.CachedBlob{Blob: *blob}
  24. err = cb.Cache()
  25. assert.Nil(t, err)
  26. cache[objhash] = cb
  27. }
  28. func TestBurndownMeta(t *testing.T) {
  29. burndown := BurndownAnalysis{}
  30. assert.Equal(t, burndown.Name(), "Burndown")
  31. assert.Len(t, burndown.Provides(), 0)
  32. required := [...]string{
  33. items.DependencyFileDiff, items.DependencyTreeChanges, items.DependencyBlobCache,
  34. items.DependencyDay, identity.DependencyAuthor}
  35. for _, name := range required {
  36. assert.Contains(t, burndown.Requires(), name)
  37. }
  38. opts := burndown.ListConfigurationOptions()
  39. matches := 0
  40. for _, opt := range opts {
  41. switch opt.Name {
  42. case ConfigBurndownGranularity, ConfigBurndownSampling, ConfigBurndownTrackFiles,
  43. ConfigBurndownTrackPeople, ConfigBurndownDebug:
  44. matches++
  45. }
  46. }
  47. assert.Len(t, opts, matches)
  48. assert.Equal(t, burndown.Flag(), "burndown")
  49. }
// TestBurndownConfigure verifies that Configure picks up every supported fact,
// that disabling people tracking resets PeopleNumber to zero, and that an
// empty fact map leaves previously configured values untouched.
func TestBurndownConfigure(t *testing.T) {
	burndown := BurndownAnalysis{}
	facts := map[string]interface{}{}
	facts[ConfigBurndownGranularity] = 100
	facts[ConfigBurndownSampling] = 200
	facts[ConfigBurndownTrackFiles] = true
	facts[ConfigBurndownTrackPeople] = true
	facts[ConfigBurndownDebug] = true
	facts[identity.FactIdentityDetectorPeopleCount] = 5
	// Requires() is just a convenient non-empty string slice to round-trip
	// through the reversed people dictionary fact.
	facts[identity.FactIdentityDetectorReversedPeopleDict] = burndown.Requires()
	burndown.Configure(facts)
	assert.Equal(t, burndown.Granularity, 100)
	assert.Equal(t, burndown.Sampling, 200)
	assert.Equal(t, burndown.TrackFiles, true)
	assert.Equal(t, burndown.PeopleNumber, 5)
	assert.Equal(t, burndown.Debug, true)
	assert.Equal(t, burndown.reversedPeopleDict, burndown.Requires())
	// Turning people tracking off must zero PeopleNumber even though the
	// people count fact is present and larger.
	facts[ConfigBurndownTrackPeople] = false
	facts[identity.FactIdentityDetectorPeopleCount] = 50
	burndown.Configure(facts)
	assert.Equal(t, burndown.PeopleNumber, 0)
	// Reconfiguring with no facts at all must preserve the earlier state.
	facts = map[string]interface{}{}
	burndown.Configure(facts)
	assert.Equal(t, burndown.Granularity, 100)
	assert.Equal(t, burndown.Sampling, 200)
	assert.Equal(t, burndown.TrackFiles, true)
	assert.Equal(t, burndown.PeopleNumber, 0)
	assert.Equal(t, burndown.Debug, true)
	assert.Equal(t, burndown.reversedPeopleDict, burndown.Requires())
}
  80. func TestBurndownRegistration(t *testing.T) {
  81. summoned := core.Registry.Summon((&BurndownAnalysis{}).Name())
  82. assert.Len(t, summoned, 1)
  83. assert.Equal(t, summoned[0].Name(), "Burndown")
  84. leaves := core.Registry.GetLeaves()
  85. matched := false
  86. for _, tp := range leaves {
  87. if tp.Flag() == (&BurndownAnalysis{}).Flag() {
  88. matched = true
  89. break
  90. }
  91. }
  92. assert.True(t, matched)
  93. }
  94. func TestBurndownInitialize(t *testing.T) {
  95. burndown := BurndownAnalysis{}
  96. burndown.Sampling = -10
  97. burndown.Granularity = DefaultBurndownGranularity
  98. burndown.Initialize(test.Repository)
  99. assert.Equal(t, burndown.Sampling, DefaultBurndownGranularity)
  100. assert.Equal(t, burndown.Granularity, DefaultBurndownGranularity)
  101. burndown.Sampling = 0
  102. burndown.Granularity = DefaultBurndownGranularity - 1
  103. burndown.Initialize(test.Repository)
  104. assert.Equal(t, burndown.Sampling, DefaultBurndownGranularity-1)
  105. assert.Equal(t, burndown.Granularity, DefaultBurndownGranularity-1)
  106. burndown.Sampling = DefaultBurndownGranularity - 1
  107. burndown.Granularity = -10
  108. burndown.Initialize(test.Repository)
  109. assert.Equal(t, burndown.Sampling, DefaultBurndownGranularity-1)
  110. assert.Equal(t, burndown.Granularity, DefaultBurndownGranularity)
  111. }
// TestBurndownConsumeFinalize feeds two synthetic commits (day 0 and day 30)
// built from fixed blob/tree hashes of the test repository through
// BurndownAnalysis.Consume, checks the analysis' internal state after each
// stage (files, global/people/file histories), exercises the merge-commit
// bookkeeping, and finally validates the matrices returned by Finalize.
func TestBurndownConsumeFinalize(t *testing.T) {
	burndown := BurndownAnalysis{
		Granularity:  30,
		Sampling:     30,
		PeopleNumber: 2,
		TrackFiles:   true,
	}
	burndown.Initialize(test.Repository)
	deps := map[string]interface{}{}
	// stage 1: author 0 commits three files on day 0
	deps[identity.DependencyAuthor] = 0
	deps[items.DependencyDay] = 0
	cache := map[plumbing.Hash]*items.CachedBlob{}
	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
	AddHash(t, cache, "baa64828831d174f40140e4b3cfa77d1e917a2c1")
	AddHash(t, cache, "dc248ba2b22048cc730c571a748e8ffcf7085ab9")
	deps[items.DependencyBlobCache] = cache
	changes := make(object.Changes, 3)
	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
		"994eac1cd07235bb9815e547a75c84265dea00f5"))
	// modification: analyser.go changes between the two trees
	changes[0] = &object.Change{From: object.ChangeEntry{
		Name: "analyser.go",
		Tree: treeFrom,
		TreeEntry: object.TreeEntry{
			Name: "analyser.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9"),
		},
	}, To: object.ChangeEntry{
		Name: "analyser.go",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: "analyser.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1"),
		},
	}}
	// addition: cmd/hercules/main.go (empty From entry)
	changes[1] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
		Name: "cmd/hercules/main.go",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: "cmd/hercules/main.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9"),
		},
	},
	}
	// addition: .travis.yml
	changes[2] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
		Name: ".travis.yml",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: ".travis.yml",
			Mode: 0100644,
			Hash: plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"),
		},
	},
	}
	deps[items.DependencyTreeChanges] = changes
	// run the FileDiff fixture to produce the diffs Consume expects
	fd := fixtures.FileDiff()
	result, err := fd.Consume(deps)
	assert.Nil(t, err)
	deps[items.DependencyFileDiff] = result[items.DependencyFileDiff]
	deps[core.DependencyCommit], _ = test.Repository.CommitObject(plumbing.NewHash(
		"cce947b98a050c6d356bc6ba95030254914027b1"))
	deps[core.DependencyIsMerge] = false
	result, err = burndown.Consume(deps)
	assert.Nil(t, result)
	assert.Nil(t, err)
	assert.Equal(t, burndown.previousDay, 0)
	// after stage 1: three tracked files with their line counts
	assert.Len(t, burndown.files, 3)
	assert.Equal(t, burndown.files["cmd/hercules/main.go"].Len(), 207)
	assert.Equal(t, burndown.files["analyser.go"].Len(), 926)
	assert.Equal(t, burndown.files[".travis.yml"].Len(), 12)
	assert.Len(t, burndown.peopleHistories, 2)
	// author 0 owns everything added on day 0
	assert.Equal(t, burndown.peopleHistories[0][0][0], int64(12+207+926))
	assert.Len(t, burndown.globalHistory, 1)
	assert.Equal(t, burndown.globalHistory[0][0], int64(12+207+926))
	assert.Len(t, burndown.fileHistories, 3)
	// a second analysis without people/file tracking keeps those empty
	burndown2 := BurndownAnalysis{
		Granularity: 30,
		Sampling:    0,
	}
	burndown2.Initialize(test.Repository)
	_, err = burndown2.Consume(deps)
	assert.Nil(t, err)
	assert.Len(t, burndown2.peopleHistories, 0)
	assert.Len(t, burndown2.fileHistories, 0)
	// check merge hashes
	burndown3 := BurndownAnalysis{}
	burndown3.Initialize(test.Repository)
	deps[identity.DependencyAuthor] = 1
	deps[core.DependencyIsMerge] = true
	_, err = burndown3.Consume(deps)
	assert.Nil(t, err)
	assert.Equal(t, 1, burndown3.mergedAuthor)
	assert.True(t, burndown3.mergedFiles["cmd/hercules/main.go"])
	assert.True(t, burndown3.mergedFiles["analyser.go"], plumbing.ZeroHash)
	assert.True(t, burndown3.mergedFiles[".travis.yml"], plumbing.ZeroHash)
	// stage 2: a second commit on day 30 (rename, modification, deletion)
	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
	deps[core.DependencyIsMerge] = false
	deps[items.DependencyDay] = 30
	cache = map[plumbing.Hash]*items.CachedBlob{}
	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
	AddHash(t, cache, "baa64828831d174f40140e4b3cfa77d1e917a2c1")
	AddHash(t, cache, "29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2")
	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
	AddHash(t, cache, "f7d918ec500e2f925ecde79b51cc007bac27de72")
	deps[items.DependencyBlobCache] = cache
	changes = make(object.Changes, 3)
	treeFrom, _ = test.Repository.TreeObject(plumbing.NewHash(
		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
	treeTo, _ = test.Repository.TreeObject(plumbing.NewHash(
		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
	// rename: analyser.go -> burndown.go
	changes[0] = &object.Change{From: object.ChangeEntry{
		Name: "analyser.go",
		Tree: treeFrom,
		TreeEntry: object.TreeEntry{
			Name: "analyser.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1"),
		},
	}, To: object.ChangeEntry{
		Name: "burndown.go",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: "burndown.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2"),
		},
	},
	}
	// modification: cmd/hercules/main.go
	changes[1] = &object.Change{From: object.ChangeEntry{
		Name: "cmd/hercules/main.go",
		Tree: treeFrom,
		TreeEntry: object.TreeEntry{
			Name: "cmd/hercules/main.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9"),
		},
	}, To: object.ChangeEntry{
		Name: "cmd/hercules/main.go",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: "cmd/hercules/main.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("f7d918ec500e2f925ecde79b51cc007bac27de72"),
		},
	},
	}
	// deletion: .travis.yml (empty To entry)
	changes[2] = &object.Change{From: object.ChangeEntry{
		Name: ".travis.yml",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: ".travis.yml",
			Mode: 0100644,
			Hash: plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"),
		},
	}, To: object.ChangeEntry{},
	}
	deps[items.DependencyTreeChanges] = changes
	fd = fixtures.FileDiff()
	result, err = fd.Consume(deps)
	assert.Nil(t, err)
	deps[items.DependencyFileDiff] = result[items.DependencyFileDiff]
	result, err = burndown.Consume(deps)
	assert.Nil(t, result)
	assert.Nil(t, err)
	assert.Equal(t, burndown.previousDay, 30)
	// after stage 2: .travis.yml is gone, analyser.go became burndown.go
	assert.Len(t, burndown.files, 2)
	assert.Equal(t, burndown.files["cmd/hercules/main.go"].Len(), 290)
	assert.Equal(t, burndown.files["burndown.go"].Len(), 543)
	assert.Len(t, burndown.peopleHistories, 2)
	assert.Len(t, burndown.globalHistory, 2)
	assert.Equal(t, burndown.globalHistory[0][0], int64(1145))
	assert.Equal(t, burndown.globalHistory[30][0], int64(-681))
	assert.Equal(t, burndown.globalHistory[30][30], int64(369))
	assert.Len(t, burndown.fileHistories, 2)
	out := burndown.Finalize().(BurndownResult)
	/*
		GlobalHistory [][]int64
		FileHistories map[string][][]int64
		PeopleHistories [][][]int64
		PeopleMatrix [][]int64
	*/
	assert.Len(t, out.GlobalHistory, 2)
	for i := 0; i < 2; i++ {
		assert.Len(t, out.GlobalHistory[i], 2)
	}
	assert.Len(t, out.GlobalHistory, 2)
	assert.Equal(t, out.GlobalHistory[0][0], int64(1145))
	assert.Equal(t, out.GlobalHistory[0][1], int64(0))
	assert.Equal(t, out.GlobalHistory[1][0], int64(464))
	assert.Equal(t, out.GlobalHistory[1][1], int64(369))
	assert.Len(t, out.FileHistories, 2)
	assert.Len(t, out.FileHistories["cmd/hercules/main.go"], 2)
	assert.Len(t, out.FileHistories["burndown.go"], 2)
	assert.Len(t, out.FileHistories["cmd/hercules/main.go"][0], 2)
	assert.Len(t, out.FileHistories["burndown.go"][0], 2)
	// PeopleMatrix rows have 2 extra columns (PeopleNumber + 2)
	assert.Len(t, out.PeopleMatrix, 2)
	assert.Len(t, out.PeopleMatrix[0], 4)
	assert.Len(t, out.PeopleMatrix[1], 4)
	assert.Equal(t, out.PeopleMatrix[0][0], int64(1145))
	assert.Equal(t, out.PeopleMatrix[0][1], int64(0))
	assert.Equal(t, out.PeopleMatrix[0][2], int64(0))
	assert.Equal(t, out.PeopleMatrix[0][3], int64(-681))
	assert.Equal(t, out.PeopleMatrix[1][0], int64(369))
	assert.Equal(t, out.PeopleMatrix[1][1], int64(0))
	assert.Equal(t, out.PeopleMatrix[1][2], int64(0))
	assert.Equal(t, out.PeopleMatrix[1][3], int64(0))
	assert.Len(t, out.PeopleHistories, 2)
	for i := 0; i < 2; i++ {
		assert.Len(t, out.PeopleHistories[i], 2)
		assert.Len(t, out.PeopleHistories[i][0], 2)
		assert.Len(t, out.PeopleHistories[i][1], 2)
	}
}
// TestBurndownSerialize replays the same two-commit scenario as
// TestBurndownConsumeFinalize (author 0 on day 0, author 1 on day 30), then
// serializes the finalized result twice: once as YAML (compared against an
// exact expected string) and once as protobuf (decoded and field-checked,
// including the CSR-encoded people interaction matrix).
func TestBurndownSerialize(t *testing.T) {
	burndown := BurndownAnalysis{
		Granularity:  30,
		Sampling:     30,
		PeopleNumber: 2,
		TrackFiles:   true,
	}
	burndown.Initialize(test.Repository)
	deps := map[string]interface{}{}
	// stage 1: author 0, day 0
	deps[identity.DependencyAuthor] = 0
	deps[items.DependencyDay] = 0
	cache := map[plumbing.Hash]*items.CachedBlob{}
	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
	AddHash(t, cache, "baa64828831d174f40140e4b3cfa77d1e917a2c1")
	AddHash(t, cache, "dc248ba2b22048cc730c571a748e8ffcf7085ab9")
	deps[items.DependencyBlobCache] = cache
	changes := make(object.Changes, 3)
	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
		"994eac1cd07235bb9815e547a75c84265dea00f5"))
	// modification: analyser.go
	changes[0] = &object.Change{From: object.ChangeEntry{
		Name: "analyser.go",
		Tree: treeFrom,
		TreeEntry: object.TreeEntry{
			Name: "analyser.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9"),
		},
	}, To: object.ChangeEntry{
		Name: "analyser.go",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: "analyser.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1"),
		},
	}}
	// addition: cmd/hercules/main.go
	changes[1] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
		Name: "cmd/hercules/main.go",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: "cmd/hercules/main.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9"),
		},
	},
	}
	// addition: .travis.yml
	changes[2] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
		Name: ".travis.yml",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: ".travis.yml",
			Mode: 0100644,
			Hash: plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"),
		},
	},
	}
	deps[items.DependencyTreeChanges] = changes
	deps[core.DependencyCommit], _ = test.Repository.CommitObject(plumbing.NewHash(
		"cce947b98a050c6d356bc6ba95030254914027b1"))
	deps[core.DependencyIsMerge] = false
	fd := fixtures.FileDiff()
	result, _ := fd.Consume(deps)
	deps[items.DependencyFileDiff] = result[items.DependencyFileDiff]
	burndown.Consume(deps)
	// stage 2: author 1, day 30 (rename, modification, deletion)
	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
	deps[identity.DependencyAuthor] = 1
	deps[items.DependencyDay] = 30
	cache = map[plumbing.Hash]*items.CachedBlob{}
	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
	AddHash(t, cache, "baa64828831d174f40140e4b3cfa77d1e917a2c1")
	AddHash(t, cache, "29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2")
	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
	AddHash(t, cache, "f7d918ec500e2f925ecde79b51cc007bac27de72")
	deps[items.DependencyBlobCache] = cache
	changes = make(object.Changes, 3)
	treeFrom, _ = test.Repository.TreeObject(plumbing.NewHash(
		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
	treeTo, _ = test.Repository.TreeObject(plumbing.NewHash(
		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
	// rename: analyser.go -> burndown.go
	changes[0] = &object.Change{From: object.ChangeEntry{
		Name: "analyser.go",
		Tree: treeFrom,
		TreeEntry: object.TreeEntry{
			Name: "analyser.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1"),
		},
	}, To: object.ChangeEntry{
		Name: "burndown.go",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: "burndown.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2"),
		},
	},
	}
	// modification: cmd/hercules/main.go
	changes[1] = &object.Change{From: object.ChangeEntry{
		Name: "cmd/hercules/main.go",
		Tree: treeFrom,
		TreeEntry: object.TreeEntry{
			Name: "cmd/hercules/main.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9"),
		},
	}, To: object.ChangeEntry{
		Name: "cmd/hercules/main.go",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: "cmd/hercules/main.go",
			Mode: 0100644,
			Hash: plumbing.NewHash("f7d918ec500e2f925ecde79b51cc007bac27de72"),
		},
	},
	}
	// deletion: .travis.yml
	changes[2] = &object.Change{From: object.ChangeEntry{
		Name: ".travis.yml",
		Tree: treeTo,
		TreeEntry: object.TreeEntry{
			Name: ".travis.yml",
			Mode: 0100644,
			Hash: plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"),
		},
	}, To: object.ChangeEntry{},
	}
	deps[items.DependencyTreeChanges] = changes
	fd = fixtures.FileDiff()
	result, _ = fd.Consume(deps)
	deps[items.DependencyFileDiff] = result[items.DependencyFileDiff]
	people := [...]string{"one@srcd", "two@srcd"}
	burndown.reversedPeopleDict = people[:]
	burndown.Consume(deps)
	out := burndown.Finalize().(BurndownResult)
	// YAML serialization: compared verbatim, whitespace included
	buffer := &bytes.Buffer{}
	burndown.Serialize(out, false, buffer)
	assert.Equal(t, buffer.String(), ` granularity: 30
sampling: 30
"project": |-
1145 0
464 369
files:
"burndown.go": |-
926 0
293 250
"cmd/hercules/main.go": |-
207 0
171 119
people_sequence:
- "one@srcd"
- "two@srcd"
people:
"one@srcd": |-
1145 0
464 0
"two@srcd": |-
0 0
0 369
people_interaction: |-
1145 0 0 -681
369 0 0 0
`)
	// protobuf serialization: decode and verify field by field
	buffer = &bytes.Buffer{}
	burndown.Serialize(out, true, buffer)
	msg := pb.BurndownAnalysisResults{}
	proto.Unmarshal(buffer.Bytes(), &msg)
	assert.Equal(t, msg.Granularity, int32(30))
	assert.Equal(t, msg.Sampling, int32(30))
	assert.Equal(t, msg.Project.Name, "project")
	assert.Equal(t, msg.Project.NumberOfRows, int32(2))
	assert.Equal(t, msg.Project.NumberOfColumns, int32(2))
	assert.Len(t, msg.Project.Rows, 2)
	assert.Len(t, msg.Project.Rows[0].Columns, 1)
	assert.Equal(t, msg.Project.Rows[0].Columns[0], uint32(1145))
	assert.Len(t, msg.Project.Rows[1].Columns, 2)
	assert.Equal(t, msg.Project.Rows[1].Columns[0], uint32(464))
	assert.Equal(t, msg.Project.Rows[1].Columns[1], uint32(369))
	assert.Len(t, msg.Files, 2)
	assert.Equal(t, msg.Files[0].Name, "burndown.go")
	assert.Equal(t, msg.Files[1].Name, "cmd/hercules/main.go")
	assert.Len(t, msg.Files[0].Rows, 2)
	assert.Len(t, msg.Files[0].Rows[0].Columns, 1)
	assert.Equal(t, msg.Files[0].Rows[0].Columns[0], uint32(926))
	assert.Len(t, msg.Files[0].Rows[1].Columns, 2)
	assert.Equal(t, msg.Files[0].Rows[1].Columns[0], uint32(293))
	assert.Equal(t, msg.Files[0].Rows[1].Columns[1], uint32(250))
	assert.Len(t, msg.People, 2)
	assert.Equal(t, msg.People[0].Name, "one@srcd")
	assert.Equal(t, msg.People[1].Name, "two@srcd")
	assert.Len(t, msg.People[0].Rows, 2)
	assert.Len(t, msg.People[0].Rows[0].Columns, 1)
	assert.Len(t, msg.People[0].Rows[1].Columns, 1)
	assert.Equal(t, msg.People[0].Rows[0].Columns[0], uint32(1145))
	assert.Equal(t, msg.People[0].Rows[1].Columns[0], uint32(464))
	assert.Len(t, msg.People[1].Rows, 2)
	assert.Len(t, msg.People[1].Rows[0].Columns, 0)
	assert.Len(t, msg.People[1].Rows[1].Columns, 2)
	assert.Equal(t, msg.People[1].Rows[1].Columns[0], uint32(0))
	assert.Equal(t, msg.People[1].Rows[1].Columns[1], uint32(369))
	// the interaction matrix is stored in CSR form: data/indices/indptr
	assert.Equal(t, msg.PeopleInteraction.NumberOfRows, int32(2))
	assert.Equal(t, msg.PeopleInteraction.NumberOfColumns, int32(4))
	data := [...]int64{1145, -681, 369}
	assert.Equal(t, msg.PeopleInteraction.Data, data[:])
	indices := [...]int32{0, 3, 0}
	assert.Equal(t, msg.PeopleInteraction.Indices, indices[:])
	indptr := [...]int64{0, 2, 3}
	assert.Equal(t, msg.PeopleInteraction.Indptr, indptr[:])
}
// panickingCloser is a test double implementing io.Closer whose Close always
// fails; it is used to check that checkClose panics on a Close error (hence
// the name — the closer itself does not panic, checkClose does).
type panickingCloser struct {
}

// Close always returns io.EOF to simulate a failing resource.
func (c panickingCloser) Close() error {
	return io.EOF
}
  549. func TestCheckClose(t *testing.T) {
  550. closer := panickingCloser{}
  551. assert.Panics(t, func() { checkClose(closer) })
  552. }
// TestBurndownAddMatrix exercises addBurndownMatrix with offset 1: a 5x3
// sampled burndown matrix is interpolated into a (5*3+1)x(5*3+1) daily
// matrix. It checks the size-mismatch panics, that row/column 0 stay zero
// (the offset shifts everything by one), that band sums at sample points
// match the source matrix, and the expected 0 -> const -> peak -> decay
// shape of every column.
func TestBurndownAddMatrix(t *testing.T) {
	size := 5*3 + 1
	daily := make([][]float32, size)
	for i := range daily {
		daily[i] = make([]float32, size)
	}
	// build the 5-sample x 3-band source matrix (lower-triangular-ish)
	added := make([][]int64, 5)
	for i := range added {
		added[i] = make([]int64, 3)
		switch i {
		case 0:
			added[i][0] = 10
		case 1:
			added[i][0] = 18
			added[i][1] = 2
		case 2:
			added[i][0] = 12
			added[i][1] = 14
		case 3:
			added[i][0] = 10
			added[i][1] = 12
			added[i][2] = 6
		case 4:
			added[i][0] = 8
			added[i][1] = 9
			added[i][2] = 13
		}
	}
	// wrong number of rows must panic
	assert.Panics(t, func() {
		daily2 := make([][]float32, 16)
		for i := range daily2 {
			daily2[i] = make([]float32, 15)
		}
		addBurndownMatrix(added, 5, 3, daily2, 1)
	})
	// wrong number of columns must panic
	assert.Panics(t, func() {
		daily2 := make([][]float32, 15)
		for i := range daily2 {
			daily2[i] = make([]float32, 16)
		}
		addBurndownMatrix(added, 5, 3, daily2, 1)
	})
	// yaml.PrintMatrix(os.Stdout, added, 0, "test", true)
	/*
	"test": |-
	10 0 0
	18 2 0
	12 14 0
	10 12 6
	8 9 13
	*/
	addBurndownMatrix(added, 5, 3, daily, 1)
	// with offset 1, the first row and first column remain untouched
	for i := range daily[0] {
		assert.Equal(t, daily[0][i], float32(0))
	}
	for i := range daily {
		assert.Equal(t, daily[i][0], float32(0))
	}
	/*for _, row := range daily {
	fmt.Println(row)
	}*/
	// check pinned points
	for y := 0; y < 5; y++ {
		for x := 0; x < 3; x++ {
			// each granularity band of 5 daily columns must sum back to
			// the original matrix cell at the sample boundary
			var sum float32
			for i := x * 5; i < (x+1)*5; i++ {
				sum += daily[(y+1)*3][i+1]
			}
			assert.InDelta(t, sum, added[y][x], 0.00001)
		}
	}
	// check overall trend: 0 -> const -> peak -> decay
	for x := 0; x < 15; x++ {
		// nothing can exist before the day the line was written
		for y := 0; y < x; y++ {
			assert.Zero(t, daily[y+1][x+1])
		}
		var prev float32
		// constant plateau until the next granularity boundary
		for y := x; y < ((x+3)/5)*5; y++ {
			if prev == 0 {
				prev = daily[y+1][x+1]
			}
			assert.Equal(t, daily[y+1][x+1], prev)
		}
		// monotonically non-increasing afterwards
		for y := ((x + 3) / 5) * 5; y < 15; y++ {
			if prev == 0 {
				prev = daily[y+1][x+1]
			}
			assert.True(t, daily[y+1][x+1] <= prev)
			prev = daily[y+1][x+1]
		}
	}
}
// TestBurndownAddMatrixCrazy mirrors TestBurndownAddMatrix but with offset 0
// and a source matrix whose first band shrinks over time (10, 9, 8, 7, 6),
// stressing the interpolation with decaying values. Same invariants: band
// sums at sample points and a 0 -> const -> peak -> decay column shape.
func TestBurndownAddMatrixCrazy(t *testing.T) {
	size := 5 * 3
	daily := make([][]float32, size)
	for i := range daily {
		daily[i] = make([]float32, size)
	}
	// 5-sample x 3-band source with a decaying first band
	added := make([][]int64, 5)
	for i := range added {
		added[i] = make([]int64, 3)
		switch i {
		case 0:
			added[i][0] = 10
		case 1:
			added[i][0] = 9
			added[i][1] = 2
		case 2:
			added[i][0] = 8
			added[i][1] = 16
		case 3:
			added[i][0] = 7
			added[i][1] = 12
			added[i][2] = 6
		case 4:
			added[i][0] = 6
			added[i][1] = 9
			added[i][2] = 13
		}
	}
	// yaml.PrintMatrix(os.Stdout, added, 0, "test", true)
	/*
	"test": |-
	10 0 0
	9 2 0
	8 16 0
	7 12 6
	6 9 13
	*/
	addBurndownMatrix(added, 5, 3, daily, 0)
	/*
	for _, row := range daily {
	for _, v := range row {
	fmt.Print(v, " ")
	}
	fmt.Println()
	}
	*/
	// check pinned points
	for y := 0; y < 5; y++ {
		for x := 0; x < 3; x++ {
			// band sums at each sample boundary must equal the source cell
			var sum float32
			for i := x * 5; i < (x+1)*5; i++ {
				sum += daily[(y+1)*3-1][i]
			}
			assert.InDelta(t, sum, added[y][x], 0.00001)
		}
	}
	// check overall trend: 0 -> const -> peak -> decay
	for x := 0; x < 15; x++ {
		// zero before the line's birth day
		for y := 0; y < x; y++ {
			assert.Zero(t, daily[y][x])
		}
		var prev float32
		// plateau until the next granularity boundary
		for y := x; y < ((x+3)/5)*5; y++ {
			if prev == 0 {
				prev = daily[y][x]
			}
			assert.Equal(t, daily[y][x], prev)
		}
		// non-increasing afterwards
		for y := ((x + 3) / 5) * 5; y < 15; y++ {
			if prev == 0 {
				prev = daily[y][x]
			}
			assert.True(t, daily[y][x] <= prev)
			prev = daily[y][x]
		}
	}
}
// TestBurndownAddMatrixNaNs exercises addBurndownMatrix on a 4x4 matrix
// where sampling equals granularity (4), a configuration that previously
// risked NaN values during interpolation. It verifies band sums at the
// sample boundaries and the usual 0 -> const -> peak -> decay column shape.
func TestBurndownAddMatrixNaNs(t *testing.T) {
	size := 4 * 4
	daily := make([][]float32, size)
	for i := range daily {
		daily[i] = make([]float32, size)
	}
	// 4-sample x 4-band source matrix
	added := make([][]int64, 4)
	for i := range added {
		added[i] = make([]int64, 4)
		switch i {
		case 0:
			added[i][0] = 20
		case 1:
			added[i][0] = 18
			added[i][1] = 30
		case 2:
			added[i][0] = 15
			added[i][1] = 25
			added[i][2] = 28
		case 3:
			added[i][0] = 12
			added[i][1] = 20
			added[i][2] = 25
			added[i][3] = 40
		}
	}
	// yaml.PrintMatrix(os.Stdout, added, 0, "test", true)
	/*
	"test": |-
	20 0 0 0
	18 30 0 0
	15 25 28 0
	12 20 25 40
	*/
	addBurndownMatrix(added, 4, 4, daily, 0)
	/*
	for _, row := range daily {
	for _, v := range row {
	fmt.Print(v, " ")
	}
	fmt.Println()
	}
	*/
	// check pinned points
	for y := 0; y < 4; y++ {
		for x := 0; x < 4; x++ {
			// each 4-column band must sum to the source cell at the boundary
			var sum float32
			for i := x * 4; i < (x+1)*4; i++ {
				sum += daily[(y+1)*4-1][i]
			}
			assert.InDelta(t, sum, added[y][x], 0.00001)
		}
	}
	// check overall trend: 0 -> const -> peak -> decay
	for x := 0; x < 16; x++ {
		// zero before the line's birth day
		for y := 0; y < x; y++ {
			assert.Zero(t, daily[y][x])
		}
		var prev float32
		// plateau within the band containing the birth day
		for y := x - 4; y < x; y++ {
			if y < 0 {
				continue
			}
			if prev == 0 {
				prev = daily[y][x]
			}
			assert.Equal(t, daily[y][x], prev)
		}
		// non-increasing from the birth day onwards
		for y := x; y < 16; y++ {
			if prev == 0 {
				prev = daily[y][x]
			}
			assert.True(t, daily[y][x] <= prev)
			prev = daily[y][x]
		}
	}
}
  799. func TestBurndownMergeGlobalHistory(t *testing.T) {
  800. people1 := [...]string{"one", "two"}
  801. res1 := BurndownResult{
  802. GlobalHistory: [][]int64{},
  803. FileHistories: map[string][][]int64{},
  804. PeopleHistories: [][][]int64{},
  805. PeopleMatrix: [][]int64{},
  806. reversedPeopleDict: people1[:],
  807. sampling: 15,
  808. granularity: 20,
  809. }
  810. c1 := core.CommonAnalysisResult{
  811. BeginTime: 600566400, // 1989 Jan 12
  812. EndTime: 604713600, // 1989 March 1
  813. CommitsNumber: 10,
  814. RunTime: 100000,
  815. }
  816. // 48 days
  817. res1.GlobalHistory = make([][]int64, 48/15+1 /* 4 samples */)
  818. for i := range res1.GlobalHistory {
  819. res1.GlobalHistory[i] = make([]int64, 48/20+1 /* 3 bands */)
  820. switch i {
  821. case 0:
  822. res1.GlobalHistory[i][0] = 1000
  823. case 1:
  824. res1.GlobalHistory[i][0] = 1100
  825. res1.GlobalHistory[i][1] = 400
  826. case 2:
  827. res1.GlobalHistory[i][0] = 900
  828. res1.GlobalHistory[i][1] = 750
  829. res1.GlobalHistory[i][2] = 100
  830. case 3:
  831. res1.GlobalHistory[i][0] = 850
  832. res1.GlobalHistory[i][1] = 700
  833. res1.GlobalHistory[i][2] = 150
  834. }
  835. }
  836. res1.FileHistories["file1"] = res1.GlobalHistory
  837. res1.FileHistories["file2"] = res1.GlobalHistory
  838. res1.PeopleHistories = append(res1.PeopleHistories, res1.GlobalHistory)
  839. res1.PeopleHistories = append(res1.PeopleHistories, res1.GlobalHistory)
  840. res1.PeopleMatrix = append(res1.PeopleMatrix, make([]int64, 4))
  841. res1.PeopleMatrix = append(res1.PeopleMatrix, make([]int64, 4))
  842. res1.PeopleMatrix[0][0] = 10
  843. res1.PeopleMatrix[0][1] = 20
  844. res1.PeopleMatrix[0][2] = 30
  845. res1.PeopleMatrix[0][3] = 40
  846. res1.PeopleMatrix[1][0] = 50
  847. res1.PeopleMatrix[1][1] = 60
  848. res1.PeopleMatrix[1][2] = 70
  849. res1.PeopleMatrix[1][3] = 80
  850. people2 := [...]string{"two", "three"}
  851. res2 := BurndownResult{
  852. GlobalHistory: [][]int64{},
  853. FileHistories: map[string][][]int64{},
  854. PeopleHistories: [][][]int64{},
  855. PeopleMatrix: [][]int64{},
  856. reversedPeopleDict: people2[:],
  857. sampling: 14,
  858. granularity: 19,
  859. }
  860. c2 := core.CommonAnalysisResult{
  861. BeginTime: 601084800, // 1989 Jan 18
  862. EndTime: 605923200, // 1989 March 15
  863. CommitsNumber: 10,
  864. RunTime: 100000,
  865. }
  866. // 56 days
  867. res2.GlobalHistory = make([][]int64, 56/14 /* 4 samples */)
  868. for i := range res2.GlobalHistory {
  869. res2.GlobalHistory[i] = make([]int64, 56/19+1 /* 3 bands */)
  870. switch i {
  871. case 0:
  872. res2.GlobalHistory[i][0] = 900
  873. case 1:
  874. res2.GlobalHistory[i][0] = 1100
  875. res2.GlobalHistory[i][1] = 400
  876. case 2:
  877. res2.GlobalHistory[i][0] = 900
  878. res2.GlobalHistory[i][1] = 750
  879. res2.GlobalHistory[i][2] = 100
  880. case 3:
  881. res2.GlobalHistory[i][0] = 800
  882. res2.GlobalHistory[i][1] = 600
  883. res2.GlobalHistory[i][2] = 600
  884. }
  885. }
  886. res2.FileHistories["file2"] = res2.GlobalHistory
  887. res2.FileHistories["file3"] = res2.GlobalHistory
  888. res2.PeopleHistories = append(res2.PeopleHistories, res2.GlobalHistory)
  889. res2.PeopleHistories = append(res2.PeopleHistories, res2.GlobalHistory)
  890. res2.PeopleMatrix = append(res2.PeopleMatrix, make([]int64, 4))
  891. res2.PeopleMatrix = append(res2.PeopleMatrix, make([]int64, 4))
  892. res2.PeopleMatrix[0][0] = 100
  893. res2.PeopleMatrix[0][1] = 200
  894. res2.PeopleMatrix[0][2] = 300
  895. res2.PeopleMatrix[0][3] = 400
  896. res2.PeopleMatrix[1][0] = 500
  897. res2.PeopleMatrix[1][1] = 600
  898. res2.PeopleMatrix[1][2] = 700
  899. res2.PeopleMatrix[1][3] = 800
  900. burndown := BurndownAnalysis{}
  901. merged := burndown.MergeResults(res1, res2, &c1, &c2).(BurndownResult)
  902. assert.Equal(t, merged.granularity, 19)
  903. assert.Equal(t, merged.sampling, 14)
  904. assert.Len(t, merged.GlobalHistory, 5)
  905. for _, row := range merged.GlobalHistory {
  906. assert.Len(t, row, 4)
  907. }
  908. assert.Equal(t, merged.FileHistories["file1"], res1.GlobalHistory)
  909. assert.Equal(t, merged.FileHistories["file2"], merged.GlobalHistory)
  910. assert.Equal(t, merged.FileHistories["file3"], res2.GlobalHistory)
  911. assert.Len(t, merged.reversedPeopleDict, 3)
  912. assert.Equal(t, merged.PeopleHistories[0], res1.GlobalHistory)
  913. assert.Equal(t, merged.PeopleHistories[1], merged.GlobalHistory)
  914. assert.Equal(t, merged.PeopleHistories[2], res2.GlobalHistory)
  915. assert.Len(t, merged.PeopleMatrix, 3)
  916. for _, row := range merged.PeopleMatrix {
  917. assert.Len(t, row, 5)
  918. }
  919. assert.Equal(t, merged.PeopleMatrix[0][0], int64(10))
  920. assert.Equal(t, merged.PeopleMatrix[0][1], int64(20))
  921. assert.Equal(t, merged.PeopleMatrix[0][2], int64(30))
  922. assert.Equal(t, merged.PeopleMatrix[0][3], int64(40))
  923. assert.Equal(t, merged.PeopleMatrix[0][4], int64(0))
  924. assert.Equal(t, merged.PeopleMatrix[1][0], int64(150))
  925. assert.Equal(t, merged.PeopleMatrix[1][1], int64(260))
  926. assert.Equal(t, merged.PeopleMatrix[1][2], int64(70))
  927. assert.Equal(t, merged.PeopleMatrix[1][3], int64(380))
  928. assert.Equal(t, merged.PeopleMatrix[1][4], int64(400))
  929. assert.Equal(t, merged.PeopleMatrix[2][0], int64(500))
  930. assert.Equal(t, merged.PeopleMatrix[2][1], int64(600))
  931. assert.Equal(t, merged.PeopleMatrix[2][2], int64(0))
  932. assert.Equal(t, merged.PeopleMatrix[2][3], int64(700))
  933. assert.Equal(t, merged.PeopleMatrix[2][4], int64(800))
  934. burndown.serializeBinary(&merged, ioutil.Discard)
  935. }
  936. func TestBurndownMergeNils(t *testing.T) {
  937. res1 := BurndownResult{
  938. GlobalHistory: [][]int64{},
  939. FileHistories: map[string][][]int64{},
  940. PeopleHistories: [][][]int64{},
  941. PeopleMatrix: [][]int64{},
  942. reversedPeopleDict: []string{},
  943. sampling: 15,
  944. granularity: 20,
  945. }
  946. c1 := core.CommonAnalysisResult{
  947. BeginTime: 600566400, // 1989 Jan 12
  948. EndTime: 604713600, // 1989 March 1
  949. CommitsNumber: 10,
  950. RunTime: 100000,
  951. }
  952. res2 := BurndownResult{
  953. GlobalHistory: nil,
  954. FileHistories: nil,
  955. PeopleHistories: nil,
  956. PeopleMatrix: nil,
  957. reversedPeopleDict: nil,
  958. sampling: 14,
  959. granularity: 19,
  960. }
  961. c2 := core.CommonAnalysisResult{
  962. BeginTime: 601084800, // 1989 Jan 18
  963. EndTime: 605923200, // 1989 March 15
  964. CommitsNumber: 10,
  965. RunTime: 100000,
  966. }
  967. burndown := BurndownAnalysis{}
  968. merged := burndown.MergeResults(res1, res2, &c1, &c2).(BurndownResult)
  969. assert.Equal(t, merged.granularity, 19)
  970. assert.Equal(t, merged.sampling, 14)
  971. assert.Nil(t, merged.GlobalHistory)
  972. assert.Nil(t, merged.FileHistories)
  973. assert.Nil(t, merged.PeopleHistories)
  974. assert.Nil(t, merged.PeopleMatrix)
  975. burndown.serializeBinary(&merged, ioutil.Discard)
  976. res2.GlobalHistory = make([][]int64, 56/14 /* 4 samples */)
  977. for i := range res2.GlobalHistory {
  978. res2.GlobalHistory[i] = make([]int64, 56/19+1 /* 3 bands */)
  979. switch i {
  980. case 0:
  981. res2.GlobalHistory[i][0] = 900
  982. case 1:
  983. res2.GlobalHistory[i][0] = 1100
  984. res2.GlobalHistory[i][1] = 400
  985. case 2:
  986. res2.GlobalHistory[i][0] = 900
  987. res2.GlobalHistory[i][1] = 750
  988. res2.GlobalHistory[i][2] = 100
  989. case 3:
  990. res2.GlobalHistory[i][0] = 800
  991. res2.GlobalHistory[i][1] = 600
  992. res2.GlobalHistory[i][2] = 600
  993. }
  994. }
  995. people1 := [...]string{"one", "two"}
  996. res1.reversedPeopleDict = people1[:]
  997. res1.PeopleMatrix = append(res1.PeopleMatrix, make([]int64, 4))
  998. res1.PeopleMatrix = append(res1.PeopleMatrix, make([]int64, 4))
  999. res1.PeopleMatrix[0][0] = 10
  1000. res1.PeopleMatrix[0][1] = 20
  1001. res1.PeopleMatrix[0][2] = 30
  1002. res1.PeopleMatrix[0][3] = 40
  1003. res1.PeopleMatrix[1][0] = 50
  1004. res1.PeopleMatrix[1][1] = 60
  1005. res1.PeopleMatrix[1][2] = 70
  1006. res1.PeopleMatrix[1][3] = 80
  1007. people2 := [...]string{"two", "three"}
  1008. res2.reversedPeopleDict = people2[:]
  1009. merged = burndown.MergeResults(res1, res2, &c1, &c2).(BurndownResult)
  1010. mgh := [5][4]int64{
  1011. {0, 0, 0, 0},
  1012. {578, 0, 0, 0},
  1013. {798, 546, 0, 0},
  1014. {664, 884, 222, 0},
  1015. {547, 663, 610, 178},
  1016. }
  1017. mgh2 := [...][]int64{
  1018. mgh[0][:], mgh[1][:], mgh[2][:], mgh[3][:], mgh[4][:],
  1019. }
  1020. mgh3 := mgh2[:]
  1021. assert.Equal(t, mgh3, merged.GlobalHistory)
  1022. assert.Len(t, merged.PeopleMatrix, 3)
  1023. for _, row := range merged.PeopleMatrix {
  1024. assert.Len(t, row, 5)
  1025. }
  1026. assert.Equal(t, merged.PeopleMatrix[0][0], int64(10))
  1027. assert.Equal(t, merged.PeopleMatrix[0][1], int64(20))
  1028. assert.Equal(t, merged.PeopleMatrix[0][2], int64(30))
  1029. assert.Equal(t, merged.PeopleMatrix[0][3], int64(40))
  1030. assert.Equal(t, merged.PeopleMatrix[0][4], int64(0))
  1031. assert.Equal(t, merged.PeopleMatrix[1][0], int64(50))
  1032. assert.Equal(t, merged.PeopleMatrix[1][1], int64(60))
  1033. assert.Equal(t, merged.PeopleMatrix[1][2], int64(70))
  1034. assert.Equal(t, merged.PeopleMatrix[1][3], int64(80))
  1035. assert.Equal(t, merged.PeopleMatrix[1][4], int64(0))
  1036. assert.Equal(t, merged.PeopleMatrix[2][0], int64(0))
  1037. assert.Equal(t, merged.PeopleMatrix[2][1], int64(0))
  1038. assert.Equal(t, merged.PeopleMatrix[2][2], int64(0))
  1039. assert.Equal(t, merged.PeopleMatrix[2][3], int64(0))
  1040. assert.Equal(t, merged.PeopleMatrix[2][4], int64(0))
  1041. burndown.serializeBinary(&merged, ioutil.Discard)
  1042. }
  1043. func TestBurndownDeserialize(t *testing.T) {
  1044. allBuffer, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", "burndown.pb"))
  1045. assert.Nil(t, err)
  1046. message := pb.AnalysisResults{}
  1047. err = proto.Unmarshal(allBuffer, &message)
  1048. assert.Nil(t, err)
  1049. burndown := BurndownAnalysis{}
  1050. iresult, err := burndown.Deserialize(message.Contents[burndown.Name()])
  1051. assert.Nil(t, err)
  1052. result := iresult.(BurndownResult)
  1053. assert.True(t, len(result.GlobalHistory) > 0)
  1054. assert.True(t, len(result.FileHistories) > 0)
  1055. assert.True(t, len(result.reversedPeopleDict) > 0)
  1056. assert.True(t, len(result.PeopleHistories) > 0)
  1057. assert.True(t, len(result.PeopleMatrix) > 0)
  1058. assert.Equal(t, result.granularity, 30)
  1059. assert.Equal(t, result.sampling, 30)
  1060. }