couples.go

package leaves

import (
	"fmt"
	"io"
	"sort"

	"github.com/gogo/protobuf/proto"
	"gopkg.in/src-d/go-git.v4"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
	"gopkg.in/src-d/hercules.v4/internal/core"
	"gopkg.in/src-d/hercules.v4/internal/pb"
	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
	"gopkg.in/src-d/hercules.v4/internal/yaml"
)

// CouplesAnalysis calculates the number of common commits for files and authors.
// The results are matrices, where the cell at row X and column Y is the number of commits
// which changed X and Y together. For people, the numbers are summed over every common file.
type CouplesAnalysis struct {
	core.NoopMerger
	core.OneShotMergeProcessor
	// PeopleNumber is the number of developers for which to build the matrix. 0 disables this analysis.
	PeopleNumber int
	// people stores how many times every developer committed to every file.
	people []map[string]int
	// peopleCommits is the number of commits each author made.
	peopleCommits []int
	// files stores how many times every file occurred in the same commit with every other file.
	files map[string]map[string]int
	// renames records the detected renames, each pointing from the new file name to the old one.
	renames *[]rename
	// lastCommit is the last commit which was consumed.
	lastCommit *object.Commit
	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
	reversedPeopleDict []string
}

// CouplesResult is returned by CouplesAnalysis.Finalize() and carries the coupling matrices
// for authors and files.
type CouplesResult struct {
	PeopleMatrix []map[int]int64
	PeopleFiles  [][]int
	FilesMatrix  []map[int]int64
	Files        []string
	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
	reversedPeopleDict []string
}

type rename struct {
	FromName string
	ToName   string
}

// Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
func (couples *CouplesAnalysis) Name() string {
	return "Couples"
}

// Provides returns the list of names of entities which are produced by this PipelineItem.
// Each produced entity will be inserted into `deps` of dependent Consume()-s according
// to this list. Also used by core.Registry to build the global map of providers.
func (couples *CouplesAnalysis) Provides() []string {
	return []string{}
}

// Requires returns the list of names of entities which are needed by this PipelineItem.
// Each requested entity will be inserted into `deps` of Consume(). In turn, those
// entities are Provides() upstream.
func (couples *CouplesAnalysis) Requires() []string {
	arr := [...]string{identity.DependencyAuthor, items.DependencyTreeChanges}
	return arr[:]
}

// ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
func (couples *CouplesAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
	return []core.ConfigurationOption{}
}

// Configure sets the properties previously published by ListConfigurationOptions().
func (couples *CouplesAnalysis) Configure(facts map[string]interface{}) {
	if val, exists := facts[identity.FactIdentityDetectorPeopleCount].(int); exists {
		couples.PeopleNumber = val
		couples.reversedPeopleDict = facts[identity.FactIdentityDetectorReversedPeopleDict].([]string)
	}
}

// Flag for the command line switch which enables this analysis.
func (couples *CouplesAnalysis) Flag() string {
	return "couples"
}

// Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
// calls. The repository which is going to be analysed is supplied as an argument.
func (couples *CouplesAnalysis) Initialize(repository *git.Repository) {
	// The extra (PeopleNumber+1)-th slot accumulates the changes of unmatched authors:
	// identity.AuthorMissing is mapped to this index in Consume().
	couples.people = make([]map[string]int, couples.PeopleNumber+1)
	for i := range couples.people {
		couples.people[i] = map[string]int{}
	}
	couples.peopleCommits = make([]int, couples.PeopleNumber+1)
	couples.files = map[string]map[string]int{}
	couples.renames = &[]rename{}
	couples.OneShotMergeProcessor.Initialize()
}

// Consume runs this PipelineItem on the next commit data.
// `deps` contain all the results from upstream PipelineItem-s as requested by Requires().
// Additionally, DependencyCommit is always present there and represents the analysed *object.Commit.
// This function returns the mapping with analysis results. The keys must be the same as
// in Provides(). If there was an error, nil is returned.
func (couples *CouplesAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
	firstMerge := couples.ShouldConsumeCommit(deps)
	mergeMode := core.IsMergeCommit(deps)
	couples.lastCommit = deps[core.DependencyCommit].(*object.Commit)
	author := deps[identity.DependencyAuthor].(int)
	if author == identity.AuthorMissing {
		author = couples.PeopleNumber
	}
	if firstMerge {
		couples.peopleCommits[author]++
	}
	treeDiff := deps[items.DependencyTreeChanges].(object.Changes)
	context := make([]string, 0, len(treeDiff))
	for _, change := range treeDiff {
		action, err := change.Action()
		if err != nil {
			return nil, err
		}
		toName := change.To.Name
		fromName := change.From.Name
		switch action {
		case merkletrie.Insert:
			if !mergeMode {
				context = append(context, toName)
				couples.people[author][toName]++
			} else if couples.people[author][toName] == 0 {
				couples.people[author][toName] = 1
			}
		case merkletrie.Delete:
			if !mergeMode {
				couples.people[author][fromName]++
			} else if couples.people[author][fromName] == 0 {
				couples.people[author][fromName] = 1
			}
		case merkletrie.Modify:
			if fromName != toName {
				// renamed
				*couples.renames = append(
					*couples.renames, rename{ToName: toName, FromName: fromName})
			}
			if !mergeMode {
				context = append(context, toName)
				couples.people[author][toName]++
			}
		}
	}

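	// Every pair of files changed in this (non-merge) commit co-occurs. The nested loop also
	// increments the diagonal cell (a file with itself), so couples.files[f][f] ends up being
	// the number of non-merge commits which added or modified f.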
	for _, file := range context {
		for _, otherFile := range context {
			lane, exists := couples.files[file]
			if !exists {
				lane = map[string]int{}
				couples.files[file] = lane
			}
			lane[otherFile]++
		}
	}
	return nil, nil
}

// Finalize returns the result of the analysis. Further Consume() calls are not expected.
func (couples *CouplesAnalysis) Finalize() interface{} {
	files, people := couples.propagateRenames(couples.currentFiles())
	filesSequence := make([]string, len(files))
	i := 0
	for file := range files {
		filesSequence[i] = file
		i++
	}
	sort.Strings(filesSequence)
	filesIndex := map[string]int{}
	for i, file := range filesSequence {
		filesIndex[file] = i
	}

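	// People coupling: every file shared by two authors contributes
	// min(first author's change count for the file, second author's change count) to their cell,
	// summed over all shared files. PeopleFiles[i] collects the indices of the surviving files
	// touched by author i.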
	peopleMatrix := make([]map[int]int64, couples.PeopleNumber+1)
	peopleFiles := make([][]int, couples.PeopleNumber+1)
	for i := range peopleMatrix {
		peopleMatrix[i] = map[int]int64{}
		for file, commits := range people[i] {
			if fi, exists := filesIndex[file]; exists {
				peopleFiles[i] = append(peopleFiles[i], fi)
			}
			for j, otherFiles := range people {
				otherCommits := otherFiles[file]
				delta := otherCommits
				if otherCommits > commits {
					delta = commits
				}
				if delta > 0 {
					peopleMatrix[i][j] += int64(delta)
				}
			}
		}
		sort.Ints(peopleFiles[i])
	}

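	// File coupling: row i corresponds to filesSequence[i], and each entry maps the index of
	// another file to the number of commits which changed both files together.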
	filesMatrix := make([]map[int]int64, len(filesIndex))
	for i := range filesMatrix {
		filesMatrix[i] = map[int]int64{}
		for otherFile, cooccs := range files[filesSequence[i]] {
			filesMatrix[i][filesIndex[otherFile]] = int64(cooccs)
		}
	}
	return CouplesResult{
		PeopleMatrix:       peopleMatrix,
		PeopleFiles:        peopleFiles,
		Files:              filesSequence,
		FilesMatrix:        filesMatrix,
		reversedPeopleDict: couples.reversedPeopleDict,
	}
}

// Fork clones this pipeline item.
func (couples *CouplesAnalysis) Fork(n int) []core.PipelineItem {
	return core.ForkCopyPipelineItem(couples, n)
}

// Serialize converts the analysis result as returned by Finalize() to text or bytes.
// The text format is YAML and the bytes format is Protocol Buffers.
func (couples *CouplesAnalysis) Serialize(result interface{}, binary bool, writer io.Writer) error {
	couplesResult := result.(CouplesResult)
	if binary {
		return couples.serializeBinary(&couplesResult, writer)
	}
	couples.serializeText(&couplesResult, writer)
	return nil
}

// Deserialize converts the specified protobuf bytes to CouplesResult.
func (couples *CouplesAnalysis) Deserialize(pbmessage []byte) (interface{}, error) {
	message := pb.CouplesAnalysisResults{}
	err := proto.Unmarshal(pbmessage, &message)
	if err != nil {
		return nil, err
	}
	result := CouplesResult{
		Files:              message.FileCouples.Index,
		FilesMatrix:        make([]map[int]int64, message.FileCouples.Matrix.NumberOfRows),
		PeopleFiles:        make([][]int, len(message.PeopleCouples.Index)),
		PeopleMatrix:       make([]map[int]int64, message.PeopleCouples.Matrix.NumberOfRows),
		reversedPeopleDict: message.PeopleCouples.Index,
	}
	for i, files := range message.PeopleFiles {
		result.PeopleFiles[i] = make([]int, len(files.Files))
		for j, val := range files.Files {
			result.PeopleFiles[i][j] = int(val)
		}
	}

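	// Both matrices arrive in Compressed Sparse Row form: for row i, Indptr[i]..Indptr[i+1]
	// delimits the corresponding slice of Indices (column numbers) and Data (values).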
	convertCSR := func(dest []map[int]int64, src *pb.CompressedSparseRowMatrix) {
		for indptr := range src.Indptr {
			if indptr == 0 {
				continue
			}
			dest[indptr-1] = map[int]int64{}
			for j := src.Indptr[indptr-1]; j < src.Indptr[indptr]; j++ {
				dest[indptr-1][int(src.Indices[j])] = src.Data[j]
			}
		}
	}
	convertCSR(result.FilesMatrix, message.FileCouples.Matrix)
	convertCSR(result.PeopleMatrix, message.PeopleCouples.Matrix)
	return result, nil
}

// MergeResults combines two CouplesAnalysis-s together.
func (couples *CouplesAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAnalysisResult) interface{} {
	cr1 := r1.(CouplesResult)
	cr2 := r2.(CouplesResult)
	merged := CouplesResult{}
	var people, files map[string][3]int
	people, merged.reversedPeopleDict = identity.Detector{}.MergeReversedDicts(
		cr1.reversedPeopleDict, cr2.reversedPeopleDict)
	files, merged.Files = identity.Detector{}.MergeReversedDicts(cr1.Files, cr2.Files)
	merged.PeopleFiles = make([][]int, len(merged.reversedPeopleDict))
	peopleFilesDicts := make([]map[int]bool, len(merged.reversedPeopleDict))
	addPeopleFiles := func(peopleFiles [][]int, reversedPeopleDict []string,
		reversedFilesDict []string) {
		for pi, fs := range peopleFiles {
			idx := people[reversedPeopleDict[pi]][0]
			m := peopleFilesDicts[idx]
			if m == nil {
				m = map[int]bool{}
				peopleFilesDicts[idx] = m
			}
			for _, f := range fs {
				m[files[reversedFilesDict[f]][0]] = true
			}
		}
	}
	addPeopleFiles(cr1.PeopleFiles, cr1.reversedPeopleDict, cr1.Files)
	addPeopleFiles(cr2.PeopleFiles, cr2.reversedPeopleDict, cr2.Files)
	for i, m := range peopleFilesDicts {
		merged.PeopleFiles[i] = make([]int, len(m))
		j := 0
		for f := range m {
			merged.PeopleFiles[i][j] = f
			j++
		}
		sort.Ints(merged.PeopleFiles[i])
	}

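	// One extra row is reserved at the end for commits whose author was not identified,
	// mirroring the PeopleNumber+1 rows produced by Finalize().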
	merged.PeopleMatrix = make([]map[int]int64, len(merged.reversedPeopleDict)+1)
	addPeople := func(peopleMatrix []map[int]int64, reversedPeopleDict []string,
		reversedFilesDict []string) {
		for pi, pc := range peopleMatrix {
			var idx int
			if pi < len(reversedPeopleDict) {
				idx = people[reversedPeopleDict[pi]][0]
			} else {
				idx = len(merged.reversedPeopleDict)
			}
			m := merged.PeopleMatrix[idx]
			if m == nil {
				m = map[int]int64{}
				merged.PeopleMatrix[idx] = m
			}
			// the column keys of PeopleMatrix are author indices (see Finalize), so they are
			// remapped through the merged people dictionary as well
			for otherID, val := range pc {
				var otherIdx int
				if otherID < len(reversedPeopleDict) {
					otherIdx = people[reversedPeopleDict[otherID]][0]
				} else {
					otherIdx = len(merged.reversedPeopleDict)
				}
				m[otherIdx] += val
			}
		}
	}
	addPeople(cr1.PeopleMatrix, cr1.reversedPeopleDict, cr1.Files)
	addPeople(cr2.PeopleMatrix, cr2.reversedPeopleDict, cr2.Files)
	merged.FilesMatrix = make([]map[int]int64, len(merged.Files))
	addFiles := func(filesMatrix []map[int]int64, reversedFilesDict []string) {
		for fi, fc := range filesMatrix {
			// the row index comes from the merged files dictionary, not the people one
			idx := files[reversedFilesDict[fi]][0]
			m := merged.FilesMatrix[idx]
			if m == nil {
				m = map[int]int64{}
				merged.FilesMatrix[idx] = m
			}
			for file, val := range fc {
				m[files[reversedFilesDict[file]][0]] += val
			}
		}
	}
	addFiles(cr1.FilesMatrix, cr1.Files)
	addFiles(cr2.FilesMatrix, cr2.Files)
	return merged
}

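// serializeText writes the result as indented YAML. The output looks roughly like this
// (the file and author names are illustrative only):
//
//	files_coocc:
//	  index:
//	    - "a.go"
//	    - "b.go"
//	  matrix:
//	    - {0: 4, 1: 2}
//	    - {0: 2, 1: 3}
//	people_coocc:
//	  index:
//	    - "Alice"
//	  matrix:
//	    - {0: 7}
//	author_files:
//	  - "Alice":
//	    - "a.go"
//	    - "b.go"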
func (couples *CouplesAnalysis) serializeText(result *CouplesResult, writer io.Writer) {
	fmt.Fprintln(writer, "  files_coocc:")
	fmt.Fprintln(writer, "    index:")
	for _, file := range result.Files {
		fmt.Fprintf(writer, "      - %s\n", yaml.SafeString(file))
	}
	fmt.Fprintln(writer, "    matrix:")
	for _, files := range result.FilesMatrix {
		fmt.Fprint(writer, "      - {")
		var indices []int
		for file := range files {
			indices = append(indices, file)
		}
		sort.Ints(indices)
		for i, file := range indices {
			fmt.Fprintf(writer, "%d: %d", file, files[file])
			if i < len(indices)-1 {
				fmt.Fprint(writer, ", ")
			}
		}
		fmt.Fprintln(writer, "}")
	}
	fmt.Fprintln(writer, "  people_coocc:")
	fmt.Fprintln(writer, "    index:")
	for _, person := range result.reversedPeopleDict {
		fmt.Fprintf(writer, "      - %s\n", yaml.SafeString(person))
	}
	fmt.Fprintln(writer, "    matrix:")
	for _, people := range result.PeopleMatrix {
		fmt.Fprint(writer, "      - {")
		var indices []int
		for file := range people {
			indices = append(indices, file)
		}
		sort.Ints(indices)
		for i, person := range indices {
			fmt.Fprintf(writer, "%d: %d", person, people[person])
			if i < len(indices)-1 {
				fmt.Fprint(writer, ", ")
			}
		}
		fmt.Fprintln(writer, "}")
	}
	fmt.Fprintln(writer, "  author_files:") // sorted by number of files each author changed
	peopleFiles := sortByNumberOfFiles(result.PeopleFiles, result.reversedPeopleDict, result.Files)
	for _, authorFiles := range peopleFiles {
		fmt.Fprintf(writer, "    - %s:\n", yaml.SafeString(authorFiles.Author))
		sort.Strings(authorFiles.Files)
		for _, file := range authorFiles.Files {
			fmt.Fprintf(writer, "      - %s\n", yaml.SafeString(file)) // sorted by path
		}
	}
}

func sortByNumberOfFiles(
	peopleFiles [][]int, peopleDict []string, filesDict []string) authorFilesList {
	var pfl authorFilesList
	for peopleIdx, files := range peopleFiles {
		if peopleIdx < len(peopleDict) {
			fileNames := make([]string, len(files))
			for i, fi := range files {
				fileNames[i] = filesDict[fi]
			}
			pfl = append(pfl, authorFiles{peopleDict[peopleIdx], fileNames})
		}
	}
	sort.Sort(pfl)
	return pfl
}

// authorFiles pairs an author name with the list of files they touched.
type authorFiles struct {
	Author string
	Files  []string
}

// authorFilesList implements sort.Interface, ordering authors by the number of touched files.
type authorFilesList []authorFiles

func (s authorFilesList) Len() int {
	return len(s)
}

func (s authorFilesList) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

func (s authorFilesList) Less(i, j int) bool {
	return len(s[i].Files) < len(s[j].Files)
}

func (couples *CouplesAnalysis) serializeBinary(result *CouplesResult, writer io.Writer) error {
	message := pb.CouplesAnalysisResults{}
	message.FileCouples = &pb.Couples{
		Index:  result.Files,
		Matrix: pb.MapToCompressedSparseRowMatrix(result.FilesMatrix),
	}
	message.PeopleCouples = &pb.Couples{
		Index:  result.reversedPeopleDict,
		Matrix: pb.MapToCompressedSparseRowMatrix(result.PeopleMatrix),
	}
	message.PeopleFiles = make([]*pb.TouchedFiles, len(result.reversedPeopleDict))
	for key := range result.reversedPeopleDict {
		files := result.PeopleFiles[key]
		int32Files := make([]int32, len(files))
		for i, f := range files {
			int32Files[i] = int32(f)
		}
		message.PeopleFiles[key] = &pb.TouchedFiles{
			Files: int32Files,
		}
	}
	serialized, err := proto.Marshal(&message)
	if err != nil {
		return err
	}
	writer.Write(serialized)
	return nil
}

// currentFiles returns the set of files present in the last consumed commit.
func (couples *CouplesAnalysis) currentFiles() map[string]bool {
	files := map[string]bool{}
	if couples.lastCommit == nil {
		// no commits were consumed - fall back to every file name seen so far
		for key := range couples.files {
			files[key] = true
		}
		return files
	}
	tree, _ := couples.lastCommit.Tree()
	fileIter := tree.Files()
	fileIter.ForEach(func(fobj *object.File) error {
		files[fobj.Name] = true
		return nil
	})
	return files
}

// propagateRenames applies `renames` over the files from `lastCommit`.
func (couples *CouplesAnalysis) propagateRenames(files map[string]bool) (
	map[string]map[string]int, []map[string]int) {
	renames := *couples.renames
	reducedFiles := map[string]map[string]int{}
	for file := range files {
		fmap := map[string]int{}
		refmap := couples.files[file]
		for other := range files {
			refval := refmap[other]
			if refval > 0 {
				fmap[other] = refval
			}
		}
		if len(fmap) > 0 {
			reducedFiles[file] = fmap
		}
	}
	// propagate renames
	aliases := map[string]map[string]bool{}
	pointers := map[string]string{}
	for i := range renames {
		rename := renames[len(renames)-i-1]
		toName := rename.ToName
		if newTo, exists := pointers[toName]; exists {
			toName = newTo
		}
		if _, exists := reducedFiles[toName]; exists {
			if rename.FromName != toName {
				var set map[string]bool
				if set, exists = aliases[toName]; !exists {
					set = map[string]bool{}
					aliases[toName] = set
				}
				set[rename.FromName] = true
				pointers[rename.FromName] = toName
			}
			continue
		}
	}

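	// aliases now maps each surviving file name to the set of its former names (the rename
	// chain is walked backwards via pointers); next, the co-occurrence counters recorded under
	// those former names are folded into the counters of the surviving name.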
	adjustments := map[string]map[string]int{}
	for final, set := range aliases {
		adjustment := map[string]int{}
		for alias := range set {
			for k, v := range couples.files[alias] {
				adjustment[k] += v
			}
		}
		adjustments[final] = adjustment
	}
	for _, adjustment := range adjustments {
		for final, set := range aliases {
			for alias := range set {
				adjustment[final] += adjustment[alias]
				delete(adjustment, alias)
			}
		}
	}
	for final, adjustment := range adjustments {
		for key, val := range adjustment {
			if coocc, exists := reducedFiles[final][key]; exists {
				reducedFiles[final][key] = coocc + val
				reducedFiles[key][final] = coocc + val
			}
		}
	}
	people := make([]map[string]int, len(couples.people))
	for i, counts := range couples.people {
		reducedCounts := map[string]int{}
		people[i] = reducedCounts
		for file := range files {
			count := counts[file]
			for alias := range aliases[file] {
				count += counts[alias]
			}
			if count > 0 {
				reducedCounts[file] = count
			}
		}
		for key, val := range counts {
			if _, exists := files[key]; !exists {
				if _, exists = pointers[key]; !exists {
					reducedCounts[key] = val
				}
			}
		}
	}
	return reducedFiles, people
}

func init() {
	core.Registry.Register(&CouplesAnalysis{})
}