renames.go

package hercules

import (
	"fmt"
	"os"
	"sort"
	"unicode/utf8"

	"github.com/sergi/go-diff/diffmatchpatch"
	"gopkg.in/src-d/go-git.v4"
	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
)

// RenameAnalysis improves TreeDiff's results by searching for changed blobs under different
// paths which are likely to be the result of a rename with subsequent edits.
// RenameAnalysis is a PipelineItem.
type RenameAnalysis struct {
	// SimilarityThreshold adjusts the heuristic to determine file renames.
	// It has the same units as cgit's -X rename-threshold or -M. It is best left
	// at the default value of 90 (90%).
	SimilarityThreshold int

	repository *git.Repository
}

const (
	// RenameAnalysisDefaultThreshold specifies the default percentage of common lines in a pair
	// of files to consider them linked. The decision itself is implemented in sizesAreClose()
	// and blobsAreClose().
	RenameAnalysisDefaultThreshold = 90

	// ConfigRenameAnalysisSimilarityThreshold is the name of the configuration option
	// (RenameAnalysis.Configure()) which sets the similarity threshold.
	ConfigRenameAnalysisSimilarityThreshold = "RenameAnalysis.SimilarityThreshold"
)
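
// Name returns the name of this PipelineItem: "RenameAnalysis".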
func (ra *RenameAnalysis) Name() string {
	return "RenameAnalysis"
}
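
// Provides returns the list of entity names produced by this PipelineItem:
// the tree changes with matching deletion/addition pairs merged into renames.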
func (ra *RenameAnalysis) Provides() []string {
	arr := [...]string{DependencyTreeChanges}
	return arr[:]
}
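
// Requires returns the list of entity names consumed by this PipelineItem:
// the blob cache and the tree changes of the current commit.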
func (ra *RenameAnalysis) Requires() []string {
	arr := [...]string{DependencyBlobCache, DependencyTreeChanges}
	return arr[:]
}
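
// ListConfigurationOptions returns the single option supported by this PipelineItem:
// the similarity threshold, exposed as the "M" flag.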
func (ra *RenameAnalysis) ListConfigurationOptions() []ConfigurationOption {
	options := [...]ConfigurationOption{{
		Name:        ConfigRenameAnalysisSimilarityThreshold,
		Description: "The threshold on the similarity index used to detect renames.",
		Flag:        "M",
		Type:        IntConfigurationOption,
		Default:     RenameAnalysisDefaultThreshold},
	}
	return options[:]
}
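
// Configure applies the supplied facts to this PipelineItem; the only recognized
// key is ConfigRenameAnalysisSimilarityThreshold.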
func (ra *RenameAnalysis) Configure(facts map[string]interface{}) {
	if val, exists := facts[ConfigRenameAnalysisSimilarityThreshold].(int); exists {
		ra.SimilarityThreshold = val
	}
}
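
// Initialize prepares this PipelineItem for processing: it validates the similarity
// threshold, falling back to the default if the value is outside [0, 100], and
// remembers the repository.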
func (ra *RenameAnalysis) Initialize(repository *git.Repository) {
	if ra.SimilarityThreshold < 0 || ra.SimilarityThreshold > 100 {
		fmt.Fprintf(os.Stderr, "Warning: adjusted the similarity threshold to %d\n",
			RenameAnalysisDefaultThreshold)
		ra.SimilarityThreshold = RenameAnalysisDefaultThreshold
	}
	ra.repository = repository
}
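
// Consume rewrites the tree diff of the current commit so that matching pairs of
// deletions and additions become single rename changes. It works in three stages:
// exact matches by blob hash, fuzzy matches by content similarity, and a final
// pass-through of everything which could not be paired.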
func (ra *RenameAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
	changes := deps[DependencyTreeChanges].(object.Changes)
	cache := deps[DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
	reducedChanges := make(object.Changes, 0, changes.Len())

	// Stage 1 - find renames by matching the hashes.
	// O(n log n): we sort the additions and deletions by hash and then do a single
	// scan along both slices.
	deleted := make(sortableChanges, 0, changes.Len())
	added := make(sortableChanges, 0, changes.Len())
	for _, change := range changes {
		action, err := change.Action()
		if err != nil {
			return nil, err
		}
		switch action {
		case merkletrie.Insert:
			added = append(added, sortableChange{change, change.To.TreeEntry.Hash})
		case merkletrie.Delete:
			deleted = append(deleted, sortableChange{change, change.From.TreeEntry.Hash})
		case merkletrie.Modify:
			reducedChanges = append(reducedChanges, change)
		}
	}
	sort.Sort(deleted)
	sort.Sort(added)
	a := 0
	d := 0
	stillDeleted := make(object.Changes, 0, deleted.Len())
	stillAdded := make(object.Changes, 0, added.Len())
	for a < added.Len() && d < deleted.Len() {
		if added[a].hash == deleted[d].hash {
			// Identical blobs under different paths: a pure rename.
			reducedChanges = append(
				reducedChanges,
				&object.Change{From: deleted[d].change.From, To: added[a].change.To})
			a++
			d++
		} else if added[a].Less(&deleted[d]) {
			stillAdded = append(stillAdded, added[a].change)
			a++
		} else {
			stillDeleted = append(stillDeleted, deleted[d].change)
			d++
		}
	}
	for ; a < added.Len(); a++ {
		stillAdded = append(stillAdded, added[a].change)
	}
	for ; d < deleted.Len(); d++ {
		stillDeleted = append(stillDeleted, deleted[d].change)
	}

	// Stage 2 - apply the similarity threshold.
	// O(n^2) in the worst case, but effectively linear: we sort the blobs by size and
	// do a single scan, diffing only the pairs whose sizes are close enough.
	addedBlobs := make(sortableBlobs, 0, stillAdded.Len())
	deletedBlobs := make(sortableBlobs, 0, stillDeleted.Len())
	for _, change := range stillAdded {
		blob := cache[change.To.TreeEntry.Hash]
		addedBlobs = append(
			addedBlobs, sortableBlob{change: change, size: blob.Size})
	}
	for _, change := range stillDeleted {
		blob := cache[change.From.TreeEntry.Hash]
		deletedBlobs = append(
			deletedBlobs, sortableBlob{change: change, size: blob.Size})
	}
	sort.Sort(addedBlobs)
	sort.Sort(deletedBlobs)
	dStart := 0
	for a = 0; a < addedBlobs.Len(); a++ {
		myBlob := cache[addedBlobs[a].change.To.TreeEntry.Hash]
		mySize := addedBlobs[a].size
		// Fast-forward dStart to the first deleted blob whose size is close to mySize.
		for d = dStart; d < deletedBlobs.Len() && !ra.sizesAreClose(mySize, deletedBlobs[d].size); d++ {
		}
		dStart = d
		foundMatch := false
		// Examine the contiguous run of deleted blobs with close sizes.
		for d = dStart; d < deletedBlobs.Len() && ra.sizesAreClose(mySize, deletedBlobs[d].size); d++ {
			blobsAreClose, err := ra.blobsAreClose(
				myBlob, cache[deletedBlobs[d].change.From.TreeEntry.Hash])
			if err != nil {
				return nil, err
			}
			if blobsAreClose {
				foundMatch = true
				reducedChanges = append(
					reducedChanges,
					&object.Change{From: deletedBlobs[d].change.From,
						To: addedBlobs[a].change.To})
				break
			}
		}
		if foundMatch {
			// Remove the matched pair from both slices; roll `a` back so that the next
			// iteration examines the element which shifted into this slot.
			addedBlobs = append(addedBlobs[:a], addedBlobs[a+1:]...)
			a--
			deletedBlobs = append(deletedBlobs[:d], deletedBlobs[d+1:]...)
		}
	}

	// Stage 3 - we give up; everything which is left consists of independent additions
	// and deletions.
	for _, blob := range addedBlobs {
		reducedChanges = append(reducedChanges, blob.change)
	}
	for _, blob := range deletedBlobs {
		reducedChanges = append(reducedChanges, blob.change)
	}
	return map[string]interface{}{DependencyTreeChanges: reducedChanges}, nil
}
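
// sizesAreClose reports whether two blob sizes differ by no more than
// (100 - SimilarityThreshold) percent of the smaller one. For example, with the
// default threshold of 90, sizes 100 and 108 are close (8% apart) while 100 and
// 120 are not (20% apart).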
func (ra *RenameAnalysis) sizesAreClose(size1 int64, size2 int64) bool {
	return abs64(size1-size2)*100/max64(1, min64(size1, size2)) <=
		int64(100-ra.SimilarityThreshold)
}
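
// blobsAreClose diffs the two blobs line by line (diffmatchpatch encodes every
// unique line as a rune) and reports whether the share of common lines, relative
// to the smaller blob, reaches SimilarityThreshold percent.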
func (ra *RenameAnalysis) blobsAreClose(
	blob1 *object.Blob, blob2 *object.Blob) (bool, error) {
	strFrom, err := BlobToString(blob1)
	if err != nil {
		return false, err
	}
	strTo, err := BlobToString(blob2)
	if err != nil {
		return false, err
	}
	dmp := diffmatchpatch.New()
	src, dst, _ := dmp.DiffLinesToRunes(strFrom, strTo)
	diffs := dmp.DiffMainRunes(src, dst, false)
	common := 0
	for _, edit := range diffs {
		if edit.Type == diffmatchpatch.DiffEqual {
			common += utf8.RuneCountInString(edit.Text)
		}
	}
	return common*100/max(1, min(len(src), len(dst))) >= ra.SimilarityThreshold, nil
}
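
// sortableChange pairs a tree change with the blob hash which serves as the sort
// key; sortableChanges orders a slice of them by that hash.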
type sortableChange struct {
	change *object.Change
	hash   plumbing.Hash
}

type sortableChanges []sortableChange

// Less compares two changes by their blob hashes, byte by byte (lexicographically).
func (change *sortableChange) Less(other *sortableChange) bool {
	for x := 0; x < 20; x++ {
		if change.hash[x] != other.hash[x] {
			return change.hash[x] < other.hash[x]
		}
	}
	return false
}

func (slice sortableChanges) Len() int {
	return len(slice)
}

func (slice sortableChanges) Less(i, j int) bool {
	return slice[i].Less(&slice[j])
}

func (slice sortableChanges) Swap(i, j int) {
	slice[i], slice[j] = slice[j], slice[i]
}
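
// sortableBlob pairs a tree change with the corresponding blob size which serves
// as the sort key; sortableBlobs orders a slice of them by that size.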
type sortableBlob struct {
	change *object.Change
	size   int64
}

type sortableBlobs []sortableBlob

func (change *sortableBlob) Less(other *sortableBlob) bool {
	return change.size < other.size
}

func (slice sortableBlobs) Len() int {
	return len(slice)
}

func (slice sortableBlobs) Less(i, j int) bool {
	return slice[i].Less(&slice[j])
}

func (slice sortableBlobs) Swap(i, j int) {
	slice[i], slice[j] = slice[j], slice[i]
}
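
// init registers RenameAnalysis in the pipeline item Registry.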
func init() {
	Registry.Register(&RenameAnalysis{})
}