renames.go 9.1 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288
  1. package plumbing
  2. import (
  3. "log"
  4. "sort"
  5. "unicode/utf8"
  6. "github.com/sergi/go-diff/diffmatchpatch"
  7. "gopkg.in/src-d/go-git.v4"
  8. "gopkg.in/src-d/go-git.v4/plumbing"
  9. "gopkg.in/src-d/go-git.v4/plumbing/object"
  10. "gopkg.in/src-d/go-git.v4/utils/merkletrie"
  11. "gopkg.in/src-d/hercules.v4/internal"
  12. "gopkg.in/src-d/hercules.v4/internal/core"
  13. )
// RenameAnalysis improves TreeDiff's results by searching for changed blobs under different
// paths which are likely to be the result of a rename with subsequent edits.
// RenameAnalysis is a PipelineItem.
type RenameAnalysis struct {
	// SimilarityThreshold adjusts the heuristic to determine file renames.
	// It has the same units as cgit's -X rename-threshold or -M. Better to
	// set it to the default value of 90 (90%).
	SimilarityThreshold int

	// repository is the analysed Git repository; assigned by Initialize().
	repository *git.Repository
}
const (
	// RenameAnalysisDefaultThreshold specifies the default percentage of common lines in a pair
	// of files to consider them linked. The exact code of the decision is sizesAreClose().
	RenameAnalysisDefaultThreshold = 90

	// ConfigRenameAnalysisSimilarityThreshold is the name of the configuration option
	// (RenameAnalysis.Configure()) which sets the similarity threshold.
	ConfigRenameAnalysisSimilarityThreshold = "RenameAnalysis.SimilarityThreshold"
)
  32. // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
  33. func (ra *RenameAnalysis) Name() string {
  34. return "RenameAnalysis"
  35. }
  36. // Provides returns the list of names of entities which are produced by this PipelineItem.
  37. // Each produced entity will be inserted into `deps` of dependent Consume()-s according
  38. // to this list. Also used by core.Registry to build the global map of providers.
  39. func (ra *RenameAnalysis) Provides() []string {
  40. arr := [...]string{DependencyTreeChanges}
  41. return arr[:]
  42. }
  43. // Requires returns the list of names of entities which are needed by this PipelineItem.
  44. // Each requested entity will be inserted into `deps` of Consume(). In turn, those
  45. // entities are Provides() upstream.
  46. func (ra *RenameAnalysis) Requires() []string {
  47. arr := [...]string{DependencyBlobCache, DependencyTreeChanges}
  48. return arr[:]
  49. }
  50. // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
  51. func (ra *RenameAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
  52. options := [...]core.ConfigurationOption{{
  53. Name: ConfigRenameAnalysisSimilarityThreshold,
  54. Description: "The threshold on the similarity index used to detect renames.",
  55. Flag: "M",
  56. Type: core.IntConfigurationOption,
  57. Default: RenameAnalysisDefaultThreshold},
  58. }
  59. return options[:]
  60. }
  61. // Configure sets the properties previously published by ListConfigurationOptions().
  62. func (ra *RenameAnalysis) Configure(facts map[string]interface{}) {
  63. if val, exists := facts[ConfigRenameAnalysisSimilarityThreshold].(int); exists {
  64. ra.SimilarityThreshold = val
  65. }
  66. }
  67. // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
  68. // calls. The repository which is going to be analysed is supplied as an argument.
  69. func (ra *RenameAnalysis) Initialize(repository *git.Repository) {
  70. if ra.SimilarityThreshold < 0 || ra.SimilarityThreshold > 100 {
  71. log.Printf("Warning: adjusted the similarity threshold to %d\n",
  72. RenameAnalysisDefaultThreshold)
  73. ra.SimilarityThreshold = RenameAnalysisDefaultThreshold
  74. }
  75. ra.repository = repository
  76. }
// Consume runs this PipelineItem on the next commit data.
// `deps` contain all the results from upstream PipelineItem-s as requested by Requires().
// Additionally, "commit" is always present there and represents the analysed *object.Commit.
// This function returns the mapping with analysis results. The keys must be the same as
// in Provides(). If there was an error, nil is returned.
func (ra *RenameAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
	changes := deps[DependencyTreeChanges].(object.Changes)
	cache := deps[DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
	// reducedChanges accumulates the output: modifications pass through
	// untouched and detected renames are merged into single Change records.
	reducedChanges := make(object.Changes, 0, changes.Len())
	// Stage 1 - find renames by matching the hashes
	// n log(n)
	// We sort additions and deletions by hash and then do the single scan along
	// both slices.
	deleted := make(sortableChanges, 0, changes.Len())
	added := make(sortableChanges, 0, changes.Len())
	for _, change := range changes {
		action, err := change.Action()
		if err != nil {
			return nil, err
		}
		switch action {
		case merkletrie.Insert:
			added = append(added, sortableChange{change, change.To.TreeEntry.Hash})
		case merkletrie.Delete:
			deleted = append(deleted, sortableChange{change, change.From.TreeEntry.Hash})
		case merkletrie.Modify:
			reducedChanges = append(reducedChanges, change)
		}
	}
	sort.Sort(deleted)
	sort.Sort(added)
	a := 0
	d := 0
	stillDeleted := make(object.Changes, 0, deleted.Len())
	stillAdded := make(object.Changes, 0, added.Len())
	// Merge-scan the two sorted slices: an equal blob hash on both sides is
	// an exact rename (content unchanged); everything else falls through to
	// the "still*" leftovers for stage 2.
	for a < added.Len() && d < deleted.Len() {
		if added[a].hash == deleted[d].hash {
			reducedChanges = append(
				reducedChanges,
				&object.Change{From: deleted[d].change.From, To: added[a].change.To})
			a++
			d++
		} else if added[a].Less(&deleted[d]) {
			stillAdded = append(stillAdded, added[a].change)
			a++
		} else {
			stillDeleted = append(stillDeleted, deleted[d].change)
			d++
		}
	}
	// Drain whichever side the merge scan did not exhaust.
	for ; a < added.Len(); a++ {
		stillAdded = append(stillAdded, added[a].change)
	}
	for ; d < deleted.Len(); d++ {
		stillDeleted = append(stillDeleted, deleted[d].change)
	}
	// Stage 2 - apply the similarity threshold
	// n^2 but actually linear
	// We sort the blobs by size and do the single linear scan.
	addedBlobs := make(sortableBlobs, 0, stillAdded.Len())
	deletedBlobs := make(sortableBlobs, 0, stillDeleted.Len())
	for _, change := range stillAdded {
		blob := cache[change.To.TreeEntry.Hash]
		addedBlobs = append(
			addedBlobs, sortableBlob{change: change, size: blob.Size})
	}
	for _, change := range stillDeleted {
		blob := cache[change.From.TreeEntry.Hash]
		deletedBlobs = append(
			deletedBlobs, sortableBlob{change: change, size: blob.Size})
	}
	sort.Sort(addedBlobs)
	sort.Sort(deletedBlobs)
	// dStart is the sliding lower bound into deletedBlobs: because both
	// slices are sorted by size, candidates below it can never match again.
	dStart := 0
	for a = 0; a < addedBlobs.Len(); a++ {
		myBlob := cache[addedBlobs[a].change.To.TreeEntry.Hash]
		mySize := addedBlobs[a].size
		// Skip deleted blobs that are too small to be within the threshold.
		for d = dStart; d < deletedBlobs.Len() && !ra.sizesAreClose(mySize, deletedBlobs[d].size); d++ {
		}
		dStart = d
		foundMatch := false
		// Walk the window of size-compatible deleted blobs and take the first
		// one whose content diff passes the similarity threshold.
		for d = dStart; d < deletedBlobs.Len() && ra.sizesAreClose(mySize, deletedBlobs[d].size); d++ {
			blobsAreClose, err := ra.blobsAreClose(
				myBlob, cache[deletedBlobs[d].change.From.TreeEntry.Hash])
			if err != nil {
				return nil, err
			}
			if blobsAreClose {
				foundMatch = true
				reducedChanges = append(
					reducedChanges,
					&object.Change{From: deletedBlobs[d].change.From,
						To: addedBlobs[a].change.To})
				break
			}
		}
		if foundMatch {
			// Remove both matched blobs; a-- compensates for the shift so the
			// next iteration re-examines the element now at index a.
			addedBlobs = append(addedBlobs[:a], addedBlobs[a+1:]...)
			a--
			deletedBlobs = append(deletedBlobs[:d], deletedBlobs[d+1:]...)
		}
	}
	// Stage 3 - we give up, everything left are independent additions and deletions
	for _, blob := range addedBlobs {
		reducedChanges = append(reducedChanges, blob.change)
	}
	for _, blob := range deletedBlobs {
		reducedChanges = append(reducedChanges, blob.change)
	}
	return map[string]interface{}{DependencyTreeChanges: reducedChanges}, nil
}
  188. func (ra *RenameAnalysis) sizesAreClose(size1 int64, size2 int64) bool {
  189. return internal.Abs64(size1-size2)*100/internal.Max64(1, internal.Min64(size1, size2)) <=
  190. int64(100-ra.SimilarityThreshold)
  191. }
  192. func (ra *RenameAnalysis) blobsAreClose(
  193. blob1 *object.Blob, blob2 *object.Blob) (bool, error) {
  194. strFrom, err := BlobToString(blob1)
  195. if err != nil {
  196. return false, err
  197. }
  198. strTo, err := BlobToString(blob2)
  199. if err != nil {
  200. return false, err
  201. }
  202. dmp := diffmatchpatch.New()
  203. src, dst, _ := dmp.DiffLinesToRunes(strFrom, strTo)
  204. diffs := dmp.DiffMainRunes(src, dst, false)
  205. common := 0
  206. for _, edit := range diffs {
  207. if edit.Type == diffmatchpatch.DiffEqual {
  208. common += utf8.RuneCountInString(edit.Text)
  209. }
  210. }
  211. similarity := common * 100 / internal.Max(1, internal.Min(len(src), len(dst)))
  212. return similarity >= ra.SimilarityThreshold, nil
  213. }
// sortableChange pairs a tree change with the blob hash used as its sort key.
type sortableChange struct {
	// change is the underlying tree diff entry.
	change *object.Change
	// hash is the blob hash of the added (To) or deleted (From) side.
	hash plumbing.Hash
}

// sortableChanges orders changes by hash; it implements sort.Interface.
type sortableChanges []sortableChange
  219. func (change *sortableChange) Less(other *sortableChange) bool {
  220. for x := 0; x < 20; x++ {
  221. if change.hash[x] < other.hash[x] {
  222. return true
  223. }
  224. }
  225. return false
  226. }
  227. func (slice sortableChanges) Len() int {
  228. return len(slice)
  229. }
  230. func (slice sortableChanges) Less(i, j int) bool {
  231. return slice[i].Less(&slice[j])
  232. }
  233. func (slice sortableChanges) Swap(i, j int) {
  234. slice[i], slice[j] = slice[j], slice[i]
  235. }
// sortableBlob pairs a tree change with its blob size used as the sort key.
type sortableBlob struct {
	// change is the underlying tree diff entry.
	change *object.Change
	// size is the byte size of the corresponding blob.
	size int64
}

// sortableBlobs orders blobs by size; it implements sort.Interface.
type sortableBlobs []sortableBlob
  241. func (change *sortableBlob) Less(other *sortableBlob) bool {
  242. return change.size < other.size
  243. }
  244. func (slice sortableBlobs) Len() int {
  245. return len(slice)
  246. }
  247. func (slice sortableBlobs) Less(i, j int) bool {
  248. return slice[i].Less(&slice[j])
  249. }
  250. func (slice sortableBlobs) Swap(i, j int) {
  251. slice[i], slice[j] = slice[j], slice[i]
  252. }
// init registers RenameAnalysis in the global core.Registry so that the
// pipeline can instantiate it by name.
func init() {
	core.Registry.Register(&RenameAnalysis{})
}