@@ -49,21 +49,25 @@ type BurndownAnalysis struct {
	// Repository points to the analysed Git repository struct from go-git.
	repository *git.Repository
-	// globalStatus is the current daily alive number of lines; key is the number
-	// of days from the beginning of the history.
-	globalStatus map[int]int64
-	// globalHistory is the periodic snapshots of globalStatus.
-	globalHistory [][]int64
-	// fileHistories is the periodic snapshots of each file's status.
-	fileHistories map[string][][]int64
-	// peopleHistories is the periodic snapshots of each person's status.
-	peopleHistories [][][]int64
+	// globalHistory is the daily deltas of daily line counts.
+	// E.g. day 0: day 0 +50 lines
+	//      day 10: day 0 -10 lines; day 10 +20 lines
+	//      day 12: day 0 -5 lines; day 10 -3 lines; day 12 +10 lines
+	// map [0] [0] = 50
+	// map[10] [0] = -10
+	// map[10][10] = 20
+	// map[12] [0] = -5
+	// map[12][10] = -3
+	// map[12][12] = 10
+	globalHistory sparseHistory
+	// fileHistories is the daily deltas of each file's daily line counts.
+	fileHistories map[string]sparseHistory
+	// peopleHistories is the daily deltas of each person's daily line counts.
+	peopleHistories []sparseHistory
	// files is the mapping <file path> -> *File.
	files map[string]*burndown.File
	// matrix is the mutual deletions and self insertions.
	matrix []map[int]int64
-	// people is the people's individual time stats.
-	people []map[int]int64
	// day is the most recent day index processed.
	day int
	// previousDay is the day from the previous sample period -
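The delta encoding above folds back into absolute alive-line counts by summing every recorded delta up to a given day. A minimal, self-contained sketch using the hypothetical numbers from the comments (not data from a real repository):

package main

import "fmt"

type sparseHistory = map[int]map[int]int64

func main() {
	history := sparseHistory{
		0:  {0: 50},
		10: {0: -10, 10: 20},
		12: {0: -5, 10: -3, 12: 10},
	}
	var alive int64
	for _, day := range []int{0, 10, 12} { // keys in sorted order
		for _, delta := range history[day] {
			alive += delta
		}
		fmt.Printf("day %2d: %d lines alive\n", day, alive)
	}
	// day  0: 50 lines alive
	// day 10: 60 lines alive
	// day 12: 62 lines alive
}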
@@ -79,18 +83,18 @@ type BurndownResult struct {
	// [number of samples][number of bands]
	// The number of samples depends on Sampling: the less Sampling, the bigger the number.
	// The number of bands depends on Granularity: the less Granularity, the bigger the number.
-	GlobalHistory [][]int64
+	GlobalHistory DenseHistory
	// The key is the path inside the Git repository. The value's dimensions are the same as
	// in GlobalHistory.
-	FileHistories map[string][][]int64
+	FileHistories map[string]DenseHistory
	// [number of people][number of samples][number of bands]
-	PeopleHistories [][][]int64
+	PeopleHistories []DenseHistory
	// [number of people][number of people + 2]
	// The first element is the total number of lines added by the author.
	// The second element is the number of removals by unidentified authors (outside reversedPeopleDict).
	// The rest of the elements are equal the number of line removals by the corresponding
	// authors in reversedPeopleDict: 2 -> 0, 3 -> 1, etc.
-	PeopleMatrix [][]int64
+	PeopleMatrix DenseHistory

	// The following members are private.

@@ -123,6 +127,11 @@ const (
	authorSelf = (1 << (32 - burndown.TreeMaxBinPower)) - 2
)

+type sparseHistory = map[int]map[int]int64
+
+// DenseHistory is the matrix [number of samples][number of bands] -> number of lines.
+type DenseHistory = [][]int64
+
// Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
func (analyser *BurndownAnalysis) Name() string {
	return "Burndown"
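To make the PeopleMatrix layout concrete with hypothetical values: given reversedPeopleDict = ["alice", "bob"], a row PeopleMatrix[0] = [100, 2, 5, 7] reads: alice added 100 lines in total, 2 of her lines were removed by authors outside reversedPeopleDict, 5 by alice herself, and 7 by bob (column 3 -> author 1). Note also that sparseHistory and DenseHistory are declared as type aliases (the =), so they stay assignment-compatible with the underlying map and slice types used elsewhere.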
@@ -225,18 +234,16 @@ func (analyser *BurndownAnalysis) Initialize(repository *git.Repository) {
		analyser.Sampling = analyser.Granularity
	}
	analyser.repository = repository
-	analyser.globalStatus = map[int]int64{}
-	analyser.globalHistory = [][]int64{}
-	analyser.fileHistories = map[string][][]int64{}
-	analyser.peopleHistories = make([][][]int64, analyser.PeopleNumber)
+	analyser.globalHistory = sparseHistory{}
+	analyser.fileHistories = map[string]sparseHistory{}
+	analyser.peopleHistories = make([]sparseHistory, analyser.PeopleNumber)
	analyser.files = map[string]*burndown.File{}
	analyser.matrix = make([]map[int]int64, analyser.PeopleNumber)
-	analyser.people = make([]map[int]int64, analyser.PeopleNumber)
	analyser.day = 0
	analyser.previousDay = 0
}

-// Consume runs this PipelineItem on the next commit data.
+// Consume runs this PipelineItem on the next commit's data.
// `deps` contain all the results from upstream PipelineItem-s as requested by Requires().
// Additionally, DependencyCommit is always present there and represents the analysed *object.Commit.
// This function returns the mapping with analysis results. The keys must be the same as
@@ -309,19 +316,15 @@ func (analyser *BurndownAnalysis) Merge(branches []core.PipelineItem) {

// Finalize returns the result of the analysis. Further Consume() calls are not expected.
func (analyser *BurndownAnalysis) Finalize() interface{} {
-	gs, fss, pss := analyser.groupStatus()
-	analyser.updateHistories(1, gs, fss, pss)
-	for key, statuses := range analyser.fileHistories {
-		if len(statuses) == len(analyser.globalHistory) {
-			continue
-		}
-		padding := make([][]int64, len(analyser.globalHistory)-len(statuses))
-		for i := range padding {
-			padding[i] = make([]int64, len(analyser.globalStatus))
-		}
-		analyser.fileHistories[key] = append(padding, statuses...)
+	fileHistories := map[string]DenseHistory{}
+	for key, history := range analyser.fileHistories {
+		fileHistories[key] = analyser.groupSparseHistory(history)
+	}
+	peopleHistories := make([]DenseHistory, analyser.PeopleNumber)
+	for i, history := range analyser.peopleHistories {
+		peopleHistories[i] = analyser.groupSparseHistory(history)
	}
-	peopleMatrix := make([][]int64, analyser.PeopleNumber)
+	peopleMatrix := make(DenseHistory, analyser.PeopleNumber)
	for i, row := range analyser.matrix {
		mrow := make([]int64, analyser.PeopleNumber+2)
		peopleMatrix[i] = mrow
@@ -335,9 +338,9 @@ func (analyser *BurndownAnalysis) Finalize() interface{} {
		}
	}
	return BurndownResult{
-		GlobalHistory: analyser.globalHistory,
-		FileHistories: analyser.fileHistories,
-		PeopleHistories: analyser.peopleHistories,
+		GlobalHistory: analyser.groupSparseHistory(analyser.globalHistory),
+		FileHistories: fileHistories,
+		PeopleHistories: peopleHistories,
		PeopleMatrix: peopleMatrix,
		reversedPeopleDict: analyser.reversedPeopleDict,
		sampling: analyser.Sampling,
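The DenseHistory dimensions follow directly from the grouping: assuming, say, Sampling = 30 and Granularity = 30 and a history whose last recorded day is 100, groupSparseHistory yields 100/30 + 1 = 4 samples and 100/30 + 1 = 4 bands, i.e. a 4x4 matrix per history.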
@@ -364,8 +367,8 @@ func (analyser *BurndownAnalysis) Deserialize(pbmessage []byte) (interface{}, er
		return nil, err
	}
	result := BurndownResult{}
-	convertCSR := func(mat *pb.BurndownSparseMatrix) [][]int64 {
-		res := make([][]int64, mat.NumberOfRows)
+	convertCSR := func(mat *pb.BurndownSparseMatrix) DenseHistory {
+		res := make(DenseHistory, mat.NumberOfRows)
		for i := 0; i < int(mat.NumberOfRows); i++ {
			res[i] = make([]int64, mat.NumberOfColumns)
			for j := 0; j < len(mat.Rows[i].Columns); j++ {
@@ -375,18 +378,18 @@ func (analyser *BurndownAnalysis) Deserialize(pbmessage []byte) (interface{}, er
		return res
	}
	result.GlobalHistory = convertCSR(msg.Project)
-	result.FileHistories = map[string][][]int64{}
+	result.FileHistories = map[string]DenseHistory{}
	for _, mat := range msg.Files {
		result.FileHistories[mat.Name] = convertCSR(mat)
	}
	result.reversedPeopleDict = make([]string, len(msg.People))
-	result.PeopleHistories = make([][][]int64, len(msg.People))
+	result.PeopleHistories = make([]DenseHistory, len(msg.People))
	for i, mat := range msg.People {
		result.PeopleHistories[i] = convertCSR(mat)
		result.reversedPeopleDict[i] = mat.Name
	}
	if msg.PeopleInteraction != nil {
-		result.PeopleMatrix = make([][]int64, msg.PeopleInteraction.NumberOfRows)
+		result.PeopleMatrix = make(DenseHistory, msg.PeopleInteraction.NumberOfRows)
	}
	for i := 0; i < len(result.PeopleMatrix); i++ {
		result.PeopleMatrix[i] = make([]int64, msg.PeopleInteraction.NumberOfColumns)
@@ -431,12 +434,12 @@ func (analyser *BurndownAnalysis) MergeResults(
		}()
	}
	if len(bar1.FileHistories) > 0 || len(bar2.FileHistories) > 0 {
-		merged.FileHistories = map[string][][]int64{}
+		merged.FileHistories = map[string]DenseHistory{}
		historyMutex := sync.Mutex{}
		for key, fh1 := range bar1.FileHistories {
			if fh2, exists := bar2.FileHistories[key]; exists {
				wg.Add(1)
-				go func(fh1, fh2 [][]int64, key string) {
+				go func(fh1, fh2 DenseHistory, key string) {
					defer wg.Done()
					historyMutex.Lock()
					defer historyMutex.Unlock()
@@ -458,7 +461,7 @@ func (analyser *BurndownAnalysis) MergeResults(
		}
	}
	if len(merged.reversedPeopleDict) > 0 {
-		merged.PeopleHistories = make([][][]int64, len(merged.reversedPeopleDict))
+		merged.PeopleHistories = make([]DenseHistory, len(merged.reversedPeopleDict))
		for i, key := range merged.reversedPeopleDict {
			ptrs := people[key]
			if ptrs[1] < 0 {
@@ -473,7 +476,7 @@ func (analyser *BurndownAnalysis) MergeResults(
			wg.Add(1)
			go func(i int) {
				defer wg.Done()
-				var m1, m2 [][]int64
+				var m1, m2 DenseHistory
				if len(bar1.PeopleHistories) > 0 {
					m1 = bar1.PeopleHistories[ptrs[1]]
				}
@@ -505,7 +508,7 @@ func (analyser *BurndownAnalysis) MergeResults(
				merged.PeopleMatrix, make([]int64, len(merged.reversedPeopleDict)+2))
		}
	} else {
-		merged.PeopleMatrix = make([][]int64, len(merged.reversedPeopleDict))
+		merged.PeopleMatrix = make(DenseHistory, len(merged.reversedPeopleDict))
		for i := range merged.PeopleMatrix {
			merged.PeopleMatrix[i] = make([]int64, len(merged.reversedPeopleDict)+2)
		}
@@ -534,8 +537,8 @@ func (analyser *BurndownAnalysis) MergeResults(
// mergeMatrices takes two [number of samples][number of bands] matrices,
// resamples them to days so that they become square, sums and resamples back to the
// least of (sampling1, sampling2) and (granularity1, granularity2).
-func mergeMatrices(m1, m2 [][]int64, granularity1, sampling1, granularity2, sampling2 int,
-	c1, c2 *core.CommonAnalysisResult) [][]int64 {
+func mergeMatrices(m1, m2 DenseHistory, granularity1, sampling1, granularity2, sampling2 int,
+	c1, c2 *core.CommonAnalysisResult) DenseHistory {
	commonMerged := *c1
	commonMerged.Merge(c2)

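As a worked example of the rule above (hypothetical parameters): merging a matrix recorded at sampling 15 / granularity 20 with one recorded at sampling 30 / granularity 30 produces a result resampled at sampling 15 and granularity 20, the least of each pair.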
@@ -565,8 +568,8 @@ func mergeMatrices(m1, m2 [][]int64, granularity1, sampling1, granularity2, samp
			int(c2.BeginTime-commonMerged.BeginTime)/(3600*24))
	}

-	// convert daily to [][]in(t64
-	result := make([][]int64, (size+sampling-1)/sampling)
+	// convert daily to [][]int64
+	result := make(DenseHistory, (size+sampling-1)/sampling)
	for i := range result {
		result[i] = make([]int64, (size+granularity-1)/granularity)
		sampledIndex := i * sampling
@@ -590,7 +593,7 @@ func mergeMatrices(m1, m2 [][]int64, granularity1, sampling1, granularity2, samp
// Rows: *at least* len(matrix) * sampling + offset
// Columns: *at least* len(matrix[...]) * granularity + offset
// `matrix` can be sparse, so that the last columns which are equal to 0 are truncated.
-func addBurndownMatrix(matrix [][]int64, granularity, sampling int, daily [][]float32, offset int) {
+func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, daily [][]float32, offset int) {
	// Determine the maximum number of bands; the actual one may be larger but we do not care
	maxCols := 0
	for _, row := range matrix {
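Reading that size contract with hypothetical numbers: a matrix of 3 samples recorded at sampling 30 with offset 5 needs `daily` to span at least 3*30 + 5 = 95 rows, and rows of 4 bands at granularity 30 need at least 4*30 + 5 = 125 columns.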
@@ -808,7 +811,7 @@ func (analyser *BurndownAnalysis) serializeBinary(result *BurndownResult, writer
	return nil
}

-func sortedKeys(m map[string][][]int64) []string {
+func sortedKeys(m map[string]DenseHistory) []string {
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
@@ -846,54 +849,70 @@ func (analyser *BurndownAnalysis) unpackPersonWithDay(value int) (int, int) {
}

func (analyser *BurndownAnalysis) onNewDay() {
-	day := analyser.day
-	sampling := analyser.Sampling
-	delta := (day / sampling) - (analyser.previousDay / sampling)
-	if delta > 0 {
-		analyser.previousDay = day
-		gs, fss, pss := analyser.groupStatus()
-		analyser.updateHistories(delta, gs, fss, pss)
+	if analyser.day > analyser.previousDay {
+		analyser.previousDay = analyser.day
	}
}

-func (analyser *BurndownAnalysis) updateStatus(
-	status interface{}, _ int, previousValue int, delta int) {
+func (analyser *BurndownAnalysis) updateGlobal(currentTime, previousTime, delta int) {
+	_, currentDay := analyser.unpackPersonWithDay(currentTime)
+	_, previousDay := analyser.unpackPersonWithDay(previousTime)
+	currentHistory := analyser.globalHistory[currentDay]
+	if currentHistory == nil {
+		currentHistory = map[int]int64{}
+		analyser.globalHistory[currentDay] = currentHistory
+	}
+	currentHistory[previousDay] += int64(delta)
+}
+
+// updateFile is bound to the specific `history` in the closure.
+func (analyser *BurndownAnalysis) updateFile(
+	history sparseHistory, currentTime, previousTime, delta int) {
+
+	_, currentDay := analyser.unpackPersonWithDay(currentTime)
+	_, previousDay := analyser.unpackPersonWithDay(previousTime)
-	_, previousTime := analyser.unpackPersonWithDay(previousValue)
-	status.(map[int]int64)[previousTime] += int64(delta)
+	currentHistory := history[currentDay]
+	if currentHistory == nil {
+		currentHistory = map[int]int64{}
+		history[currentDay] = currentHistory
+	}
+	currentHistory[previousDay] += int64(delta)
}

-func (analyser *BurndownAnalysis) updatePeople(
-	peopleUncasted interface{}, _ int, previousValue int, delta int) {
-	previousAuthor, previousTime := analyser.unpackPersonWithDay(previousValue)
+func (analyser *BurndownAnalysis) updateAuthor(currentTime, previousTime, delta int) {
+	previousAuthor, previousDay := analyser.unpackPersonWithDay(previousTime)
	if previousAuthor == identity.AuthorMissing {
		return
	}
-	people := peopleUncasted.([]map[int]int64)
-	stats := people[previousAuthor]
-	if stats == nil {
-		stats = map[int]int64{}
-		people[previousAuthor] = stats
+	_, currentDay := analyser.unpackPersonWithDay(currentTime)
+	history := analyser.peopleHistories[previousAuthor]
+	if history == nil {
+		history = sparseHistory{}
+		analyser.peopleHistories[previousAuthor] = history
	}
-	stats[previousTime] += int64(delta)
+	currentHistory := history[currentDay]
+	if currentHistory == nil {
+		currentHistory = map[int]int64{}
+		history[currentDay] = currentHistory
+	}
+	currentHistory[previousDay] += int64(delta)
}

-func (analyser *BurndownAnalysis) updateMatrix(
-	matrixUncasted interface{}, currentTime int, previousTime int, delta int) {
-
-	matrix := matrixUncasted.([]map[int]int64)
+func (analyser *BurndownAnalysis) updateMatrix(currentTime, previousTime, delta int) {
	newAuthor, _ := analyser.unpackPersonWithDay(currentTime)
	oldAuthor, _ := analyser.unpackPersonWithDay(previousTime)
+
	if oldAuthor == identity.AuthorMissing {
		return
	}
	if newAuthor == oldAuthor && delta > 0 {
		newAuthor = authorSelf
	}
-	row := matrix[oldAuthor]
+	row := analyser.matrix[oldAuthor]
	if row == nil {
		row = map[int]int64{}
-		matrix[oldAuthor] = row
+		analyser.matrix[oldAuthor] = row
	}
	cell, exists := row[newAuthor]
	if !exists {
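All updaters above now share the plain signature func(currentTime, previousTime, delta int); the per-file variant is additionally curried over its history (see newFile below). A minimal, self-contained sketch of that closure binding, with hypothetical names rather than the real hercules types:

package main

import "fmt"

type updater = func(currentTime, previousTime, delta int)

func main() {
	histories := map[string]map[int]int64{}
	bind := func(name string) updater {
		history := map[int]int64{} // captured by the returned closure
		histories[name] = history
		return func(_, previousTime, delta int) {
			history[previousTime] += int64(delta)
		}
	}
	u := bind("cmd/root.go")
	u(12, 10, -3) // three day-10 lines died on day 12
	fmt.Println(histories["cmd/root.go"][10]) // -3
}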
@@ -904,19 +923,25 @@ func (analyser *BurndownAnalysis) updateMatrix(
}

func (analyser *BurndownAnalysis) newFile(
-	hash plumbing.Hash, author int, day int, size int, global map[int]int64,
-	people []map[int]int64, matrix []map[int]int64) *burndown.File {
-	statuses := make([]burndown.Status, 1)
-	statuses[0] = burndown.NewStatus(global, analyser.updateStatus)
+	hash plumbing.Hash, name string, author int, day int, size int) (*burndown.File, error) {
+	statuses := make([]burndown.Updater, 1)
+	statuses[0] = analyser.updateGlobal
	if analyser.TrackFiles {
-		statuses = append(statuses, burndown.NewStatus(map[int]int64{}, analyser.updateStatus))
+		if _, exists := analyser.fileHistories[name]; exists {
+			return nil, fmt.Errorf("file %s already exists", name)
+		}
+		history := sparseHistory{}
+		analyser.fileHistories[name] = history
+		statuses = append(statuses, func(currentTime, previousTime, delta int) {
+			analyser.updateFile(history, currentTime, previousTime, delta)
+		})
	}
	if analyser.PeopleNumber > 0 {
-		statuses = append(statuses, burndown.NewStatus(people, analyser.updatePeople))
-		statuses = append(statuses, burndown.NewStatus(matrix, analyser.updateMatrix))
+		statuses = append(statuses, analyser.updateAuthor)
+		statuses = append(statuses, analyser.updateMatrix)
		day = analyser.packPersonWithDay(author, day)
	}
-	return burndown.NewFile(hash, day, size, statuses...)
+	return burndown.NewFile(hash, day, size, statuses...), nil
}

func (analyser *BurndownAnalysis) handleInsertion(
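Assuming burndown.NewFile reports the initial file contents through these updaters as an insertion at the creation day (which this diff does not show), creating a 50-line file on day 0 would record globalHistory[0][0] == 50, matching the first row of the example in the struct comment.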
@@ -934,11 +959,9 @@ func (analyser *BurndownAnalysis) handleInsertion(
	if exists {
		return fmt.Errorf("file %s already exists", name)
	}
-	file = analyser.newFile(
-		blob.Hash, author, analyser.day, lines,
-		analyser.globalStatus, analyser.people, analyser.matrix)
+	file, err = analyser.newFile(blob.Hash, name, author, analyser.day, lines)
	analyser.files[name] = file
-	return nil
+	return err
}

func (analyser *BurndownAnalysis) handleDeletion(
@@ -957,6 +980,7 @@ func (analyser *BurndownAnalysis) handleDeletion(
	file.Update(analyser.packPersonWithDay(author, analyser.day), 0, 0, lines)
	file.Hash = plumbing.ZeroHash
	delete(analyser.files, name)
+	delete(analyser.fileHistories, name)
	return nil
}

@@ -1066,113 +1090,63 @@ func (analyser *BurndownAnalysis) handleModification(
}

func (analyser *BurndownAnalysis) handleRename(from, to string) error {
+	if from == to {
+		return nil
+	}
	file, exists := analyser.files[from]
	if !exists {
		return fmt.Errorf("file %s does not exist", from)
	}
	analyser.files[to] = file
	delete(analyser.files, from)
+	if analyser.TrackFiles {
+		history, exists := analyser.fileHistories[from]
+		if !exists {
+			return fmt.Errorf("file %s does not exist", from)
+		}
+		analyser.fileHistories[to] = history
+		delete(analyser.fileHistories, from)
+	}
	return nil
}

-func (analyser *BurndownAnalysis) groupStatus() ([]int64, map[string][]int64, [][]int64) {
-	granularity := analyser.Granularity
-	if granularity == 0 {
-		granularity = 1
-	}
-	day := analyser.day
-	day++
-	adjust := 0
-	if day%granularity != 0 {
-		adjust = 1
-	}
-	global := make([]int64, day/granularity+adjust)
-	var group int64
-	for i := 0; i < day; i++ {
-		group += analyser.globalStatus[i]
-		if (i % granularity) == (granularity - 1) {
-			global[i/granularity] = group
-			group = 0
-		}
-	}
-	if day%granularity != 0 {
-		global[len(global)-1] = group
-	}
-	locals := make(map[string][]int64)
-	if analyser.TrackFiles {
-		for key, file := range analyser.files {
-			status := make([]int64, day/granularity+adjust)
-			var group int64
-			for i := 0; i < day; i++ {
-				group += file.Status(1).(map[int]int64)[i]
-				if (i % granularity) == (granularity - 1) {
-					status[i/granularity] = group
-					group = 0
-				}
-			}
-			if day%granularity != 0 {
-				status[len(status)-1] = group
-			}
-			locals[key] = status
-		}
-	}
-	peoples := make([][]int64, len(analyser.people))
-	for key, person := range analyser.people {
-		status := make([]int64, day/granularity+adjust)
-		var group int64
-		for i := 0; i < day; i++ {
-			group += person[i]
-			if (i % granularity) == (granularity - 1) {
-				status[i/granularity] = group
-				group = 0
-			}
-		}
-		if day%granularity != 0 {
-			status[len(status)-1] = group
-		}
-		peoples[key] = status
-	}
-	return global, locals, peoples
-}
-
-func (analyser *BurndownAnalysis) updateHistories(
-	delta int, globalStatus []int64, fileStatuses map[string][]int64, peopleStatuses [][]int64) {
-	for i := 0; i < delta; i++ {
-		analyser.globalHistory = append(analyser.globalHistory, globalStatus)
-	}
-	toDelete := make([]string, 0)
-	for key, fh := range analyser.fileHistories {
-		ls, exists := fileStatuses[key]
-		if !exists {
-			toDelete = append(toDelete, key)
-		} else {
-			for i := 0; i < delta; i++ {
-				fh = append(fh, ls)
-			}
-			analyser.fileHistories[key] = fh
-		}
-	}
-	for _, key := range toDelete {
-		delete(analyser.fileHistories, key)
-	}
-	for key, ls := range fileStatuses {
-		fh, exists := analyser.fileHistories[key]
-		if exists {
-			continue
-		}
-		for i := 0; i < delta; i++ {
-			fh = append(fh, ls)
-		}
-		analyser.fileHistories[key] = fh
-	}
-
-	for key, ph := range analyser.peopleHistories {
-		ls := peopleStatuses[key]
-		for i := 0; i < delta; i++ {
-			ph = append(ph, ls)
-		}
-		analyser.peopleHistories[key] = ph
-	}
-}
+func (analyser *BurndownAnalysis) groupSparseHistory(history sparseHistory) DenseHistory {
+	if len(history) == 0 {
+		// e.g. a person from the dictionary without any surviving lines
+		return DenseHistory{}
+	}
+	var days []int
+	for day := range history {
+		days = append(days, day)
+	}
+	sort.Ints(days)
+	// [y][x]
+	// y - sampling
+	// x - granularity
+	maxDay := days[len(days)-1]
+	samples := maxDay/analyser.Sampling + 1
+	bands := maxDay/analyser.Granularity + 1
+	result := make(DenseHistory, samples)
+	for i := 0; i < samples; i++ {
+		result[i] = make([]int64, bands)
+	}
+	prevsi := 0
+	for _, day := range days {
+		si := day / analyser.Sampling
+		if si > prevsi {
+			state := result[prevsi]
+			for i := prevsi + 1; i <= si; i++ {
+				copy(result[i], state)
+			}
+			prevsi = si
+		}
+		sample := result[si]
+		for bday, value := range history[day] {
+			sample[bday/analyser.Granularity] += value
+		}
+	}
+	return result
+}

func init() {
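Tracing groupSparseHistory over the example history from the struct comment, assuming Sampling = Granularity = 10: maxDay = 12 gives 12/10 + 1 = 2 samples and 2 bands; sample 1 starts as a copy of sample 0 and then absorbs the day-10 and day-12 deltas, with the day-12 lines landing in band 1:

// history   = {0: {0: +50}, 10: {0: -10, 10: +20}, 12: {0: -5, 10: -3, 12: +10}}
// result[0] = [50, 0]   days 0-9: fifty day-0 lines alive
// result[1] = [35, 27]  days 10+: 50-10-5 = 35 old lines, 20-3+10 = 27 newer ones
// total alive at sample 1: 35 + 27 = 62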