Implement per-user stats

Vadim Markovtsev, 7 years ago
commit f05d8d5d34
4 changed files with 260 additions and 88 deletions
  1. analyser.go            +166 -30
  2. cmd/hercules/main.go   +64 -43
  3. file.go                +20 -10
  4. file_test.go           +10 -5

+ 166 - 30
analyser.go

@@ -35,9 +35,9 @@ type Analyser struct {
 	// It has the same units as cgit's -X rename-threshold or -M. Better to
 	// set it to the default value of 90 (90%).
 	SimilarityThreshold int
-	// Indicates whether we should record per-developer burndown stats.
-	MeasurePeople       bool
-	// Maps email -> developer id.
+	// The number of developers for which to collect the burndown stats. 0 disables it.
+	PeopleNumber        int
+	// Maps email or name -> developer id.
 	PeopleDict          map[string]int
 	// Debug activates the debugging mode. Analyse() runs slower in this mode
 	// but it accurately checks all the intermediate states for invariant
@@ -49,6 +49,8 @@ type Analyser struct {
 	OnProgress          func(int, int)
 }
 
+type ProtoMatrix map[int]map[int]int64
+
 func checkClose(c io.Closer) {
 	if err := c.Close(); err != nil {
 		panic(err)
@@ -140,9 +142,102 @@ func createDummyBlob(hash *plumbing.Hash) (*object.Blob, error) {
 	return object.DecodeBlob(dummyEncodedObject{*hash})
 }
 
+const MISSING_AUTHOR = -1
+const SELF_AUTHOR = -2
+
+func (analyser *Analyser) packPersonWithDay(person int, day int) int {
+	if analyser.PeopleNumber == 0 {
+		return day
+	}
+	result := day
+	result |= person << 14
+	// This effectively means max 16384 days (>44 years) and 262144 devs
+	return result
+}
+
+func (analyser *Analyser) unpackPersonWithDay(value int) (int, int) {
+	if analyser.PeopleNumber == 0 {
+		return MISSING_AUTHOR, value
+	}
+	return value >> 14, value & 0x3FFF
+}
+
+func (analyser *Analyser) updateStatus(
+  status interface{}, _ int, previous_time_ int, delta int) {
+
+	_, previous_time := analyser.unpackPersonWithDay(previous_time_)
+	status.(map[int]int64)[previous_time] += int64(delta)
+}
+
+func (analyser *Analyser) updatePeople(people interface{}, _ int, previous_time_ int, delta int) {
+	old_author, previous_time := analyser.unpackPersonWithDay(previous_time_)
+	if old_author == MISSING_AUTHOR {
+		return
+	}
+	casted := people.([]map[int]int64)
+	stats := casted[old_author]
+	if stats == nil {
+		stats = map[int]int64{}
+		casted[old_author] = stats
+	}
+	stats[previous_time] += int64(delta)
+}
+
+func (analyser *Analyser) updateMatrix(
+  matrix_ interface{}, current_time int, previous_time int, delta int) {
+
+	matrix := matrix_.([]map[int]int64)
+	new_author, _ := analyser.unpackPersonWithDay(current_time)
+	old_author, _ := analyser.unpackPersonWithDay(previous_time)
+	if old_author == MISSING_AUTHOR {
+		return
+	}
+	if new_author == old_author && delta > 0 {
+		new_author = SELF_AUTHOR
+	}
+	row := matrix[old_author]
+	if row == nil {
+		row = map[int]int64{}
+		matrix[old_author] = row
+	}
+	row[new_author] += int64(delta)
+}
+
+func (analyser *Analyser) newFile(
+    author int, day int, size int, global map[int]int64, people []map[int]int64,
+    matrix []map[int]int64) *File {
+	if analyser.PeopleNumber == 0 {
+		return NewFile(day, size, NewStatus(global, analyser.updateStatus),
+			             NewStatus(make(map[int]int64), analyser.updateStatus))
+	}
+	return NewFile(analyser.packPersonWithDay(author, day), size,
+		             NewStatus(global, analyser.updateStatus),
+		             NewStatus(make(map[int]int64), analyser.updateStatus),
+	               NewStatus(people, analyser.updatePeople),
+		             NewStatus(matrix, analyser.updateMatrix))
+}
+
+func (analyser *Analyser) getAuthorId(signature object.Signature) int {
+	id, exists := analyser.PeopleDict[signature.Email]
+	if !exists {
+		id, exists = analyser.PeopleDict[signature.Name]
+		if !exists {
+			id = MISSING_AUTHOR
+		}
+	}
+	return id
+}
+
 func (analyser *Analyser) handleInsertion(
-	change *object.Change, day int, status map[int]int64, files map[string]*File,
-	cache *map[plumbing.Hash]*object.Blob) {
+	change *object.Change, author int, day int, global_status map[int]int64,
+  files map[string]*File, people []map[int]int64, matrix []map[int]int64,
+  cache *map[plumbing.Hash]*object.Blob) {
+
 	blob := (*cache)[change.To.TreeEntry.Hash]
 	lines, err := loc(blob)
 	if err != nil {
@@ -153,13 +248,12 @@ func (analyser *Analyser) handleInsertion(
 	if exists {
 		panic(fmt.Sprintf("file %s already exists", name))
 	}
-	// The second status is specific to each file.
-	file = NewFile(day, lines, status, make(map[int]int64))
+	file = analyser.newFile(author, day, lines, global_status, people, matrix)
 	files[name] = file
 }
 
 func (analyser *Analyser) handleDeletion(
-	change *object.Change, day int, status map[int]int64, files map[string]*File,
+	change *object.Change, author int, day int, status map[int]int64, files map[string]*File,
 	cache *map[plumbing.Hash]*object.Blob) {
 	blob := (*cache)[change.From.TreeEntry.Hash]
 	lines, err := loc(blob)
@@ -168,13 +262,15 @@ func (analyser *Analyser) handleDeletion(
 	}
 	name := change.From.Name
 	file := files[name]
-	file.Update(day, 0, 0, lines)
+	file.Update(analyser.packPersonWithDay(author, day), 0, 0, lines)
 	delete(files, name)
 }
 
 func (analyser *Analyser) handleModification(
-	change *object.Change, day int, status map[int]int64, files map[string]*File,
-	cache *map[plumbing.Hash]*object.Blob) {
+	change *object.Change, author int, day int, status map[int]int64, files map[string]*File,
+	people []map[int]int64, matrix []map[int]int64,
+  cache *map[plumbing.Hash]*object.Blob) {
+
 	blob_from := (*cache)[change.From.TreeEntry.Hash]
 	blob_to := (*cache)[change.To.TreeEntry.Hash]
 	// we are not validating UTF-8 here because for example
@@ -183,7 +279,7 @@ func (analyser *Analyser) handleModification(
 	str_to := str(blob_to)
 	file, exists := files[change.From.Name]
 	if !exists {
-		analyser.handleInsertion(change, day, status, files, cache)
+		analyser.handleInsertion(change, author, day, status, files, people, matrix, cache)
 		return
 	}
 	// possible rename
@@ -207,10 +303,10 @@ func (analyser *Analyser) handleModification(
 	apply := func(edit diffmatchpatch.Diff) {
 		length := utf8.RuneCountInString(edit.Text)
 		if edit.Type == diffmatchpatch.DiffInsert {
-			file.Update(day, position, length, 0)
+			file.Update(analyser.packPersonWithDay(author, day), position, length, 0)
 			position += length
 		} else {
-			file.Update(day, position, 0, length)
+			file.Update(analyser.packPersonWithDay(author, day), position, 0, length)
 		}
 		if analyser.Debug {
 			file.Validate()
@@ -249,7 +345,8 @@ func (analyser *Analyser) handleModification(
 					if pending.Type == diffmatchpatch.DiffInsert {
 						panic("DiffInsert may not appear after DiffInsert")
 					}
-					file.Update(day, position, length, utf8.RuneCountInString(pending.Text))
+					file.Update(analyser.packPersonWithDay(author, day), position, length,
+						          utf8.RuneCountInString(pending.Text))
 					if analyser.Debug {
 						file.Validate()
 					}
@@ -318,7 +415,8 @@ func (analyser *Analyser) Commits() []*object.Commit {
 func (analyser *Analyser) groupStatus(
     status map[int]int64,
     files map[string]*File,
-    day int) ([]int64, map[string][]int64) {
+    people []map[int]int64,
+    day int) ([]int64, map[string][]int64, [][]int64) {
 	granularity := analyser.Granularity
 	if granularity == 0 {
 		granularity = 1
@@ -345,7 +443,7 @@ func (analyser *Analyser) groupStatus(
 		status := make([]int64, day/granularity+adjust)
 		var group int64
 		for i := 0; i < day; i++ {
-			group += file.Status(1)[i]
+			group += file.Status(1).(map[int]int64)[i]
 			if (i%granularity) == (granularity - 1) {
 				status[i/granularity] = group
 				group = 0
@@ -356,12 +454,29 @@ func (analyser *Analyser) groupStatus(
 		}
 		locals[key] = status
 	}
-	return global, locals
+	peoples := make([][]int64, len(people))
+	for key, person := range people {
+		status := make([]int64, day/granularity+adjust)
+		var group int64
+		for i := 0; i < day; i++ {
+			group += person[i]
+			if (i%granularity) == (granularity - 1) {
+				status[i/granularity] = group
+				group = 0
+			}
+		}
+		if day%granularity != 0 {
+			status[len(status)-1] = group
+		}
+		peoples[key] = status
+	}
+	return global, locals, peoples
 }
 
 func (analyser *Analyser) updateHistories(
     global_history [][]int64, global_status []int64,
     file_histories map[string][][]int64, file_statuses map[string][]int64,
+    people_histories [][][]int64, people_statuses [][]int64,
     delta int) [][]int64 {
 	for i := 0; i < delta; i++ {
 		global_history = append(global_history, global_status)
@@ -391,6 +506,14 @@ func (analyser *Analyser) updateHistories(
 		}
 		file_histories[key] = fh
 	}
+
+	for key, ph := range people_histories {
+		ls := people_statuses[key]
+		for i := 0; i < delta; i++ {
+			ph = append(ph, ls)
+		}
+		people_histories[key] = ph
+	}
 	return global_history
 }
 
@@ -660,7 +783,7 @@ func (analyser *Analyser) detectRenames(
 // each snapshot depends on Analyser.Granularity (the more Granularity,
 // the less the value).
 func (analyser *Analyser) Analyse(commits []*object.Commit) (
-    [][]int64, map[string][][]int64, map[int][][]int64, [][]int64) {
+    [][]int64, map[string][][]int64, [][][]int64, [][]int64) {
 	sampling := analyser.Sampling
 	if sampling == 0 {
 		sampling = 1
@@ -680,8 +803,14 @@ func (analyser *Analyser) Analyse(commits []*object.Commit) (
 	global_history := [][]int64{}
 	// weekly snapshots of each file's status
 	file_histories := map[string][][]int64{}
+	// weekly snapshots of each person's status
+	people_histories := make([][][]int64, analyser.PeopleNumber)
 	// mapping <file path> -> hercules.File
 	files := map[string]*File{}
+	// Mutual deletions and self insertions
+	matrix := make([]map[int]int64, analyser.PeopleNumber)
+	// People's individual time stats
+	people := make([]map[int]int64, analyser.PeopleNumber)
 
 	var day0 time.Time // will be initialized in the first iteration
 	var prev_tree *object.Tree = nil
@@ -693,6 +822,7 @@ func (analyser *Analyser) Analyse(commits []*object.Commit) (
 		if err != nil {
 			panic(err)
 		}
+		author := analyser.getAuthorId(commit.Author)
 		if index == 0 {
 			// first iteration - initialize the file objects from the tree
 			day0 = commit.Author.When
@@ -709,7 +839,7 @@ func (analyser *Analyser) Analyse(commits []*object.Commit) (
 					}
 					lines, err := loc(&file.Blob)
 					if err == nil {
-						files[file.Name] = NewFile(0, lines, global_status, make(map[int]int64))
+						files[file.Name] = analyser.newFile(author, 0, lines, global_status, people, matrix)
 					}
 				}
 			}()
@@ -722,9 +852,9 @@ func (analyser *Analyser) Analyse(commits []*object.Commit) (
 			delta := (day / sampling) - (prev_day / sampling)
 			if delta > 0 {
 				prev_day = day
-				gs, fss := analyser.groupStatus(global_status, files, day)
+				gs, fss, pss := analyser.groupStatus(global_status, files, people, day)
 				global_history = analyser.updateHistories(
-					global_history, gs, file_histories, fss, delta)
+					global_history, gs, file_histories, fss, people_histories, pss, delta)
 			}
 			tree_diff, err := object.DiffTree(prev_tree, tree)
 			if err != nil {
@@ -745,9 +875,9 @@ func (analyser *Analyser) Analyse(commits []*object.Commit) (
 				}
 				switch action {
 				case merkletrie.Insert:
-					analyser.handleInsertion(change, day, global_status, files, cache)
+					analyser.handleInsertion(change, author, day, global_status, files, people, matrix, cache)
 				case merkletrie.Delete:
-					analyser.handleDeletion(change, day, global_status, files, cache)
+					analyser.handleDeletion(change, author, day, global_status, files, cache)
 				case merkletrie.Modify:
 					func() {
 						defer func() {
@@ -758,16 +888,16 @@ func (analyser *Analyser) Analyse(commits []*object.Commit) (
 								panic(r)
 							}
 						}()
-						analyser.handleModification(change, day, global_status, files, cache)
+						analyser.handleModification(change, author, day, global_status, files, people, matrix, cache)
 					}()
 				}
 			}
 		}
 		prev_tree = tree
 	}
-	gs, fss := analyser.groupStatus(global_status, files, day)
+	gs, fss, pss := analyser.groupStatus(global_status, files, people, day)
 	global_history = analyser.updateHistories(
-		global_history, gs, file_histories, fss, 1)
+		global_history, gs, file_histories, fss, people_histories, pss, 1)
 	for key, statuses := range file_histories {
 		if len(statuses) == len(global_history) {
 			continue
@@ -778,7 +908,13 @@ func (analyser *Analyser) Analyse(commits []*object.Commit) (
 		}
 		file_histories[key] = append(padding, statuses...)
 	}
-	var people_statuses map[int][][]int64
-	var people_matrix [][]int64
-	return global_history, file_histories, people_statuses, people_matrix
+	people_matrix := make([][]int64, analyser.PeopleNumber)
+	for i, row := range matrix {
+		mrow := make([]int64, analyser.PeopleNumber + 2)
+		people_matrix[i] = mrow
+		for key, val := range row {
+			mrow[key + 2] = val
+		}
+	}
+	return global_history, file_histories, people_histories, people_matrix
 }
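
A note on the packing scheme above, with a small sketch (helper names daysShift/daysMask are mine, not the commit's): the day occupies the low 14 bits and the developer id is shifted above it, so MISSING_AUTHOR (-1) survives a round trip thanks to Go's arithmetic right shift. This is a simplified model of packPersonWithDay/unpackPersonWithDay that ignores the PeopleNumber == 0 shortcut.

	package main

	import "fmt"

	const daysShift = 14              // low 14 bits hold the day
	const daysMask = 1<<daysShift - 1 // 0x3FFF

	func pack(person, day int) int       { return person<<daysShift | day }
	func unpack(v int) (person, day int) { return v >> daysShift, v & daysMask }

	func main() {
		fmt.Println(unpack(pack(3, 200)))  // 3 200
		fmt.Println(unpack(pack(-1, 200))) // -1 200: MISSING_AUTHOR round-trips
	}

Note also that the final people_matrix shifts row keys by two (mrow[key + 2]), so SELF_AUTHOR (-2) lands in column 0, MISSING_AUTHOR (-1) in column 1, and real developer ids start at column 2; hence the PeopleNumber + 2 row width.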

+ 64 - 43
cmd/hercules/main.go

@@ -14,7 +14,6 @@ import (
 	"net/http"
 	_ "net/http/pprof"
 	"os"
-	"regexp"
 	"runtime/pprof"
 	"sort"
 	"strconv"
@@ -30,8 +29,14 @@ import (
 	"gopkg.in/src-d/hercules.v1"
 )
 
-func loadPeopleDict(path string) (map[string]int, map[int][]string) {
-	re := regexp.MustCompile(`^(^\s)+\s+(^\s)+$`)
+// Signature stores the author's identification. Only a single field is needed to
+// identify the author of a commit: Email is checked first, then Name.
+type Signature struct {
+	Name string
+	Email string
+}
+
+func loadPeopleDict(path string) (map[string]int, map[int]string, int) {
 	file, err := os.Open(path)
 	if err != nil {
 		panic(err)
@@ -39,24 +44,50 @@ func loadPeopleDict(path string) (map[string]int, map[int][]string) {
 	defer file.Close()
 	scanner := bufio.NewScanner(file)
 	dict := make(map[string]int)
-	reverse_vocabulary := make(map[int][]string)
-	vocabulary := make(map[string]int)
+	reverse_dict := make(map[int]string)
+	size := 0
 	for scanner.Scan() {
-		matches := re.FindStringSubmatch(scanner.Text())
-		id, exists := vocabulary[matches[2]]
-		if !exists {
-			id = len(vocabulary)
-			vocabulary[matches[2]] = id
-			_, exists := reverse_vocabulary[id]
+		for _, alias := range strings.Split(scanner.Text(), "|") {
+			dict[alias] = size
+		}
+		reverse_dict[size] = scanner.Text()
+		size += 1
+	}
+	return dict, reverse_dict, size
+}
+
+func generatePeopleDict(commits []*object.Commit) (map[string]int, map[int]string, int) {
+	dict := make(map[string]int)
+	emails := make(map[int][]string)
+	names := make(map[int][]string)
+	size := 0
+	for _, commit := range commits {
+		id, exists := dict[commit.Author.Email]
+		if exists {
+			_, exists := dict[commit.Author.Name]
 			if !exists {
-				reverse_vocabulary[id] = make([]string, 0)
-				reverse_vocabulary[id] = append(reverse_vocabulary[id], matches[2])
+				dict[commit.Author.Name] = id
+				names[id] = append(names[id], commit.Author.Name)
 			}
-			reverse_vocabulary[id] = append(reverse_vocabulary[id], matches[1])
+			continue
+		}
+		id, exists = dict[commit.Author.Name]
+		if exists {
+			dict[commit.Author.Email] = id
+			emails[id] = append(emails[id], commit.Author.Email)
+			continue
 		}
-		dict[matches[1]] = id
+		dict[commit.Author.Email] = size
+		dict[commit.Author.Name] = size
+		emails[size] = append(emails[size], commit.Author.Email)
+		names[size] = append(names[size], commit.Author.Name)
+		size += 1
+	}
+	reverse_dict := make(map[int]string)
+	for id := 0; id < size; id++ {
+		reverse_dict[id] = strings.Join(names[id], "|") + "|" + strings.Join(emails[id], "|")
 	}
-	return dict, reverse_vocabulary
+	return dict, reverse_dict, size
 }
 
 func loadCommitsFromFile(path string, repository *git.Repository) []*object.Commit {
@@ -119,7 +150,7 @@ func printStatuses(statuses [][]int64, name string) {
 	}
 }
 
-func sortedStringKeys(m map[string][][]int64) []string {
+func sortedKeys(m map[string][][]int64) []string {
 	keys := make([]string, 0, len(m))
 	for k := range m {
 		keys = append(keys, k)
@@ -128,15 +159,6 @@ func sortedStringKeys(m map[string][][]int64) []string {
 	return keys
 }
 
-func sortedIntKeys(m map[int][][]int64) []int {
-	keys := make([]int, 0, len(m))
-	for k := range m {
-		keys = append(keys, k)
-	}
-	sort.Ints(keys)
-	return keys
-}
-
 func main() {
 	var with_files bool
 	var with_people bool
@@ -163,11 +185,6 @@ func main() {
 		fmt.Fprint(os.Stderr, "Warning: adjusted the granularity to 1 day\n")
 		granularity = 1
 	}
-	var people_dict map[string]int
-	var people_ids map[int][]string
-	if people_dict_path != "" {
-		people_dict, people_ids = loadPeopleDict(people_dict_path)
-	}
 	if profile {
 		go http.ListenAndServe("localhost:6060", nil)
 		prof, _ := os.Create("hercules.pprof")
@@ -215,8 +232,6 @@ func main() {
 		Granularity:         granularity,
 		Sampling:            sampling,
 		SimilarityThreshold: similarity_threshold,
-		MeasurePeople:       with_people,
-		PeopleDict:          people_dict,
 		Debug:               debug,
 	}
 	// list of commits belonging to the default branch, from oldest to newest
@@ -227,6 +242,18 @@ func main() {
 	} else {
 		commits = loadCommitsFromFile(commitsFile, repository)
 	}
+	var people_ids map[int]string
+	if with_people {
+		var people_dict map[string]int
+		var people_number int
+		if people_dict_path != "" {
+			people_dict, people_ids, people_number = loadPeopleDict(people_dict_path)
+		} else {
+			people_dict, people_ids, people_number = generatePeopleDict(commits)
+		}
+		analyser.PeopleNumber = people_number
+		analyser.PeopleDict = people_dict
+	}
 	global_statuses, file_statuses, people_statuses, people_matrix := analyser.Analyse(commits)
 	fmt.Fprint(os.Stderr, "                \r")
 	if len(global_statuses) == 0 {
@@ -238,7 +265,7 @@ func main() {
 		granularity, sampling)
 	printStatuses(global_statuses, "")
 	if with_files {
-		keys := sortedStringKeys(file_statuses)
+		keys := sortedKeys(file_statuses)
 		for _, key := range keys {
 			fmt.Println()
 			printStatuses(file_statuses[key], key)
@@ -246,16 +273,10 @@ func main() {
 	}
 	if with_people {
 		fmt.Printf("%d\n", len(people_statuses))
-		keys := sortedIntKeys(people_statuses)
-		for _, key := range keys {
+		for key, val := range people_statuses {
+			fmt.Printf("%d: ", key)
+			printStatuses(val, people_ids[key])
 			fmt.Println()
-			sign := strconv.Itoa(key) + ": " + people_ids[key][0]
-			for i, val := range people_ids[key] {
-				if i > 0 {
-					sign += " <" + val + ">"
-				}
-			}
-			printStatuses(people_statuses[key], sign)
 		}
 		for _, row := range(people_matrix) {
 			for _, cell := range(row) {
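
For reference, the identity file consumed by loadPeopleDict lists one developer per line, with all of that developer's names and emails separated by "|": every token on a line maps to the same integer id, and the whole line is kept as the display string. A minimal sketch of that parsing (file contents invented for illustration):

	package main

	import (
		"fmt"
		"strings"
	)

	func main() {
		lines := []string{
			"Vadim Markovtsev|vadim@example.com",
			"Jane Doe|jane@example.com|jdoe@example.com",
		}
		dict := map[string]int{}
		reverseDict := map[int]string{}
		for id, line := range lines {
			for _, alias := range strings.Split(line, "|") {
				dict[alias] = id
			}
			reverseDict[id] = line
		}
		fmt.Println(dict["jdoe@example.com"]) // 1
		fmt.Println(reverseDict[1])           // Jane Doe|jane@example.com|jdoe@example.com
	}

When no file is supplied, generatePeopleDict builds the same mapping on the fly by chaining identities: a commit whose email is already known attaches its name to the same id, and vice versa.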

+ 20 - 10
file.go

@@ -2,6 +2,12 @@ package hercules
 
 import "fmt"
 
+// A Status is something we would like to update during File.Update().
+type Status struct {
+	data interface{}
+	update func(interface{}, int, int, int)
+}
+
 // A file encapsulates a balanced binary tree to store line intervals and
 // a cumulative mapping of values to the corresponding length counters. Users
 // are not supposed to create File-s directly; instead, they should call NewFile().
@@ -15,7 +21,11 @@ import "fmt"
 // Dump() writes the tree to a string and Validate() checks the tree integrity.
 type File struct {
 	tree   *RBTree
-	statuses []map[int]int64
+	statuses []Status
+}
+
+func NewStatus(data interface{}, update func(interface{}, int, int, int)) Status {
+	return Status{data: data, update: update}
 }
 
 // TreeEnd denotes the value of the last leaf in the tree.
@@ -64,9 +74,9 @@ func abs64(v int64) int64 {
 	return v
 }
 
-func (file *File) updateTime(time int, delta int) {
+func (file *File) updateTime(current_time int, previous_time int, delta int) {
 	for _, status := range file.statuses {
-		status[time] += int64(delta)
+		status.update(status.data, current_time, previous_time, delta)
 	}
 }
 
@@ -78,12 +88,12 @@ func (file *File) updateTime(time int, delta int) {
 // last node);
 //
 // statuses are the attached interval length mappings.
-func NewFile(time int, length int, statuses ...map[int]int64) *File {
+func NewFile(time int, length int, statuses ...Status) *File {
 	file := new(File)
 	file.statuses = statuses
 	file.tree = new(RBTree)
 	if length > 0 {
-		file.updateTime(time, length)
+		file.updateTime(time, time, length)
 		file.tree.Insert(Item{key: 0, value: time})
 	}
 	file.tree.Insert(Item{key: length, value: TreeEnd})
@@ -98,7 +108,7 @@ func NewFile(time int, length int, statuses ...map[int]int64) *File {
 // vals is a slice with the starting tree values. Must match the size of keys.
 //
 // statuses are the attached interval length mappings.
-func NewFileFromTree(keys []int, vals []int, statuses ...map[int]int64) *File {
+func NewFileFromTree(keys []int, vals []int, statuses ...Status) *File {
 	file := new(File)
 	file.statuses = statuses
 	file.tree = new(RBTree)
@@ -156,7 +166,7 @@ func (file *File) Update(time int, pos int, ins_length int, del_length int) {
 	}
 	iter := tree.FindLE(pos)
 	origin := *iter.Item()
-	file.updateTime(time, ins_length)
+	file.updateTime(time, time, ins_length)
 	if del_length == 0 {
 		// simple case with insertions only
 		if origin.key < pos || (origin.value == time && pos == 0) {
@@ -188,7 +198,7 @@ func (file *File) Update(time int, pos int, ins_length int, del_length int) {
 		if delta <= 0 {
 			break
 		}
-		file.updateTime(node.value, -delta)
+		file.updateTime(time, node.value, -delta)
 		if node.key >= pos {
 			origin = *node
 			tree.DeleteWithIterator(iter)
@@ -244,12 +254,12 @@ func (file *File) Update(time int, pos int, ins_length int, del_length int) {
 	}
 }
 
-func (file *File) Status(index int) map[int]int64 {
+func (file *File) Status(index int) interface{} {
 	if index < 0 || index >= len(file.statuses) {
 		panic(fmt.Sprintf("status index %d is out of bounds [0, %d)",
 		                  index, len(file.statuses)))
 	}
-	return file.statuses[index]
+	return file.statuses[index].data
 }
 
 // Dump formats the underlying line interval tree into a string.
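
The Status refactoring is what makes the per-person stats possible: instead of bare map[int]int64 counters, File now fans each interval change out to opaque callbacks that receive both the current and the previous time, so the packed person+day values ride through unchanged. A minimal in-package sketch (values worked out by hand; the callback mirrors analyser.updateStatus):

	total := map[int]int64{}
	sum := NewStatus(total, func(data interface{}, current, previous, delta int) {
		// credit the delta to the lines' original time, as updateStatus does
		data.(map[int]int64)[previous] += int64(delta)
	})
	f := NewFile(0, 10, sum) // 10 lines written at time 0
	f.Update(5, 0, 3, 2)     // at time 5: insert 3 lines at pos 0, delete 2
	// total is now {0: 8, 5: 3}: two time-0 lines died, three time-5 lines were born.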

+ 10 - 5
file_test.go

@@ -6,9 +6,14 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
+func updateStatus(
+    status interface{}, _ int, previous_time int, delta int) {
+	status.(map[int]int64)[previous_time] += int64(delta)
+}
+
 func fixture() (*File, map[int]int64) {
 	status := map[int]int64{}
-	file := NewFile(0, 100, status)
+	file := NewFile(0, 100, NewStatus(status, updateStatus))
 	return file, status
 }
 
@@ -69,7 +74,7 @@ func TestInsert(t *testing.T) {
 
 func TestZeroInitialize(t *testing.T) {
 	status := map[int]int64{}
-	file := NewFile(0, 0, status)
+	file := NewFile(0, 0, NewStatus(status, updateStatus))
 	assert.NotContains(t, status, 0)
 	dump := file.Dump()
 	// Output:
@@ -320,7 +325,7 @@ func TestBug3(t *testing.T) {
 
 func TestBug4(t *testing.T) {
 	status := map[int]int64{}
-	file := NewFile(0, 10, status)
+	file := NewFile(0, 10, NewStatus(status, updateStatus))
 	file.Update(125, 0, 20, 9)
 	file.Update(125, 0, 20, 20)
 	file.Update(166, 12, 1, 1)
@@ -350,14 +355,14 @@ func TestBug5(t *testing.T) {
 	status := map[int]int64{}
 	keys := []int{0, 2, 4, 7, 10}
 	vals := []int{24, 28, 24, 28, -1}
-	file := NewFileFromTree(keys, vals, status)
+	file := NewFileFromTree(keys, vals, NewStatus(status, updateStatus))
 	file.Update(28, 0, 1, 3)
 	dump := file.Dump()
 	assert.Equal(t, "0 28\n2 24\n5 28\n8 -1\n", dump)
 
 	keys = []int{0, 1, 16, 18}
 	vals = []int{305, 0, 157, -1}
-	file = NewFileFromTree(keys, vals, status)
+	file = NewFileFromTree(keys, vals, NewStatus(status, updateStatus))
 	file.Update(310, 0, 0, 2)
 	dump = file.Dump()
 	assert.Equal(t, "0 0\n14 157\n16 -1\n", dump)
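
A gloss on the TestBug5 fixtures, for readers decoding them: NewFileFromTree rebuilds a File from a flattened interval tree where keys are interval start offsets, vals are the times the intervals were written, and the trailing -1 value is TreeEnd. Under that reading (assumed from the dumps, not stated in the commit):

	// keys={0, 2, 4}, vals={24, 28, -1} encodes a 4-line file whose
	// lines [0, 2) were last touched at time 24 and [2, 4) at time 28.
	file := NewFileFromTree(
		[]int{0, 2, 4},
		[]int{24, 28, -1},
		NewStatus(map[int]int64{}, updateStatus),
	)
	dump := file.Dump() // "0 24\n2 28\n4 -1\n"
	_ = dump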