package hercules

import (
	"bufio"
	"errors"
	"fmt"
	"io"
	"os"
	"unicode/utf8"

	"github.com/sergi/go-diff/diffmatchpatch"
	"gopkg.in/src-d/go-git.v4"
	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
)
 
// BurndownAnalysis allows gathering the line burndown statistics for a Git repository.
type BurndownAnalysis struct {
	// Granularity sets the size of each band - the number of days it spans.
	// Smaller values provide better resolution but require more work and eat more
	// memory. 30 days is usually enough.
	Granularity int
	// Sampling sets how detailed the statistics are - the size of the interval in
	// days between consecutive measurements. It is usually a good idea to set it
	// <= Granularity. Try 15 or 30.
	Sampling int
	// PeopleNumber is the number of developers for which to collect the burndown
	// stats. 0 disables the per-developer analysis.
	PeopleNumber int
	// Debug activates the debugging mode. The analysis runs slower in this mode
	// but accurately checks all the intermediate states for invariant
	// violations.
	Debug bool
	// repository points to the analysed Git repository struct from go-git.
	repository *git.Repository
	// globalStatus is the current number of alive lines per day; the key is the
	// number of days since the beginning of the history.
	globalStatus map[int]int64
	// globalHistory is the sampled snapshots of globalStatus, one per Sampling days.
	globalHistory [][]int64
	// fileHistories is the sampled snapshots of each file's status.
	fileHistories map[string][][]int64
	// peopleHistories is the sampled snapshots of each person's status.
	peopleHistories [][][]int64
	// files is the mapping <file path> -> *File.
	files map[string]*File
	// matrix is the mutual deletions and self insertions.
	matrix []map[int]int64
	// people is the people's individual time stats.
	people []map[int]int64
	// day is the most recent day index processed.
	day int
	// previousDay is the day from the previous sample period -
	// different from DaysSinceStart.previousDay.
	previousDay int
}
 
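// BurndownResult is what Finalize() returns: the sampled global, per-file and
// per-developer histories, plus the people interaction matrix.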
type BurndownResult struct {
	GlobalHistory   [][]int64
	FileHistories   map[string][][]int64
	PeopleHistories [][][]int64
	PeopleMatrix    [][]int64
}
 
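// Name returns the name of the analysis: "Burndown".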
func (analyser *BurndownAnalysis) Name() string {
	return "Burndown"
}
 
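// Provides returns the list of keys this item exposes to dependent items;
// the burndown analysis is a terminal item and provides nothing.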
func (analyser *BurndownAnalysis) Provides() []string {
	return []string{}
}
 
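// Requires returns the list of dependency keys which must be present in the
// deps map passed to Consume().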
func (analyser *BurndownAnalysis) Requires() []string {
	return []string{"file_diff", "renamed_changes", "blob_cache", "day", "author"}
}
 
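// Initialize prepares and resets the analysis state before the first
// Consume() call.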
func (analyser *BurndownAnalysis) Initialize(repository *git.Repository) {
	analyser.repository = repository
	analyser.globalStatus = map[int]int64{}
	analyser.globalHistory = [][]int64{}
	analyser.fileHistories = map[string][][]int64{}
	analyser.peopleHistories = make([][][]int64, analyser.PeopleNumber)
	analyser.files = map[string]*File{}
	analyser.matrix = make([]map[int]int64, analyser.PeopleNumber)
	analyser.people = make([]map[int]int64, analyser.PeopleNumber)
	analyser.day = 0
	analyser.previousDay = 0
}
 
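// Consume processes the next commit. Every time the sample period rolls over,
// the accumulated status is snapshotted into the histories before the commit's
// tree changes are applied. For example, with Sampling 30, a commit on day 95
// following a sample taken on day 60 yields delta = 95/30 - 60/30 = 1, so one
// new snapshot is recorded.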
func (analyser *BurndownAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
	sampling := analyser.Sampling
	if sampling == 0 {
		sampling = 1
	}
	author := deps["author"].(int)
	analyser.day = deps["day"].(int)
	delta := (analyser.day / sampling) - (analyser.previousDay / sampling)
	if delta > 0 {
		analyser.previousDay = analyser.day
		gs, fss, pss := analyser.groupStatus()
		analyser.updateHistories(gs, fss, pss, delta)
	}
	cache := deps["blob_cache"].(map[plumbing.Hash]*object.Blob)
	treeDiffs := deps["renamed_changes"].(object.Changes)
	fileDiffs := deps["file_diff"].(map[string]FileDiffData)
	for _, change := range treeDiffs {
		action, err := change.Action()
		if err != nil {
			return nil, err
		}
		switch action {
		case merkletrie.Insert:
			err = analyser.handleInsertion(change, author, cache)
		case merkletrie.Delete:
			err = analyser.handleDeletion(change, author, cache)
		case merkletrie.Modify:
			err = analyser.handleModification(change, author, cache, fileDiffs)
		}
		if err != nil {
			return nil, err
		}
	}
	return nil, nil
}
 
// Finalize returns the list of snapshots of the cumulative line edit times
// and the similar lists for every file which is alive in HEAD.
// The number of snapshots (the outer dimension of [][]int64) depends on
// BurndownAnalysis.Sampling (the larger the Sampling, the fewer the snapshots);
// the length of each snapshot depends on BurndownAnalysis.Granularity
// (the larger the Granularity, the shorter each snapshot).
func (analyser *BurndownAnalysis) Finalize() interface{} {
	gs, fss, pss := analyser.groupStatus()
	analyser.updateHistories(gs, fss, pss, 1)
	for key, statuses := range analyser.fileHistories {
		if len(statuses) == len(analyser.globalHistory) {
			continue
		}
		padding := make([][]int64, len(analyser.globalHistory)-len(statuses))
		for i := range padding {
			padding[i] = make([]int64, len(analyser.globalStatus))
		}
		analyser.fileHistories[key] = append(padding, statuses...)
	}
	peopleMatrix := make([][]int64, analyser.PeopleNumber)
	for i, row := range analyser.matrix {
		mrow := make([]int64, analyser.PeopleNumber+2)
		peopleMatrix[i] = mrow
		for key, val := range row {
			if key == MISSING_AUTHOR {
				key = -1
			} else if key == SELF_AUTHOR {
				key = -2
			}
			mrow[key+2] = val
		}
	}
	return BurndownResult{
		GlobalHistory:   analyser.globalHistory,
		FileHistories:   analyser.fileHistories,
		PeopleHistories: analyser.peopleHistories,
		PeopleMatrix:    peopleMatrix}
}
 
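// checkClose closes the Closer and panics if it fails; meant to be used with
// defer.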
func checkClose(c io.Closer) {
	if err := c.Close(); err != nil {
		panic(err)
	}
}
 
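// countLines returns the number of lines in the blob. Content which is not
// valid UTF-8 is reported as the sentinel error "binary".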
func countLines(file *object.Blob) (int, error) {
	reader, err := file.Reader()
	if err != nil {
		return 0, err
	}
	defer checkClose(reader)
	var scanner *bufio.Scanner
	buffer := make([]byte, bufio.MaxScanTokenSize)
	counter := 0
	// If a line does not fit into the buffer, bufio.Scanner stops with
	// ErrTooLong; in that case a fresh scanner is created over the same reader
	// and the counting continues from where the previous one gave up.
	for scanner == nil || scanner.Err() == bufio.ErrTooLong {
		if scanner != nil && !utf8.Valid(scanner.Bytes()) {
			return -1, errors.New("binary")
		}
		scanner = bufio.NewScanner(reader)
		scanner.Buffer(buffer, 0)
		for scanner.Scan() {
			if !utf8.Valid(scanner.Bytes()) {
				return -1, errors.New("binary")
			}
			counter++
		}
	}
	return counter, nil
}
 
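// packPersonWithDay encodes the author index and the day into a single int:
// the lower 14 bits carry the day, the upper bits carry the person. For
// example, author 3 on day 100 packs into 3<<14|100 = 49252, and
// unpackPersonWithDay(49252) returns (3, 100) again.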
func (analyser *BurndownAnalysis) packPersonWithDay(person int, day int) int {
	if analyser.PeopleNumber == 0 {
		return day
	}
	result := day
	result |= person << 14
	// This effectively means max 16384 days (>44 years) and (131072 - 2) devs
	return result
}

func (analyser *BurndownAnalysis) unpackPersonWithDay(value int) (int, int) {
	if analyser.PeopleNumber == 0 {
		return MISSING_AUTHOR, value
	}
	return value >> 14, value & 0x3FFF
}
 
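// updateStatus is the update callback attached to the global and per-file
// statuses: it shifts the number of lines attributed to the unpacked day by
// delta.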
func (analyser *BurndownAnalysis) updateStatus(
	status interface{}, _ int, previousTime int, delta int) {
	_, previousDay := analyser.unpackPersonWithDay(previousTime)
	status.(map[int]int64)[previousDay] += int64(delta)
}
 
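// updatePeople does the same as updateStatus for the per-developer statuses,
// skipping lines whose author is unknown.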
func (analyser *BurndownAnalysis) updatePeople(people interface{}, _ int, previousTime int, delta int) {
	oldAuthor, previousDay := analyser.unpackPersonWithDay(previousTime)
	if oldAuthor == MISSING_AUTHOR {
		return
	}
	casted := people.([]map[int]int64)
	stats := casted[oldAuthor]
	if stats == nil {
		stats = map[int]int64{}
		casted[oldAuthor] = stats
	}
	stats[previousDay] += int64(delta)
}
 
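// updateMatrix is the update callback for the interaction matrix: the change
// is recorded in oldAuthor's row under newAuthor's key, and positive deltas
// applied by an author to their own lines are counted under SELF_AUTHOR.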
func (analyser *BurndownAnalysis) updateMatrix(
	matrix_ interface{}, currentTime int, previousTime int, delta int) {
	matrix := matrix_.([]map[int]int64)
	newAuthor, _ := analyser.unpackPersonWithDay(currentTime)
	oldAuthor, _ := analyser.unpackPersonWithDay(previousTime)
	if oldAuthor == MISSING_AUTHOR {
		return
	}
	if newAuthor == oldAuthor && delta > 0 {
		newAuthor = SELF_AUTHOR
	}
	row := matrix[oldAuthor]
	if row == nil {
		row = map[int]int64{}
		matrix[oldAuthor] = row
	}
	// Missing keys read as zero, so the increment is safe.
	row[newAuthor] += int64(delta)
}
 
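// newFile constructs a File with the update callbacks wired: the global and
// per-file statuses alone when PeopleNumber is 0, plus the per-developer and
// interaction matrix statuses otherwise.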
func (analyser *BurndownAnalysis) newFile(
	author int, day int, size int, global map[int]int64, people []map[int]int64,
	matrix []map[int]int64) *File {
	if analyser.PeopleNumber == 0 {
		return NewFile(day, size, NewStatus(global, analyser.updateStatus),
			NewStatus(map[int]int64{}, analyser.updateStatus))
	}
	return NewFile(analyser.packPersonWithDay(author, day), size,
		NewStatus(global, analyser.updateStatus),
		NewStatus(map[int]int64{}, analyser.updateStatus),
		NewStatus(people, analyser.updatePeople),
		NewStatus(matrix, analyser.updateMatrix))
}
 
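// handleInsertion starts tracking a newly added file. Binary files are
// silently skipped; inserting an already tracked path is an error.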
func (analyser *BurndownAnalysis) handleInsertion(
	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob) error {
	blob := cache[change.To.TreeEntry.Hash]
	lines, err := countLines(blob)
	if err != nil {
		if err.Error() == "binary" {
			return nil
		}
		return err
	}
	name := change.To.Name
	file, exists := analyser.files[name]
	if exists {
		return fmt.Errorf("file %s already exists", name)
	}
	file = analyser.newFile(
		author, analyser.day, lines, analyser.globalStatus, analyser.people, analyser.matrix)
	analyser.files[name] = file
	return nil
}
 
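// handleDeletion erases all the lines of the deleted file and stops tracking
// it. Binary files are silently skipped.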
func (analyser *BurndownAnalysis) handleDeletion(
	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob) error {
	blob := cache[change.From.TreeEntry.Hash]
	lines, err := countLines(blob)
	if err != nil {
		if err.Error() == "binary" {
			return nil
		}
		return err
	}
	name := change.From.Name
	file := analyser.files[name]
	file.Update(analyser.packPersonWithDay(author, analyser.day), 0, 0, lines)
	delete(analyser.files, name)
	return nil
}
 
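// handleModification applies the precomputed diff to the tracked File,
// handling a possible rename first and verifying the line counts before and
// after against the diff metadata.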
func (analyser *BurndownAnalysis) handleModification(
	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob,
	diffs map[string]FileDiffData) error {
	file, exists := analyser.files[change.From.Name]
	if !exists {
		return analyser.handleInsertion(change, author, cache)
	}
	// possible rename
	if change.To.Name != change.From.Name {
		err := analyser.handleRename(change.From.Name, change.To.Name)
		if err != nil {
			return err
		}
	}
	thisDiffs := diffs[change.To.Name]
	if file.Len() != thisDiffs.OldLinesOfCode {
		fmt.Fprintf(os.Stderr, "====TREE====\n%s", file.Dump())
		return fmt.Errorf("%s: internal integrity error src %d != %d %s -> %s",
			change.To.Name, thisDiffs.OldLinesOfCode, file.Len(),
			change.From.TreeEntry.Hash.String(), change.To.TreeEntry.Hash.String())
	}
 
	// We do not call RunesToDiffLines, so the number of lines equals
	// the rune count.
	position := 0
	pending := diffmatchpatch.Diff{Text: ""}
	apply := func(edit diffmatchpatch.Diff) {
		length := utf8.RuneCountInString(edit.Text)
		if edit.Type == diffmatchpatch.DiffInsert {
			file.Update(analyser.packPersonWithDay(author, analyser.day), position, length, 0)
			position += length
		} else {
			file.Update(analyser.packPersonWithDay(author, analyser.day), position, 0, length)
		}
		if analyser.Debug {
			file.Validate()
		}
	}
 
	for _, edit := range thisDiffs.Diffs {
		dumpBefore := ""
		if analyser.Debug {
			dumpBefore = file.Dump()
		}
		length := utf8.RuneCountInString(edit.Text)
		debugError := func() {
			fmt.Fprintf(os.Stderr, "%s: internal diff error\n", change.To.Name)
			fmt.Fprintf(os.Stderr, "Update(%d, %d, %d (0), %d (0))\n", analyser.day, position,
				length, utf8.RuneCountInString(pending.Text))
			if dumpBefore != "" {
				fmt.Fprintf(os.Stderr, "====TREE BEFORE====\n%s====END====\n", dumpBefore)
			}
			fmt.Fprintf(os.Stderr, "====TREE AFTER====\n%s====END====\n", file.Dump())
		}
		switch edit.Type {
		case diffmatchpatch.DiffEqual:
			if pending.Text != "" {
				apply(pending)
				pending.Text = ""
			}
			position += length
		case diffmatchpatch.DiffInsert:
			if pending.Text != "" {
				if pending.Type == diffmatchpatch.DiffInsert {
					debugError()
					return errors.New("DiffInsert may not appear after DiffInsert")
				}
				// An insertion right after a deletion is a replacement: insert
				// the new lines and erase the pending old ones in one Update().
				file.Update(analyser.packPersonWithDay(author, analyser.day), position, length,
					utf8.RuneCountInString(pending.Text))
				if analyser.Debug {
					file.Validate()
				}
				position += length
				pending.Text = ""
			} else {
				pending = edit
			}
		case diffmatchpatch.DiffDelete:
			if pending.Text != "" {
				debugError()
				return errors.New("DiffDelete may not appear after DiffInsert/DiffDelete")
			}
			pending = edit
		default:
			debugError()
			return fmt.Errorf("diff operation is not supported: %d", edit.Type)
		}
	}
 
- 	if pending.Text != "" {
 
- 		apply(pending)
 
- 		pending.Text = ""
 
- 	}
 
- 	if file.Len() != thisDiffs.NewLinesOfCode {
 
- 		return errors.New(fmt.Sprintf("%s: internal integrity error dst %d != %d",
 
- 			change.To.Name, thisDiffs.NewLinesOfCode, file.Len()))
 
- 	}
 
- 	return nil
 
- }
 
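// handleRename re-registers a tracked File under its new path.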
func (analyser *BurndownAnalysis) handleRename(from, to string) error {
	file, exists := analyser.files[from]
	if !exists {
		return fmt.Errorf("file %s does not exist", from)
	}
	analyser.files[to] = file
	delete(analyser.files, from)
	return nil
}
 
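// groupStatus aggregates the per-day alive line counters into bands of
// Granularity days and returns the global, per-file and per-developer
// aggregates. For example, with Granularity 30 and the current day index 59,
// the result has two bands: days 0-29 and days 30-59; a trailing partial band
// is emitted when the day count is not a multiple of Granularity.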
func (analyser *BurndownAnalysis) groupStatus() ([]int64, map[string][]int64, [][]int64) {
	granularity := analyser.Granularity
	if granularity == 0 {
		granularity = 1
	}
	day := analyser.day
	day++
	adjust := 0
	if day%granularity != 0 {
		adjust = 1
	}
	global := make([]int64, day/granularity+adjust)
	var group int64
	for i := 0; i < day; i++ {
		group += analyser.globalStatus[i]
		if (i % granularity) == (granularity - 1) {
			global[i/granularity] = group
			group = 0
		}
	}
	if day%granularity != 0 {
		global[len(global)-1] = group
	}
	locals := make(map[string][]int64)
	for key, file := range analyser.files {
		status := make([]int64, day/granularity+adjust)
		var group int64
		for i := 0; i < day; i++ {
			group += file.Status(1).(map[int]int64)[i]
			if (i % granularity) == (granularity - 1) {
				status[i/granularity] = group
				group = 0
			}
		}
		if day%granularity != 0 {
			status[len(status)-1] = group
		}
		locals[key] = status
	}
	peoples := make([][]int64, len(analyser.people))
	for key, person := range analyser.people {
		status := make([]int64, day/granularity+adjust)
		var group int64
		for i := 0; i < day; i++ {
			group += person[i]
			if (i % granularity) == (granularity - 1) {
				status[i/granularity] = group
				group = 0
			}
		}
		if day%granularity != 0 {
			status[len(status)-1] = group
		}
		peoples[key] = status
	}
	return global, locals, peoples
}
 
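// updateHistories appends the freshly grouped statuses to the recorded
// histories delta times (one entry per elapsed sample period) and drops the
// histories of files which no longer exist.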
func (analyser *BurndownAnalysis) updateHistories(
	globalStatus []int64, fileStatuses map[string][]int64, peopleStatuses [][]int64, delta int) {
	for i := 0; i < delta; i++ {
		analyser.globalHistory = append(analyser.globalHistory, globalStatus)
	}
	toDelete := make([]string, 0)
	for key, fh := range analyser.fileHistories {
		ls, exists := fileStatuses[key]
		if !exists {
			toDelete = append(toDelete, key)
		} else {
			for i := 0; i < delta; i++ {
				fh = append(fh, ls)
			}
			analyser.fileHistories[key] = fh
		}
	}
	for _, key := range toDelete {
		delete(analyser.fileHistories, key)
	}
	for key, ls := range fileStatuses {
		fh, exists := analyser.fileHistories[key]
		if exists {
			continue
		}
		for i := 0; i < delta; i++ {
			fh = append(fh, ls)
		}
		analyser.fileHistories[key] = fh
	}
	for key, ph := range analyser.peopleHistories {
		ls := peopleStatuses[key]
		for i := 0; i < delta; i++ {
			ph = append(ph, ls)
		}
		analyser.peopleHistories[key] = ph
	}
}
 
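// A rough usage sketch (the pipeline machinery elsewhere in this package is
// what actually supplies the dependencies listed in Requires() and calls
// Consume() once per commit):
//
//	burndown := &BurndownAnalysis{Granularity: 30, Sampling: 30}
//	burndown.Initialize(repository)
//	// ... the pipeline calls burndown.Consume(deps) for every commit ...
//	result := burndown.Finalize().(BurndownResult)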