Merge pull request #245 from bobheadxi/master

Add more granular intervals (replace 'days' with generic 'ticks')
Vadim Markovtsev · 6 years ago · commit 0839cecee8
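
The gist of the change, as an illustration rather than part of the patch: a commit's interval index is now its elapsed time since the first commit divided by a configurable tick size, instead of a hard-coded 24 hours. A minimal standalone Go sketch of the new arithmetic in `TicksSinceStart.Consume()` (timestamps made up):

```go
package main

import (
	"fmt"
	"time"
)

// tickIndex mirrors the arithmetic introduced in TicksSinceStart.Consume():
// the elapsed time since the first commit, integer-divided by the tick size.
// With the default tick size of 24h it reproduces the old "day" index.
func tickIndex(first, when time.Time, tickSize time.Duration) int {
	return int(when.Sub(first) / tickSize)
}

func main() {
	first := time.Date(2019, 3, 1, 18, 30, 29, 0, time.UTC) // made-up timestamps
	later := first.Add(25 * time.Hour)
	fmt.Println(tickIndex(first, later, 24*time.Hour)) // 1  (one "day")
	fmt.Println(tickIndex(first, later, time.Hour))    // 25 (hourly ticks)
}
```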

+ 17 - 17
contrib/_plugin_example/churn_analysis.go

@@ -33,7 +33,7 @@ type ChurnAnalysis struct {
 }
 
 type editInfo struct {
-	Day     int
+	Tick    int
 	Added   int
 	Removed int
 }
@@ -45,7 +45,7 @@ type ChurnAnalysisResult struct {
 }
 
 type Edits struct {
-	Days      []int
+	Ticks     []int
 	Additions []int
 	Removals  []int
 }
@@ -68,14 +68,14 @@ func (churn *ChurnAnalysis) Provides() []string {
 // file_diff - line diff for each commit change
 // changes - list of changed files for each commit
 // blob_cache - set of blobs affected by each commit
-// day - number of days since start for each commit
+// tick - number of ticks since start for each commit
 // author - author of the commit
 func (churn *ChurnAnalysis) Requires() []string {
 	arr := [...]string{
 		hercules.DependencyFileDiff,
 		hercules.DependencyTreeChanges,
 		hercules.DependencyBlobCache,
-		hercules.DependencyDay,
+		hercules.DependencyTick,
 		hercules.DependencyAuthor}
 	return arr[:]
 }
@@ -129,7 +129,7 @@ func (churn *ChurnAnalysis) Consume(deps map[string]interface{}) (map[string]int
 	fileDiffs := deps[hercules.DependencyFileDiff].(map[string]hercules.FileDiffData)
 	treeDiffs := deps[hercules.DependencyTreeChanges].(object.Changes)
 	cache := deps[hercules.DependencyBlobCache].(map[plumbing.Hash]*hercules.CachedBlob)
-	day := deps[hercules.DependencyDay].(int)
+	tick := deps[hercules.DependencyTick].(int)
 	author := deps[hercules.DependencyAuthor].(int)
 	for _, change := range treeDiffs {
 		action, err := change.Action()
@@ -161,7 +161,7 @@ func (churn *ChurnAnalysis) Consume(deps map[string]interface{}) (map[string]int
 		if err != nil {
 			return nil, err
 		}
-		ei := editInfo{Day: day, Added: added, Removed: removed}
+		ei := editInfo{Tick: tick, Added: added, Removed: removed}
 		churn.global = append(churn.global, ei)
 		if churn.TrackPeople {
 			seq, exists := churn.people[author]
@@ -230,13 +230,13 @@ func (churn *ChurnAnalysis) serializeBinary(result *ChurnAnalysisResult, writer
 func editInfosToEdits(eis []editInfo) Edits {
 	aux := map[int]*editInfo{}
 	for _, ei := range eis {
-		ptr := aux[ei.Day]
+		ptr := aux[ei.Tick]
 		if ptr == nil {
-			ptr = &editInfo{Day: ei.Day}
+			ptr = &editInfo{Tick: ei.Tick}
 		}
 		ptr.Added += ei.Added
 		ptr.Removed += ei.Removed
-		aux[ei.Day] = ptr
+		aux[ei.Tick] = ptr
 	}
 	seq := []int{}
 	for key := range aux {
@@ -244,14 +244,14 @@ func editInfosToEdits(eis []editInfo) Edits {
 	}
 	sort.Ints(seq)
 	edits := Edits{
-		Days:      make([]int, len(seq)),
+		Ticks:     make([]int, len(seq)),
 		Additions: make([]int, len(seq)),
 		Removals:  make([]int, len(seq)),
 	}
-	for i, day := range seq {
-		edits.Days[i] = day
-		edits.Additions[i] = aux[day].Added
-		edits.Removals[i] = aux[day].Removed
+	for i, tick := range seq {
+		edits.Ticks[i] = tick
+		edits.Additions[i] = aux[tick].Added
+		edits.Removals[i] = aux[tick].Removed
 	}
 	return edits
 }
@@ -268,14 +268,14 @@ func printEdits(edits Edits, writer io.Writer, indent int) {
 			}
 		}
 	}
-	printArray(edits.Days, "days")
+	printArray(edits.Ticks, "ticks")
 	printArray(edits.Additions, "additions")
 	printArray(edits.Removals, "removals")
 }
 
 func editsToEditsMessage(edits Edits) *EditsMessage {
 	message := &EditsMessage{
-		Days:      make([]uint32, len(edits.Days)),
+		Ticks:     make([]uint32, len(edits.Ticks)),
 		Additions: make([]uint32, len(edits.Additions)),
 		Removals:  make([]uint32, len(edits.Removals)),
 	}
@@ -284,7 +284,7 @@ func editsToEditsMessage(edits Edits) *EditsMessage {
 			where[i] = uint32(v)
 		}
 	}
-	copyInts(edits.Days, message.Days)
+	copyInts(edits.Ticks, message.Ticks)
 	copyInts(edits.Additions, message.Additions)
 	copyInts(edits.Removals, message.Removals)
 	return message

+ 1 - 1
contrib/_plugin_example/churn_analysis.proto

@@ -3,7 +3,7 @@ option go_package = "main";
 
 message EditsMessage {
     // all three are of the same length
-    repeated uint32 days = 1;
+    repeated uint32 ticks = 1;
     repeated uint32 additions = 2;
     repeated uint32 removals = 3;
 }

+ 5 - 5
core.go

@@ -123,9 +123,9 @@ const (
 	DependencyAuthor = identity.DependencyAuthor
 	// DependencyBlobCache identifies the dependency provided by BlobCache.
 	DependencyBlobCache = plumbing.DependencyBlobCache
-	// DependencyDay is the name of the dependency which DaysSinceStart provides - the number
-	// of days since the first commit in the analysed sequence.
-	DependencyDay = plumbing.DependencyDay
+	// DependencyTick is the name of the dependency which TicksSinceStart provides - the number
+	// of ticks since the first commit in the analysed sequence.
+	DependencyTick = plumbing.DependencyTick
 	// DependencyFileDiff is the name of the dependency provided by FileDiff.
 	DependencyFileDiff = plumbing.DependencyFileDiff
 	// DependencyTreeChanges is the name of the dependency provided by TreeDiff.
@@ -134,8 +134,8 @@ const (
 	DependencyUastChanges = uast.DependencyUastChanges
 	// DependencyUasts is the name of the dependency provided by Extractor.
 	DependencyUasts = uast.DependencyUasts
-	// FactCommitsByDay contains the mapping between day indices and the corresponding commits.
-	FactCommitsByDay = plumbing.FactCommitsByDay
+	// FactCommitsByTick contains the mapping between tick indices and the corresponding commits.
+	FactCommitsByTick = plumbing.FactCommitsByTick
 	// FactIdentityDetectorPeopleCount is the name of the fact which is inserted in
 	// identity.Detector.Configure(). It is equal to the overall number of unique authors
 	// (the length of ReversedPeopleDict).

+ 6 - 6
doc/dag.dot

@@ -3,20 +3,20 @@ digraph Hercules {
   node [fontname="Roboto", shape=box, style=rounded]
 
   "6 BlobCache" -> "7 [blob_cache]"
-  "0 DaysSinceStart" -> "3 [day]"
   "10 FileDiff" -> "12 [file_diff]"
   "16 FileDiffRefiner" -> "17 Burndown"
-  "1 IdentityDetector" -> "4 [author]"
+  "0 IdentityDetector" -> "3 [author]"
   "8 RenameAnalysis" -> "17 Burndown"
   "8 RenameAnalysis" -> "9 Couples"
   "8 RenameAnalysis" -> "10 FileDiff"
   "8 RenameAnalysis" -> "11 UAST"
   "8 RenameAnalysis" -> "14 UASTChanges"
+  "1 TicksSinceStart" -> "4 [tick]"
   "2 TreeDiff" -> "5 [changes]"
   "11 UAST" -> "13 [uasts]"
   "14 UASTChanges" -> "15 [changed_uasts]"
-  "4 [author]" -> "17 Burndown"
-  "4 [author]" -> "9 Couples"
+  "3 [author]" -> "17 Burndown"
+  "3 [author]" -> "9 Couples"
   "7 [blob_cache]" -> "17 Burndown"
   "7 [blob_cache]" -> "10 FileDiff"
   "7 [blob_cache]" -> "8 RenameAnalysis"
@@ -24,7 +24,7 @@ digraph Hercules {
   "15 [changed_uasts]" -> "16 FileDiffRefiner"
   "5 [changes]" -> "6 BlobCache"
   "5 [changes]" -> "8 RenameAnalysis"
-  "3 [day]" -> "17 Burndown"
   "12 [file_diff]" -> "16 FileDiffRefiner"
+  "4 [tick]" -> "17 Burndown"
   "13 [uasts]" -> "14 UASTChanges"
-}
+}

BIN
doc/dag.png


+ 1 - 1
internal/burndown/file.go

@@ -31,7 +31,7 @@ type File struct {
 // TreeEnd denotes the value of the last leaf in the tree.
 const TreeEnd = math.MaxUint32
 
-// TreeMaxBinPower is the binary power value which corresponds to the maximum day which
+// TreeMaxBinPower is the binary power value which corresponds to the maximum tick which
 // can be stored in the tree.
 const TreeMaxBinPower = 14
 

+ 8 - 8
internal/global_test.go

@@ -28,18 +28,18 @@ func TestPipelineSerialize(t *testing.T) {
 	dot := string(bdot)
 	assert.Equal(t, `digraph Hercules {
   "6 BlobCache" -> "7 [blob_cache]"
-  "0 DaysSinceStart" -> "3 [day]"
   "9 FileDiff" -> "11 [file_diff]"
   "15 FileDiffRefiner" -> "16 Burndown"
-  "1 IdentityDetector" -> "4 [author]"
+  "0 IdentityDetector" -> "3 [author]"
   "8 RenameAnalysis" -> "16 Burndown"
   "8 RenameAnalysis" -> "9 FileDiff"
   "8 RenameAnalysis" -> "10 UAST"
   "8 RenameAnalysis" -> "13 UASTChanges"
+  "1 TicksSinceStart" -> "4 [tick]"
   "2 TreeDiff" -> "5 [changes]"
   "10 UAST" -> "12 [uasts]"
   "13 UASTChanges" -> "14 [changed_uasts]"
-  "4 [author]" -> "16 Burndown"
+  "3 [author]" -> "16 Burndown"
   "7 [blob_cache]" -> "16 Burndown"
   "7 [blob_cache]" -> "9 FileDiff"
   "7 [blob_cache]" -> "8 RenameAnalysis"
@@ -47,8 +47,8 @@ func TestPipelineSerialize(t *testing.T) {
   "14 [changed_uasts]" -> "15 FileDiffRefiner"
   "5 [changes]" -> "6 BlobCache"
   "5 [changes]" -> "8 RenameAnalysis"
-  "3 [day]" -> "16 Burndown"
   "11 [file_diff]" -> "15 FileDiffRefiner"
+  "4 [tick]" -> "16 Burndown"
   "12 [uasts]" -> "13 UASTChanges"
 }`, dot)
 }
@@ -68,20 +68,20 @@ func TestPipelineSerializeNoUast(t *testing.T) {
 	dot := string(bdot)
 	assert.Equal(t, `digraph Hercules {
   "6 BlobCache" -> "7 [blob_cache]"
-  "0 DaysSinceStart" -> "3 [day]"
   "9 FileDiff" -> "10 [file_diff]"
-  "1 IdentityDetector" -> "4 [author]"
+  "0 IdentityDetector" -> "3 [author]"
   "8 RenameAnalysis" -> "11 Burndown"
   "8 RenameAnalysis" -> "9 FileDiff"
+  "1 TicksSinceStart" -> "4 [tick]"
   "2 TreeDiff" -> "5 [changes]"
-  "4 [author]" -> "11 Burndown"
+  "3 [author]" -> "11 Burndown"
   "7 [blob_cache]" -> "11 Burndown"
   "7 [blob_cache]" -> "9 FileDiff"
   "7 [blob_cache]" -> "8 RenameAnalysis"
   "5 [changes]" -> "6 BlobCache"
   "5 [changes]" -> "8 RenameAnalysis"
-  "3 [day]" -> "11 Burndown"
   "10 [file_diff]" -> "11 Burndown"
+  "4 [tick]" -> "11 Burndown"
 }`, dot)
 }
 

File diff suppressed because it is too large
+ 908 - 326
internal/pb/pb.pb.go


+ 8 - 6
internal/pb/pb.proto

@@ -39,7 +39,7 @@ message FilesOwnership {
 }
 
 message BurndownAnalysisResults {
-    // how many days are in each band [burndown_project, burndown_file, burndown_developer]
+    // how many ticks are in each band [burndown_project, burndown_file, burndown_developer]
     int32 granularity = 1;
     // how frequently we measure the state of each band [burndown_project, burndown_file, burndown_developer]
     int32 sampling = 2;
@@ -53,6 +53,8 @@ message BurndownAnalysisResults {
     CompressedSparseRowMatrix people_interaction = 6;
     // How many lines belong to relevant developers for each file. The order is the same as in `files`.
     repeated FilesOwnership files_ownership = 7;
+    // how long each tick is, as an int64 nanosecond count (Go's time.Duration)
+    int64 tick_size = 8;
 }
 
 message CompressedSparseRowMatrix {
@@ -122,18 +124,18 @@ message LineStats {
     int32 changed = 3;
 }
 
-message DevDay {
+message DevTick {
     int32 commits = 1;
     LineStats stats = 2;
     map<string, LineStats> languages = 3;
 }
 
-message DayDevs {
-    map<int32, DevDay> devs = 1;
+message TickDevs {
+    map<int32, DevTick> devs = 1;
 }
 
 message DevsAnalysisResults {
-    map<int32, DayDevs> days = 1;
+    map<int32, TickDevs> ticks = 1;
     repeated string dev_index = 2;
 }
 
@@ -144,7 +146,7 @@ message Sentiment {
 }
 
 message CommentSentimentResults {
-    map<int32, Sentiment> sentiment_by_day = 1;
+    map<int32, Sentiment> sentiment_by_tick = 1;
 }
 
 message CommitFile {
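
Worth noting for consumers of the serialized results: `tick_size` travels as a plain int64 of nanoseconds, which is exactly Go's `time.Duration` representation. A small standalone sketch (not part of the patch) of the round trip performed by `serializeBinary` and `Deserialize` in `leaves/burndown.go`:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Writing: serializeBinary stores int64(result.TickSize) into TickSize.
	tickSize := 24 * time.Hour
	wire := int64(tickSize) // nanoseconds on the wire

	// Reading: Deserialize reconstructs time.Duration(msg.GetTickSize()).
	restored := time.Duration(wire)
	fmt.Println(wire, restored) // 86400000000000 24h0m0s
}
```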

File diff suppressed because it is too large
+ 145 - 137
internal/pb/pb_pb2.py


+ 0 - 135
internal/plumbing/day.go

@@ -1,135 +0,0 @@
-package plumbing
-
-import (
-	"log"
-	"time"
-
-	"gopkg.in/src-d/go-git.v4"
-	"gopkg.in/src-d/go-git.v4/plumbing"
-	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v9/internal/core"
-)
-
-// DaysSinceStart provides the relative date information for every commit.
-// It is a PipelineItem.
-type DaysSinceStart struct {
-	core.NoopMerger
-	remote      string
-	day0        *time.Time
-	previousDay int
-	commits     map[int][]plumbing.Hash
-}
-
-const (
-	// DependencyDay is the name of the dependency which DaysSinceStart provides - the number
-	// of days since the first commit in the analysed sequence.
-	DependencyDay = "day"
-
-	// FactCommitsByDay contains the mapping between day indices and the corresponding commits.
-	FactCommitsByDay = "DaysSinceStart.Commits"
-)
-
-// Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
-func (days *DaysSinceStart) Name() string {
-	return "DaysSinceStart"
-}
-
-// Provides returns the list of names of entities which are produced by this PipelineItem.
-// Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by core.Registry to build the global map of providers.
-func (days *DaysSinceStart) Provides() []string {
-	arr := [...]string{DependencyDay}
-	return arr[:]
-}
-
-// Requires returns the list of names of entities which are needed by this PipelineItem.
-// Each requested entity will be inserted into `deps` of Consume(). In turn, those
-// entities are Provides() upstream.
-func (days *DaysSinceStart) Requires() []string {
-	return []string{}
-}
-
-// ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (days *DaysSinceStart) ListConfigurationOptions() []core.ConfigurationOption {
-	return []core.ConfigurationOption{}
-}
-
-// Configure sets the properties previously published by ListConfigurationOptions().
-func (days *DaysSinceStart) Configure(facts map[string]interface{}) error {
-	if days.commits == nil {
-		days.commits = map[int][]plumbing.Hash{}
-	}
-	facts[FactCommitsByDay] = days.commits
-	return nil
-}
-
-// Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
-// calls. The repository which is going to be analysed is supplied as an argument.
-func (days *DaysSinceStart) Initialize(repository *git.Repository) error {
-	days.day0 = &time.Time{}
-	days.previousDay = 0
-	if len(days.commits) > 0 {
-		keys := make([]int, len(days.commits))
-		for key := range days.commits {
-			keys = append(keys, key)
-		}
-		for _, key := range keys {
-			delete(days.commits, key)
-		}
-	}
-	if r, err := repository.Remotes(); err == nil && len(r) > 0 {
-		days.remote = r[0].Config().URLs[0]
-	}
-	return nil
-}
-
-// Consume runs this PipelineItem on the next commit data.
-// `deps` contain all the results from upstream PipelineItem-s as requested by Requires().
-// Additionally, DependencyCommit is always present there and represents the analysed *object.Commit.
-// This function returns the mapping with analysis results. The keys must be the same as
-// in Provides(). If there was an error, nil is returned.
-func (days *DaysSinceStart) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	commit := deps[core.DependencyCommit].(*object.Commit)
-	index := deps[core.DependencyIndex].(int)
-	if index == 0 {
-		// first iteration - initialize the file objects from the tree
-		// our precision is 1 day
-		*days.day0 = commit.Committer.When.Truncate(24 * time.Hour)
-		if days.day0.Unix() < 631152000 { // 01.01.1990, that was 30 years ago
-			log.Println()
-			log.Printf("Warning: suspicious committer timestamp in %s > %s",
-				days.remote, commit.Hash.String())
-		}
-	}
-	day := int(commit.Committer.When.Sub(*days.day0).Hours() / 24)
-	if day < days.previousDay {
-		// rebase works miracles, but we need the monotonous time
-		day = days.previousDay
-	}
-	days.previousDay = day
-	dayCommits := days.commits[day]
-	if dayCommits == nil {
-		dayCommits = []plumbing.Hash{}
-	}
-	exists := false
-	if commit.NumParents() > 0 {
-		for i := range dayCommits {
-			if dayCommits[len(dayCommits)-i-1] == commit.Hash {
-				exists = true
-			}
-		}
-	}
-	if !exists {
-		days.commits[day] = append(dayCommits, commit.Hash)
-	}
-	return map[string]interface{}{DependencyDay: day}, nil
-}
-
-// Fork clones this PipelineItem.
-func (days *DaysSinceStart) Fork(n int) []core.PipelineItem {
-	return core.ForkCopyPipelineItem(days, n)
-}
-
-func init() {
-	core.Registry.Register(&DaysSinceStart{})
-}

+ 0 - 156
internal/plumbing/day_test.go

@@ -1,156 +0,0 @@
-package plumbing
-
-import (
-	"bytes"
-	"log"
-	"os"
-	"testing"
-	"time"
-
-	"github.com/stretchr/testify/assert"
-	"gopkg.in/src-d/go-git.v4/plumbing"
-	"gopkg.in/src-d/hercules.v9/internal/core"
-	"gopkg.in/src-d/hercules.v9/internal/test"
-)
-
-func fixtureDaysSinceStart() *DaysSinceStart {
-	dss := DaysSinceStart{}
-	dss.Configure(map[string]interface{}{})
-	dss.Initialize(test.Repository)
-	return &dss
-}
-
-func TestDaysSinceStartMeta(t *testing.T) {
-	dss := fixtureDaysSinceStart()
-	assert.Equal(t, dss.Name(), "DaysSinceStart")
-	assert.Equal(t, len(dss.Provides()), 1)
-	assert.Equal(t, dss.Provides()[0], DependencyDay)
-	assert.Equal(t, len(dss.Requires()), 0)
-	assert.Len(t, dss.ListConfigurationOptions(), 0)
-	dss.Configure(map[string]interface{}{})
-}
-
-func TestDaysSinceStartRegistration(t *testing.T) {
-	summoned := core.Registry.Summon((&DaysSinceStart{}).Name())
-	assert.Len(t, summoned, 1)
-	assert.Equal(t, summoned[0].Name(), "DaysSinceStart")
-	summoned = core.Registry.Summon((&DaysSinceStart{}).Provides()[0])
-	assert.Len(t, summoned, 1)
-	assert.Equal(t, summoned[0].Name(), "DaysSinceStart")
-}
-
-func TestDaysSinceStartConsume(t *testing.T) {
-	dss := fixtureDaysSinceStart()
-	deps := map[string]interface{}{}
-	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
-		"cce947b98a050c6d356bc6ba95030254914027b1"))
-	deps[core.DependencyCommit] = commit
-	deps[core.DependencyIndex] = 0
-	res, err := dss.Consume(deps)
-	assert.Nil(t, err)
-	assert.Equal(t, res[DependencyDay].(int), 0)
-	assert.Equal(t, dss.previousDay, 0)
-	assert.Equal(t, dss.day0.Hour(), 1)   // 18 UTC+1
-	assert.Equal(t, dss.day0.Minute(), 0) // 30
-	assert.Equal(t, dss.day0.Second(), 0) // 29
-
-	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
-		"fc9ceecb6dabcb2aab60e8619d972e8d8208a7df"))
-	deps[core.DependencyCommit] = commit
-	deps[core.DependencyIndex] = 10
-	res, err = dss.Consume(deps)
-	assert.Nil(t, err)
-	assert.Equal(t, res[DependencyDay].(int), 1)
-	assert.Equal(t, dss.previousDay, 1)
-
-	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
-		"a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3"))
-	deps[core.DependencyCommit] = commit
-	deps[core.DependencyIndex] = 20
-	res, err = dss.Consume(deps)
-	assert.Nil(t, err)
-	assert.Equal(t, res[DependencyDay].(int), 1)
-	assert.Equal(t, dss.previousDay, 1)
-
-	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
-		"a8b665a65d7aced63f5ba2ff6d9b71dac227f8cf"))
-	deps[core.DependencyCommit] = commit
-	deps[core.DependencyIndex] = 20
-	res, err = dss.Consume(deps)
-	assert.Nil(t, err)
-	assert.Equal(t, res[DependencyDay].(int), 2)
-	assert.Equal(t, dss.previousDay, 2)
-
-	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
-		"186ff0d7e4983637bb3762a24d6d0a658e7f4712"))
-	deps[core.DependencyCommit] = commit
-	deps[core.DependencyIndex] = 30
-	res, err = dss.Consume(deps)
-	assert.Nil(t, err)
-	assert.Equal(t, res[DependencyDay].(int), 2)
-	assert.Equal(t, dss.previousDay, 2)
-
-	assert.Len(t, dss.commits, 3)
-	assert.Equal(t, dss.commits[0], []plumbing.Hash{plumbing.NewHash(
-		"cce947b98a050c6d356bc6ba95030254914027b1")})
-	assert.Equal(t, dss.commits[1], []plumbing.Hash{
-		plumbing.NewHash("fc9ceecb6dabcb2aab60e8619d972e8d8208a7df"),
-		plumbing.NewHash("a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3")})
-	assert.Equal(t, dss.commits[2], []plumbing.Hash{
-		plumbing.NewHash("a8b665a65d7aced63f5ba2ff6d9b71dac227f8cf"),
-		plumbing.NewHash("186ff0d7e4983637bb3762a24d6d0a658e7f4712")})
-}
-
-func TestDaysCommits(t *testing.T) {
-	dss := fixtureDaysSinceStart()
-	dss.commits[0] = []plumbing.Hash{plumbing.NewHash(
-		"cce947b98a050c6d356bc6ba95030254914027b1")}
-	commits := dss.commits
-	dss.Initialize(test.Repository)
-	assert.Len(t, dss.commits, 0)
-	assert.Equal(t, dss.commits, commits)
-}
-
-func TestDaysSinceStartFork(t *testing.T) {
-	dss1 := fixtureDaysSinceStart()
-	dss1.commits[0] = []plumbing.Hash{plumbing.NewHash(
-		"cce947b98a050c6d356bc6ba95030254914027b1")}
-	clones := dss1.Fork(1)
-	assert.Len(t, clones, 1)
-	dss2 := clones[0].(*DaysSinceStart)
-	assert.Equal(t, dss1.day0, dss2.day0)
-	assert.Equal(t, dss1.previousDay, dss2.previousDay)
-	assert.Equal(t, dss1.commits, dss2.commits)
-	dss1.commits[0] = append(dss1.commits[0], plumbing.ZeroHash)
-	assert.Len(t, dss2.commits[0], 2)
-	assert.True(t, dss1 != dss2)
-	// just for the sake of it
-	dss1.Merge([]core.PipelineItem{dss2})
-}
-
-func TestDaysSinceStartConsumeZero(t *testing.T) {
-	dss := fixtureDaysSinceStart()
-	deps := map[string]interface{}{}
-	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
-		"cce947b98a050c6d356bc6ba95030254914027b1"))
-	commit.Committer.When = time.Unix(0, 0)
-	deps[core.DependencyCommit] = commit
-	deps[core.DependencyIndex] = 0
-	// print warning to log
-	myOutput := &bytes.Buffer{}
-	log.SetOutput(myOutput)
-	defer func() {
-		log.SetOutput(os.Stderr)
-	}()
-	res, err := dss.Consume(deps)
-	assert.Nil(t, err)
-	assert.Contains(t, myOutput.String(), "Warning")
-	assert.Contains(t, myOutput.String(), "cce947b98a050c6d356bc6ba95030254914027b1")
-	assert.Contains(t, myOutput.String(), "hercules")
-	assert.Contains(t, myOutput.String(), "github.com")
-	assert.Equal(t, res[DependencyDay].(int), 0)
-	assert.Equal(t, dss.previousDay, 0)
-	assert.Equal(t, dss.day0.Year(), 1970)
-	assert.Equal(t, dss.day0.Minute(), 0)
-	assert.Equal(t, dss.day0.Second(), 0)
-}

+ 166 - 0
internal/plumbing/ticks.go

@@ -0,0 +1,166 @@
+package plumbing
+
+import (
+	"log"
+	"time"
+
+	"gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/go-git.v4/plumbing"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v9/internal/core"
+)
+
+// TicksSinceStart provides relative tick information for every commit.
+// It is a PipelineItem.
+type TicksSinceStart struct {
+	core.NoopMerger
+	TickSize time.Duration
+
+	remote       string
+	tick0        *time.Time
+	previousTick int
+	commits      map[int][]plumbing.Hash
+}
+
+const (
+	// DependencyTick is the name of the dependency which TicksSinceStart provides - the number
+	// of ticks since the first commit in the analysed sequence.
+	DependencyTick = "tick"
+
+	// FactCommitsByTick contains the mapping between tick indices and the corresponding commits.
+	FactCommitsByTick = "TicksSinceStart.Commits"
+
+	// FactTickSize contains the time.Duration of each tick.
+	FactTickSize = "TicksSinceStart.TickSize"
+
+	// ConfigTicksSinceStartTickSize sets the size of each 'tick' in hours.
+	ConfigTicksSinceStartTickSize = "TicksSinceStart.TickSize"
+
+	// DefaultTicksSinceStartTickSize is the default number of hours in each 'tick' (24 hours = 1 day).
+	DefaultTicksSinceStartTickSize = 24
+)
+
+// Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
+func (ticks *TicksSinceStart) Name() string {
+	return "TicksSinceStart"
+}
+
+// Provides returns the list of names of entities which are produced by this PipelineItem.
+// Each produced entity will be inserted into `deps` of dependent Consume()-s according
+// to this list. Also used by core.Registry to build the global map of providers.
+func (ticks *TicksSinceStart) Provides() []string {
+	arr := [...]string{DependencyTick}
+	return arr[:]
+}
+
+// Requires returns the list of names of entities which are needed by this PipelineItem.
+// Each requested entity will be inserted into `deps` of Consume(). In turn, those
+// entities are Provides() upstream.
+func (ticks *TicksSinceStart) Requires() []string {
+	return []string{}
+}
+
+// ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
+func (ticks *TicksSinceStart) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{{
+		Name:        ConfigTicksSinceStartTickSize,
+		Description: "How long each 'tick' represents in hours.",
+		Flag:        "tick-size",
+		Type:        core.IntConfigurationOption,
+		Default:     DefaultTicksSinceStartTickSize},
+	}
+}
+
+// Configure sets the properties previously published by ListConfigurationOptions().
+func (ticks *TicksSinceStart) Configure(facts map[string]interface{}) error {
+	if val, exists := facts[ConfigTicksSinceStartTickSize].(int); exists {
+		ticks.TickSize = time.Duration(val) * time.Hour
+	} else {
+		ticks.TickSize = DefaultTicksSinceStartTickSize * time.Hour
+	}
+	if ticks.commits == nil {
+		ticks.commits = map[int][]plumbing.Hash{}
+	}
+	facts[FactCommitsByTick] = ticks.commits
+	facts[FactTickSize] = ticks.TickSize
+	return nil
+}
+
+// Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
+// calls. The repository which is going to be analysed is supplied as an argument.
+func (ticks *TicksSinceStart) Initialize(repository *git.Repository) error {
+	if ticks.TickSize == 0 {
+		ticks.TickSize = DefaultTicksSinceStartTickSize * time.Hour
+	}
+	ticks.tick0 = &time.Time{}
+	ticks.previousTick = 0
+	if len(ticks.commits) > 0 {
+		keys := make([]int, 0, len(ticks.commits))
+		for key := range ticks.commits {
+			keys = append(keys, key)
+		}
+		for _, key := range keys {
+			delete(ticks.commits, key)
+		}
+	}
+	if r, err := repository.Remotes(); err == nil && len(r) > 0 {
+		ticks.remote = r[0].Config().URLs[0]
+	}
+	return nil
+}
+
+// Consume runs this PipelineItem on the next commit data.
+// `deps` contain all the results from upstream PipelineItem-s as requested by Requires().
+// Additionally, DependencyCommit is always present there and represents the analysed *object.Commit.
+// This function returns the mapping with analysis results. The keys must be the same as
+// in Provides(). If there was an error, nil is returned.
+func (ticks *TicksSinceStart) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+	commit := deps[core.DependencyCommit].(*object.Commit)
+	index := deps[core.DependencyIndex].(int)
+	if index == 0 {
+		// first iteration - remember the first commit's timestamp as the origin
+		// our precision is one tick (ticks.TickSize)
+		*ticks.tick0 = commit.Committer.When
+		if ticks.tick0.Unix() < 631152000 { // 01.01.1990, that was 30 years ago
+			log.Println()
+			log.Printf("Warning: suspicious committer timestamp in %s > %s: %d",
+				ticks.remote, commit.Hash.String(), ticks.tick0.Unix())
+		}
+	}
+
+	tick := int(commit.Committer.When.Sub(*ticks.tick0) / ticks.TickSize)
+	if tick < ticks.previousTick {
+		// rebase works miracles, but we need monotonic time
+		tick = ticks.previousTick
+	}
+
+	ticks.previousTick = tick
+	tickCommits := ticks.commits[tick]
+	if tickCommits == nil {
+		tickCommits = []plumbing.Hash{}
+	}
+
+	exists := false
+	if commit.NumParents() > 0 {
+		for i := range tickCommits {
+			if tickCommits[len(tickCommits)-i-1] == commit.Hash {
+				exists = true
+				break
+			}
+		}
+	}
+	if !exists {
+		ticks.commits[tick] = append(tickCommits, commit.Hash)
+	}
+
+	return map[string]interface{}{DependencyTick: tick}, nil
+}
+
+// Fork clones this PipelineItem.
+func (ticks *TicksSinceStart) Fork(n int) []core.PipelineItem {
+	return core.ForkCopyPipelineItem(ticks, n)
+}
+
+func init() {
+	core.Registry.Register(&TicksSinceStart{})
+}
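
To run with a non-default interval, the new `tick-size` flag supplies an integer number of hours which `Configure()` converts to a `time.Duration`. A hedged, standalone sketch of that resolution logic, with a plain map standing in for the pipeline's facts:

```go
package main

import (
	"fmt"
	"time"
)

// resolveTickSize reproduces the resolution logic of TicksSinceStart.Configure():
// the option value is an integer number of hours, defaulting to 24.
// The string key mirrors the ConfigTicksSinceStartTickSize constant above;
// the plain map stands in for the pipeline's facts.
func resolveTickSize(facts map[string]interface{}) time.Duration {
	if val, ok := facts["TicksSinceStart.TickSize"].(int); ok {
		return time.Duration(val) * time.Hour
	}
	return 24 * time.Hour // DefaultTicksSinceStartTickSize
}

func main() {
	fmt.Println(resolveTickSize(map[string]interface{}{}))                              // 24h0m0s
	fmt.Println(resolveTickSize(map[string]interface{}{"TicksSinceStart.TickSize": 4})) // 4h0m0s
}
```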

+ 215 - 0
internal/plumbing/ticks_test.go

@@ -0,0 +1,215 @@
+package plumbing
+
+import (
+	"bytes"
+	"log"
+	"os"
+	"strings"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+	"gopkg.in/src-d/go-git.v4/plumbing"
+	"gopkg.in/src-d/hercules.v9/internal/core"
+	"gopkg.in/src-d/hercules.v9/internal/test"
+)
+
+func fixtureTicksSinceStart(config ...map[string]interface{}) *TicksSinceStart {
+	tss := TicksSinceStart{
+		TickSize: 24 * time.Hour,
+	}
+	if len(config) != 1 {
+		config = []map[string]interface{}{{}}
+	}
+	tss.Configure(config[0])
+	tss.Initialize(test.Repository)
+	return &tss
+}
+
+func TestTicksSinceStartMeta(t *testing.T) {
+	tss := fixtureTicksSinceStart()
+	assert.Equal(t, tss.Name(), "TicksSinceStart")
+	assert.Equal(t, len(tss.Provides()), 1)
+	assert.Equal(t, tss.Provides()[0], DependencyTick)
+	assert.Equal(t, len(tss.Requires()), 0)
+	assert.Len(t, tss.ListConfigurationOptions(), 1)
+	tss.Configure(map[string]interface{}{})
+}
+
+func TestTicksSinceStartRegistration(t *testing.T) {
+	summoned := core.Registry.Summon((&TicksSinceStart{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "TicksSinceStart")
+	summoned = core.Registry.Summon((&TicksSinceStart{}).Provides()[0])
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "TicksSinceStart")
+}
+
+func TestTicksSinceStartConsume(t *testing.T) {
+	tss := fixtureTicksSinceStart()
+	deps := map[string]interface{}{}
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
+		"cce947b98a050c6d356bc6ba95030254914027b1"))
+	deps[core.DependencyCommit] = commit
+	deps[core.DependencyIndex] = 0
+	res, err := tss.Consume(deps)
+	assert.Nil(t, err)
+	assert.Equal(t, 0, res[DependencyTick].(int))
+	assert.Equal(t, 0, tss.previousTick)
+	assert.Equal(t, 18, tss.tick0.Hour())   // 18 UTC+1
+	assert.Equal(t, 30, tss.tick0.Minute()) // 30
+	assert.Equal(t, 29, tss.tick0.Second()) // 29
+
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
+		"fc9ceecb6dabcb2aab60e8619d972e8d8208a7df"))
+	deps[core.DependencyCommit] = commit
+	deps[core.DependencyIndex] = 10
+	res, err = tss.Consume(deps)
+	assert.Nil(t, err)
+	assert.Equal(t, 1, res[DependencyTick].(int))
+	assert.Equal(t, 1, tss.previousTick)
+
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
+		"a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3"))
+	deps[core.DependencyCommit] = commit
+	deps[core.DependencyIndex] = 20
+	res, err = tss.Consume(deps)
+	assert.Nil(t, err)
+	assert.Equal(t, 1, res[DependencyTick].(int))
+	assert.Equal(t, 1, tss.previousTick)
+
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
+		"a8b665a65d7aced63f5ba2ff6d9b71dac227f8cf"))
+	deps[core.DependencyCommit] = commit
+	deps[core.DependencyIndex] = 20
+	res, err = tss.Consume(deps)
+	assert.Nil(t, err)
+	assert.Equal(t, 2, res[DependencyTick].(int))
+	assert.Equal(t, 2, tss.previousTick)
+
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
+		"186ff0d7e4983637bb3762a24d6d0a658e7f4712"))
+	deps[core.DependencyCommit] = commit
+	deps[core.DependencyIndex] = 30
+	res, err = tss.Consume(deps)
+	assert.Nil(t, err)
+	assert.Equal(t, 2, res[DependencyTick].(int))
+	assert.Equal(t, 2, tss.previousTick)
+
+	assert.Len(t, tss.commits, 3)
+	assert.Equal(t, tss.commits[0], []plumbing.Hash{plumbing.NewHash(
+		"cce947b98a050c6d356bc6ba95030254914027b1")})
+	assert.Equal(t, tss.commits[1], []plumbing.Hash{
+		plumbing.NewHash("fc9ceecb6dabcb2aab60e8619d972e8d8208a7df"),
+		plumbing.NewHash("a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3")})
+	assert.Equal(t, tss.commits[2], []plumbing.Hash{
+		plumbing.NewHash("a8b665a65d7aced63f5ba2ff6d9b71dac227f8cf"),
+		plumbing.NewHash("186ff0d7e4983637bb3762a24d6d0a658e7f4712")})
+}
+
+func TestTicksSinceStartConsumeWithTickSize(t *testing.T) {
+	tss := fixtureTicksSinceStart(map[string]interface{}{
+		ConfigTicksSinceStartTickSize: 1, // 1-hour ticks
+	})
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
+		"cce947b98a050c6d356bc6ba95030254914027b1"))
+	deps := map[string]interface{}{
+		core.DependencyCommit: commit,
+		core.DependencyIndex:  0,
+	}
+	res, err := tss.Consume(deps)
+	assert.Nil(t, err)
+	assert.Equal(t, 0, res[DependencyTick].(int))
+	assert.Equal(t, 0, tss.previousTick)
+	assert.Equal(t, 18, tss.tick0.Hour())   // 18 UTC+1
+	assert.Equal(t, 30, tss.tick0.Minute()) // 30
+	assert.Equal(t, 29, tss.tick0.Second()) // 29
+
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
+		"fc9ceecb6dabcb2aab60e8619d972e8d8208a7df"))
+	deps[core.DependencyCommit] = commit
+	deps[core.DependencyIndex] = 10
+	res, err = tss.Consume(deps)
+	assert.Nil(t, err)
+	assert.Equal(t, 24, res[DependencyTick].(int)) // 1 day later
+	assert.Equal(t, 24, tss.previousTick)
+
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
+		"a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3"))
+	deps[core.DependencyCommit] = commit
+	deps[core.DependencyIndex] = 20
+	res, err = tss.Consume(deps)
+	assert.Nil(t, err)
+	assert.Equal(t, 24, res[DependencyTick].(int)) // 1 day later
+	assert.Equal(t, 24, tss.previousTick)
+
+	assert.Len(t, tss.commits, 2)
+	assert.Equal(t, []plumbing.Hash{plumbing.NewHash(
+		"cce947b98a050c6d356bc6ba95030254914027b1")},
+		tss.commits[0])
+	assert.Equal(t, []plumbing.Hash{
+		plumbing.NewHash("fc9ceecb6dabcb2aab60e8619d972e8d8208a7df"),
+		plumbing.NewHash("a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3")},
+		tss.commits[24])
+}
+
+func TestTicksCommits(t *testing.T) {
+	tss := fixtureTicksSinceStart()
+	tss.commits[0] = []plumbing.Hash{plumbing.NewHash(
+		"cce947b98a050c6d356bc6ba95030254914027b1")}
+	commits := tss.commits
+	tss.Initialize(test.Repository)
+	assert.Len(t, tss.commits, 0)
+	assert.Equal(t, tss.commits, commits)
+}
+
+func TestTicksSinceStartFork(t *testing.T) {
+	tss1 := fixtureTicksSinceStart()
+	tss1.commits[0] = []plumbing.Hash{plumbing.NewHash(
+		"cce947b98a050c6d356bc6ba95030254914027b1")}
+	clones := tss1.Fork(1)
+	assert.Len(t, clones, 1)
+	tss2 := clones[0].(*TicksSinceStart)
+	assert.Equal(t, tss1.tick0, tss2.tick0)
+	assert.Equal(t, tss1.previousTick, tss2.previousTick)
+	assert.Equal(t, tss1.commits, tss2.commits)
+	tss1.commits[0] = append(tss1.commits[0], plumbing.ZeroHash)
+	assert.Len(t, tss2.commits[0], 2)
+	assert.True(t, tss1 != tss2)
+	// just for the sake of it
+	tss1.Merge([]core.PipelineItem{tss2})
+}
+
+func TestTicksSinceStartConsumeZero(t *testing.T) {
+	tss := fixtureTicksSinceStart()
+	deps := map[string]interface{}{}
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
+		"cce947b98a050c6d356bc6ba95030254914027b1"))
+	commit.Committer.When = time.Unix(0, 0)
+	deps[core.DependencyCommit] = commit
+	deps[core.DependencyIndex] = 0
+	// print warning to log
+	myOutput := &bytes.Buffer{}
+	log.SetOutput(myOutput)
+	defer func() {
+		log.SetOutput(os.Stderr)
+	}()
+	res, err := tss.Consume(deps)
+	assert.Nil(t, err)
+	output := myOutput.String()
+	assert.Contains(t, output, "Warning")
+	assert.Contains(t, output, "cce947b98a050c6d356bc6ba95030254914027b1")
+	assert.Contains(t, output, "hercules")
+	// depending on where the contributor clones this project from, the remote
+	// reported in the error could either be from gopkg.in or github.com
+	if !strings.Contains(output, "github.com") && !strings.Contains(output, "gopkg.in") {
+		assert.Failf(t, "output should contain either 'github.com' or 'gopkg.in'", "got: '%s'", output)
+	}
+	assert.Equal(t, res[DependencyTick].(int), 0)
+	assert.Equal(t, tss.previousTick, 0)
+	if (tss.tick0.Year() != 1969) && (tss.tick0.Year() != 1970) {
+		assert.Failf(t, "tick0 should be unix-0 time (in either 1969 or 1970)", "got: '%v'", tss.tick0)
+	}
+	assert.Equal(t, tss.tick0.Minute(), 0)
+	assert.Equal(t, tss.tick0.Second(), 0)
+}

BIN
internal/test_data/burndown.pb


+ 1 - 1
labours.py

@@ -364,7 +364,7 @@ class ProtobufReader(Reader):
                                 stats.stats.changed, {k: [v.added, v.removed, v.changed]
                                                       for k, v in stats.languages.items()})
                     for dev, stats in day.devs.items()}
-                for d, day in self.contents["Devs"].days.items()}
+                for d, day in self.contents["Devs"].ticks.items()}
         return people, days
 
     def _parse_burndown_matrix(self, matrix):

+ 176 - 138
leaves/burndown.go

@@ -10,6 +10,7 @@ import (
 	"os"
 	"sort"
 	"sync"
+	"time"
 	"unicode/utf8"
 
 	"github.com/gogo/protobuf/proto"
@@ -31,12 +32,12 @@ import (
 // It is a LeafPipelineItem.
 // Reference: https://erikbern.com/2016/12/05/the-half-life-of-code.html
 type BurndownAnalysis struct {
-	// Granularity sets the size of each band - the number of days it spans.
+	// Granularity sets the size of each band - the number of ticks it spans.
 	// Smaller values provide better resolution but require more work and eat more
-	// memory. 30 days is usually enough.
+	// memory. 30 ticks is usually enough.
 	Granularity int
 	// Sampling sets how detailed is the statistic - the size of the interval in
-	// days between consecutive measurements. It may not be greater than Granularity. Try 15 or 30.
+	// ticks between consecutive measurements. It may not be greater than Granularity. Try 15 or 30.
 	Sampling int
 
 	// TrackFiles enables or disables the fine-grained per-file burndown analysis.
@@ -67,9 +68,9 @@ type BurndownAnalysis struct {
 	// Repository points to the analysed Git repository struct from go-git.
 	repository *git.Repository
 	// globalHistory is the daily deltas of daily line counts.
-	// E.g. day 0: day 0 +50 lines
-	//      day 10: day 0 -10 lines; day 10 +20 lines
-	//      day 12: day 0 -5 lines; day 10 -3 lines; day 12 +10 lines
+	// E.g. tick 0: tick 0 +50 lines
+	//      tick 10: tick 0 -10 lines; tick 10 +20 lines
+	//      tick 12: tick 0 -5 lines; tick 10 -3 lines; tick 12 +10 lines
 	// map [0] [0] = 50
 	// map[10] [0] = -10
 	// map[10][10] = 20
@@ -95,11 +96,13 @@ type BurndownAnalysis struct {
 	renames map[string]string
 	// matrix is the mutual deletions and self insertions.
 	matrix []map[int]int64
-	// day is the most recent day index processed.
-	day int
-	// previousDay is the day from the previous sample period -
-	// different from DaysSinceStart.previousDay.
-	previousDay int
+	// tick is the most recent tick index processed.
+	tick int
+	// previousTick is the tick from the previous sample period -
+	// different from TicksSinceStart.previousTick.
+	previousTick int
+	// tickSize indicates the size of each tick.
+	tickSize time.Duration
 	// references IdentityDetector.ReversedPeopleDict
 	reversedPeopleDict []string
 }
@@ -126,6 +129,8 @@ type BurndownResult struct {
 	// The rest of the elements are equal the number of line removals by the corresponding
 	// authors in reversedPeopleDict: 2 -> 0, 3 -> 1, etc.
 	PeopleMatrix DenseHistory
+	// The size of each tick.
+	TickSize time.Duration
 
 	// The following members are private.
 
@@ -160,7 +165,7 @@ const (
 	ConfigBurndownHibernationDirectory = "Burndown.HibernationDirectory"
 	// ConfigBurndownDebug enables some extra debug assertions.
 	ConfigBurndownDebug = "Burndown.Debug"
-	// DefaultBurndownGranularity is the default number of days for BurndownAnalysis.Granularity
+	// DefaultBurndownGranularity is the default number of ticks for BurndownAnalysis.Granularity
 	// and BurndownAnalysis.Sampling.
 	DefaultBurndownGranularity = 30
 	// authorSelf is the internal author index which is used in BurndownAnalysis.Finalize() to
@@ -192,7 +197,7 @@ func (analyser *BurndownAnalysis) Provides() []string {
 func (analyser *BurndownAnalysis) Requires() []string {
 	arr := [...]string{
 		items.DependencyFileDiff, items.DependencyTreeChanges, items.DependencyBlobCache,
-		items.DependencyDay, identity.DependencyAuthor}
+		items.DependencyTick, identity.DependencyAuthor}
 	return arr[:]
 }
 
@@ -200,12 +205,12 @@ func (analyser *BurndownAnalysis) Requires() []string {
 func (analyser *BurndownAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
 	options := [...]core.ConfigurationOption{{
 		Name:        ConfigBurndownGranularity,
-		Description: "How many days there are in a single band.",
+		Description: "How many time ticks there are in a single band.",
 		Flag:        "granularity",
 		Type:        core.IntConfigurationOption,
 		Default:     DefaultBurndownGranularity}, {
 		Name:        ConfigBurndownSampling,
-		Description: "How frequently to record the state in days.",
+		Description: "How frequently to record the state in time ticks.",
 		Flag:        "sampling",
 		Type:        core.IntConfigurationOption,
 		Default:     DefaultBurndownGranularity}, {
@@ -280,6 +285,9 @@ func (analyser *BurndownAnalysis) Configure(facts map[string]interface{}) error
 	if val, exists := facts[ConfigBurndownDebug].(bool); exists {
 		analyser.Debug = val
 	}
+	if val, exists := facts[items.FactTickSize].(time.Duration); exists {
+		analyser.tickSize = val
+	}
 	return nil
 }
 
@@ -298,12 +306,12 @@ func (analyser *BurndownAnalysis) Description() string {
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (analyser *BurndownAnalysis) Initialize(repository *git.Repository) error {
 	if analyser.Granularity <= 0 {
-		log.Printf("Warning: adjusted the granularity to %d days\n",
+		log.Printf("Warning: adjusted the granularity to %d ticks\n",
 			DefaultBurndownGranularity)
 		analyser.Granularity = DefaultBurndownGranularity
 	}
 	if analyser.Sampling <= 0 {
-		log.Printf("Warning: adjusted the sampling to %d days\n",
+		log.Printf("Warning: adjusted the sampling to %d ticks\n",
 			DefaultBurndownGranularity)
 		analyser.Sampling = DefaultBurndownGranularity
 	}
@@ -312,6 +320,11 @@ func (analyser *BurndownAnalysis) Initialize(repository *git.Repository) error {
 			analyser.Granularity)
 		analyser.Sampling = analyser.Granularity
 	}
+	if analyser.tickSize == 0 {
+		def := items.DefaultTicksSinceStartTickSize * time.Hour
+		log.Printf("Warning: tick size was not set, adjusted to %v\n", def)
+		analyser.tickSize = def
+	}
 	analyser.repository = repository
 	analyser.globalHistory = sparseHistory{}
 	analyser.fileHistories = map[string]sparseHistory{}
@@ -326,8 +339,8 @@ func (analyser *BurndownAnalysis) Initialize(repository *git.Repository) error {
 	analyser.mergedAuthor = identity.AuthorMissing
 	analyser.renames = map[string]string{}
 	analyser.matrix = make([]map[int]int64, analyser.PeopleNumber)
-	analyser.day = 0
-	analyser.previousDay = 0
+	analyser.tick = 0
+	analyser.previousTick = 0
 	return nil
 }
 
@@ -341,14 +354,14 @@ func (analyser *BurndownAnalysis) Consume(deps map[string]interface{}) (map[stri
 		panic("BurndownAnalysis.Consume() was called on a hibernated instance")
 	}
 	author := deps[identity.DependencyAuthor].(int)
-	day := deps[items.DependencyDay].(int)
+	tick := deps[items.DependencyTick].(int)
 	if !deps[core.DependencyIsMerge].(bool) {
-		analyser.day = day
-		analyser.onNewDay()
+		analyser.tick = tick
+		analyser.onNewTick()
 	} else {
 		// effectively disables the status updates if the commit is a merge
 		// we will analyse the conflicts resolution in Merge()
-		analyser.day = burndown.TreeMergeMark
+		analyser.tick = burndown.TreeMergeMark
 		analyser.mergedFiles = map[string]bool{}
 		analyser.mergedAuthor = author
 	}
@@ -370,8 +383,8 @@ func (analyser *BurndownAnalysis) Consume(deps map[string]interface{}) (map[stri
 			return nil, err
 		}
 	}
-	// in case there is a merge analyser.day equals to TreeMergeMark
-	analyser.day = day
+	// in case there is a merge analyser.tick equals to TreeMergeMark
+	analyser.tick = tick
 	return nil, nil
 }
 
@@ -431,7 +444,9 @@ func (analyser *BurndownAnalysis) Merge(branches []core.PipelineItem) {
 			// it could be also removed in the merge commit itself
 			continue
 		}
-		files[0].Merge(analyser.packPersonWithDay(analyser.mergedAuthor, analyser.day), files[1:]...)
+		files[0].Merge(
+			analyser.packPersonWithTick(analyser.mergedAuthor, analyser.tick),
+			files[1:]...)
 		for _, burn := range all {
 			if burn.files[key] != files[0] {
 				if burn.files[key] != nil {
@@ -441,7 +456,7 @@ func (analyser *BurndownAnalysis) Merge(branches []core.PipelineItem) {
 			}
 		}
 	}
-	analyser.onNewDay()
+	analyser.onNewTick()
 }
 
 // Hibernate compresses the bound RBTree memory with the files.
@@ -486,14 +501,14 @@ func (analyser *BurndownAnalysis) Boot() error {
 
 // Finalize returns the result of the analysis. Further Consume() calls are not expected.
 func (analyser *BurndownAnalysis) Finalize() interface{} {
-	globalHistory, lastDay := analyser.groupSparseHistory(analyser.globalHistory, -1)
+	globalHistory, lastTick := analyser.groupSparseHistory(analyser.globalHistory, -1)
 	fileHistories := map[string]DenseHistory{}
 	fileOwnership := map[string]map[int]int{}
 	for key, history := range analyser.fileHistories {
 		if len(history) == 0 {
 			continue
 		}
-		fileHistories[key], _ = analyser.groupSparseHistory(history, lastDay)
+		fileHistories[key], _ = analyser.groupSparseHistory(history, lastTick)
 		file := analyser.files[key]
 		previousLine := 0
 		previousAuthor := identity.AuthorMissing
@@ -505,7 +520,7 @@ func (analyser *BurndownAnalysis) Finalize() interface{} {
 				ownership[previousAuthor] += length
 			}
 			previousLine = line
-			previousAuthor, _ = analyser.unpackPersonWithDay(int(value))
+			previousAuthor, _ = analyser.unpackPersonWithTick(int(value))
 			if previousAuthor == identity.AuthorMissing {
 				previousAuthor = -1
 			}
@@ -515,7 +530,7 @@ func (analyser *BurndownAnalysis) Finalize() interface{} {
 	for i, history := range analyser.peopleHistories {
 		if len(history) > 0 {
 			// there can be people with only trivial merge commits and without own lines
-			peopleHistories[i], _ = analyser.groupSparseHistory(history, lastDay)
+			peopleHistories[i], _ = analyser.groupSparseHistory(history, lastTick)
 		} else {
 			peopleHistories[i] = make(DenseHistory, len(globalHistory))
 			for j, gh := range globalHistory {
@@ -545,6 +560,7 @@ func (analyser *BurndownAnalysis) Finalize() interface{} {
 		FileOwnership:      fileOwnership,
 		PeopleHistories:    peopleHistories,
 		PeopleMatrix:       peopleMatrix,
+		TickSize:           analyser.tickSize,
 		reversedPeopleDict: analyser.reversedPeopleDict,
 		sampling:           analyser.Sampling,
 		granularity:        analyser.Granularity,
@@ -554,7 +570,10 @@ func (analyser *BurndownAnalysis) Finalize() interface{} {
 // Serialize converts the analysis result as returned by Finalize() to text or bytes.
 // The text format is YAML and the bytes format is Protocol Buffers.
 func (analyser *BurndownAnalysis) Serialize(result interface{}, binary bool, writer io.Writer) error {
-	burndownResult := result.(BurndownResult)
+	burndownResult, ok := result.(BurndownResult)
+	if !ok {
+		return fmt.Errorf("result is not a burndown result: '%v'", result)
+	}
 	if binary {
 		return analyser.serializeBinary(&burndownResult, writer)
 	}
@@ -569,7 +588,6 @@ func (analyser *BurndownAnalysis) Deserialize(pbmessage []byte) (interface{}, er
 	if err != nil {
 		return nil, err
 	}
-	result := BurndownResult{}
 	convertCSR := func(mat *pb.BurndownSparseMatrix) DenseHistory {
 		res := make(DenseHistory, mat.NumberOfRows)
 		for i := 0; i < int(mat.NumberOfRows); i++ {
@@ -580,9 +598,15 @@ func (analyser *BurndownAnalysis) Deserialize(pbmessage []byte) (interface{}, er
 		}
 		return res
 	}
-	result.GlobalHistory = convertCSR(msg.Project)
-	result.FileHistories = map[string]DenseHistory{}
-	result.FileOwnership = map[string]map[int]int{}
+	result := BurndownResult{
+		GlobalHistory: convertCSR(msg.Project),
+		FileHistories: map[string]DenseHistory{},
+		FileOwnership: map[string]map[int]int{},
+		TickSize:      time.Duration(msg.GetTickSize()),
+
+		granularity: int(msg.Granularity),
+		sampling:    int(msg.Sampling),
+	}
 	for i, mat := range msg.Files {
 		result.FileHistories[mat.Name] = convertCSR(mat)
 		ownership := map[int]int{}
@@ -606,8 +630,6 @@ func (analyser *BurndownAnalysis) Deserialize(pbmessage []byte) (interface{}, er
 			result.PeopleMatrix[i][msg.PeopleInteraction.Indices[j]] = msg.PeopleInteraction.Data[j]
 		}
 	}
-	result.sampling = int(msg.Sampling)
-	result.granularity = int(msg.Granularity)
 	return result, nil
 }
 
@@ -616,7 +638,18 @@ func (analyser *BurndownAnalysis) MergeResults(
 	r1, r2 interface{}, c1, c2 *core.CommonAnalysisResult) interface{} {
 	bar1 := r1.(BurndownResult)
 	bar2 := r2.(BurndownResult)
-	merged := BurndownResult{}
+	if bar1.TickSize != bar2.TickSize {
+		return fmt.Errorf("mismatching tick sizes (r1: %d, r2: %d) received",
+			bar1.TickSize, bar2.TickSize)
+	}
+	// for backwards compatibility: if no tick size is present, fall back to the default
+	analyser.tickSize = bar1.TickSize
+	if analyser.tickSize == 0 {
+		analyser.tickSize = items.DefaultTicksSinceStartTickSize * time.Hour
+	}
+	merged := BurndownResult{
+		TickSize: analyser.tickSize,
+	}
 	if bar1.sampling < bar2.sampling {
 		merged.sampling = bar1.sampling
 	} else {
@@ -635,7 +668,7 @@ func (analyser *BurndownAnalysis) MergeResults(
 		wg.Add(1)
 		go func() {
 			defer wg.Done()
-			merged.GlobalHistory = mergeMatrices(
+			merged.GlobalHistory = analyser.mergeMatrices(
 				bar1.GlobalHistory, bar2.GlobalHistory,
 				bar1.granularity, bar1.sampling,
 				bar2.granularity, bar2.sampling,
@@ -658,7 +691,7 @@ func (analyser *BurndownAnalysis) MergeResults(
 					if ptrs[2] >= 0 {
 						m2 = bar2.PeopleHistories[ptrs[2]]
 					}
-					merged.PeopleHistories[i] = mergeMatrices(
+					merged.PeopleHistories[i] = analyser.mergeMatrices(
 						m1, m2,
 						bar1.granularity, bar1.sampling,
 						bar2.granularity, bar2.sampling,
@@ -711,18 +744,18 @@ func (analyser *BurndownAnalysis) MergeResults(
 	return merged
 }
 
-func roundTime(unix int64, dir bool) int {
-	days := float64(unix) / (3600 * 24)
+func (analyser *BurndownAnalysis) roundTime(unix int64, dir bool) int {
+	ticks := float64(unix) / analyser.tickSize.Seconds()
 	if dir {
-		return int(math.Ceil(days))
+		return int(math.Ceil(ticks))
 	}
-	return int(math.Floor(days))
+	return int(math.Floor(ticks))
 }
 
 // mergeMatrices takes two [number of samples][number of bands] matrices,
-// resamples them to days so that they become square, sums and resamples back to the
+// resamples them to ticks so that they become square, sums and resamples back to the
 // least of (sampling1, sampling2) and (granularity1, granularity2).
-func mergeMatrices(m1, m2 DenseHistory, granularity1, sampling1, granularity2, sampling2 int,
+func (analyser *BurndownAnalysis) mergeMatrices(m1, m2 DenseHistory, granularity1, sampling1, granularity2, sampling2 int,
 	c1, c2 *core.CommonAnalysisResult) DenseHistory {
 	commonMerged := c1.Copy()
 	commonMerged.Merge(c2)
@@ -739,18 +772,19 @@ func mergeMatrices(m1, m2 DenseHistory, granularity1, sampling1, granularity2, s
 		granularity = granularity2
 	}
 
-	size := roundTime(commonMerged.EndTime, true) - roundTime(commonMerged.BeginTime, false)
-	daily := make([][]float32, size+granularity)
-	for i := range daily {
-		daily[i] = make([]float32, size+sampling)
+	size := analyser.roundTime(commonMerged.EndTime, true) -
+		analyser.roundTime(commonMerged.BeginTime, false)
+	perTick := make([][]float32, size+granularity)
+	for i := range perTick {
+		perTick[i] = make([]float32, size+sampling)
 	}
 	if len(m1) > 0 {
-		addBurndownMatrix(m1, granularity1, sampling1, daily,
-			roundTime(c1.BeginTime, false)-roundTime(commonMerged.BeginTime, false))
+		addBurndownMatrix(m1, granularity1, sampling1, perTick,
+			analyser.roundTime(c1.BeginTime, false)-analyser.roundTime(commonMerged.BeginTime, false))
 	}
 	if len(m2) > 0 {
-		addBurndownMatrix(m2, granularity2, sampling2, daily,
-			roundTime(c2.BeginTime, false)-roundTime(commonMerged.BeginTime, false))
+		addBurndownMatrix(m2, granularity2, sampling2, perTick,
+			analyser.roundTime(c2.BeginTime, false)-analyser.roundTime(commonMerged.BeginTime, false))
 	}
 
 	// convert daily to [][]int64
@@ -761,7 +795,7 @@ func mergeMatrices(m1, m2 DenseHistory, granularity1, sampling1, granularity2, s
 		for j := 0; j < len(result[i]); j++ {
 			accum := float32(0)
 			for k := j * granularity; k < (j+1)*granularity; k++ {
-				accum += daily[sampledIndex][k]
+				accum += perTick[sampledIndex][k]
 			}
 			result[i][j] = int64(accum)
 		}
@@ -769,13 +803,13 @@ func mergeMatrices(m1, m2 DenseHistory, granularity1, sampling1, granularity2, s
 	return result
 }
 
-// Explode `matrix` so that it is daily sampled and has daily bands, shift by `offset` days
+// Explode `matrix` so that it is sampled per tick and has per-tick bands, shift by `offset` ticks
 // and add to the accumulator. `daily` size is square and is guaranteed to fit `matrix` by
 // the caller.
 // Rows: *at least* len(matrix) * sampling + offset
 // Columns: *at least* len(matrix[...]) * granularity + offset
 // `matrix` can be sparse, so that the last columns which are equal to 0 are truncated.
-func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, accdaily [][]float32, offset int) {
+func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, accPerTick [][]float32, offset int) {
 	// Determine the maximum number of bands; the actual one may be larger but we do not care
 	maxCols := 0
 	for _, row := range matrix {
@@ -784,17 +818,17 @@ func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, accdaily
 		}
 	}
 	neededRows := len(matrix)*sampling + offset
-	if len(accdaily) < neededRows {
-		log.Panicf("merge bug: too few daily rows: required %d, have %d",
-			neededRows, len(accdaily))
+	if len(accPerTick) < neededRows {
+		log.Panicf("merge bug: too few per-tick rows: required %d, have %d",
+			neededRows, len(accPerTick))
 	}
-	if len(accdaily[0]) < maxCols {
-		log.Panicf("merge bug: too few daily cols: required %d, have %d",
-			maxCols, len(accdaily[0]))
+	if len(accPerTick[0]) < maxCols {
+		log.Panicf("merge bug: too few per-tick cols: required %d, have %d",
+			maxCols, len(accPerTick[0]))
 	}
-	daily := make([][]float32, len(accdaily))
-	for i, row := range accdaily {
-		daily[i] = make([]float32, len(row))
+	perTick := make([][]float32, len(accPerTick))
+	for i, row := range accPerTick {
+		perTick[i] = make([]float32, len(row))
 	}
 	for x := 0; x < maxCols; x++ {
 		for y := 0; y < len(matrix); y++ {
@@ -809,9 +843,9 @@ func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, accdaily
 				k := float32(matrix[y][x]) / startVal // <= 1
 				scale := float32((y+1)*sampling - startIndex)
 				for i := x * granularity; i < (x+1)*granularity; i++ {
-					initial := daily[startIndex-1+offset][i+offset]
+					initial := perTick[startIndex-1+offset][i+offset]
 					for j := startIndex; j < (y+1)*sampling; j++ {
-						daily[j+offset][i+offset] = initial * (1 + (k-1)*float32(j-startIndex+1)/scale)
+						perTick[j+offset][i+offset] = initial * (1 + (k-1)*float32(j-startIndex+1)/scale)
 					}
 				}
 			}
@@ -830,13 +864,13 @@ func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, accdaily
 				avg := (finishVal - initial) / float32(finishIndex-startIndex)
 				for j := y * sampling; j < finishIndex; j++ {
 					for i := startIndex; i <= j; i++ {
-						daily[j+offset][i+offset] = avg
+						perTick[j+offset][i+offset] = avg
 					}
 				}
 				// copy [x*g..y*s)
 				for j := y * sampling; j < finishIndex; j++ {
 					for i := x * granularity; i < y*sampling; i++ {
-						daily[j+offset][i+offset] = daily[j-1+offset][i+offset]
+						perTick[j+offset][i+offset] = perTick[j-1+offset][i+offset]
 					}
 				}
 			}
@@ -872,7 +906,7 @@ func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, accdaily
 					avg := float32(matrix[y][x]) / float32((y+1)*sampling-x*granularity)
 					for j := x * granularity; j < (y+1)*sampling; j++ {
 						for i := x * granularity; i <= j; i++ {
-							daily[j+offset][i+offset] = avg
+							perTick[j+offset][i+offset] = avg
 						}
 					}
 				}
@@ -932,14 +966,14 @@ func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, accdaily
 			}
 		}
 	}
-	for y := len(matrix) * sampling; y+offset < len(daily); y++ {
-		copy(daily[y+offset], daily[len(matrix)*sampling-1+offset])
+	for y := len(matrix) * sampling; y+offset < len(perTick); y++ {
+		copy(perTick[y+offset], perTick[len(matrix)*sampling-1+offset])
 	}
-	// the original matrix has been resampled by day
+	// the original matrix has been resampled by tick
 	// add it to the accumulator
-	for y, row := range daily {
+	for y, row := range perTick {
 		for x, val := range row {
-			accdaily[y][x] += val
+			accPerTick[y][x] += val
 		}
 	}
 }
@@ -947,6 +981,7 @@ func addBurndownMatrix(matrix DenseHistory, granularity, sampling int, accdaily
 func (analyser *BurndownAnalysis) serializeText(result *BurndownResult, writer io.Writer) {
 	fmt.Fprintln(writer, "  granularity:", result.granularity)
 	fmt.Fprintln(writer, "  sampling:", result.sampling)
+	fmt.Fprintln(writer, "  tick_size:", result.TickSize)
 	yaml.PrintMatrix(writer, result.GlobalHistory, 2, "project", true)
 	if len(result.FileHistories) > 0 {
 		fmt.Fprintln(writer, "  files:")
@@ -999,6 +1034,7 @@ func (analyser *BurndownAnalysis) serializeBinary(result *BurndownResult, writer
 	message := pb.BurndownAnalysisResults{
 		Granularity: int32(result.granularity),
 		Sampling:    int32(result.sampling),
+		TickSize:    int64(result.TickSize),
 	}
 	if len(result.GlobalHistory) > 0 {
 		message.Project = pb.ToBurndownSparseMatrix(result.GlobalHistory, "project")
@@ -1054,87 +1090,88 @@ func checkClose(c io.Closer) {
 	}
 }
 
-// We do a hack and store the day in the first 14 bits and the author index in the last 18.
+// We do a hack and store the tick in the lower 14 bits and the author index in the next 18.
 // Strictly speaking, int can be 64-bit and then the author index occupies 32+18 bits.
 // This hack is needed to simplify the values storage inside File-s. We can compare
-// different values together and they are compared as days for the same author.
-func (analyser *BurndownAnalysis) packPersonWithDay(person int, day int) int {
+// different values together and they are compared as ticks for the same author.
+func (analyser *BurndownAnalysis) packPersonWithTick(person int, tick int) int {
 	if analyser.PeopleNumber == 0 {
-		return day
+		return tick
 	}
-	result := day & burndown.TreeMergeMark
+	result := tick & burndown.TreeMergeMark
 	result |= person << burndown.TreeMaxBinPower
-	// This effectively means max (16383 - 1) days (>44 years) and (262143 - 3) devs.
-	// One day less because burndown.TreeMergeMark = ((1 << 14) - 1) is a special day.
+	// This effectively means max (16383 - 1) ticks (>44 years if one tick is a day) and (262143 - 3) devs.
+	// One tick less because burndown.TreeMergeMark = ((1 << 14) - 1) is a special tick.
 	// Three devs less because:
-	// - math.MaxUint32 is the special rbtree value with day == TreeMergeMark (-1)
+	// - math.MaxUint32 is the special rbtree value with tick == TreeMergeMark (-1)
 	// - identity.AuthorMissing (-2)
 	// - authorSelf (-3)
 	return result
 }
 
-func (analyser *BurndownAnalysis) unpackPersonWithDay(value int) (int, int) {
+func (analyser *BurndownAnalysis) unpackPersonWithTick(value int) (int, int) {
 	if analyser.PeopleNumber == 0 {
 		return identity.AuthorMissing, value
 	}
 	return value >> burndown.TreeMaxBinPower, value & burndown.TreeMergeMark
 }
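
The packing above round-trips as follows; a minimal standalone sketch, with ad-hoc constants standing in for burndown.TreeMaxBinPower and burndown.TreeMergeMark:

	package main

	import "fmt"

	const (
		treeMaxBinPower = 14                         // burndown.TreeMaxBinPower
		treeMergeMark   = (1 << treeMaxBinPower) - 1 // burndown.TreeMergeMark
	)

	// pack stores the tick in the lower 14 bits and the author above them.
	func pack(person, tick int) int {
		return (tick & treeMergeMark) | person<<treeMaxBinPower
	}

	// unpack reverses pack.
	func unpack(value int) (person, tick int) {
		return value >> treeMaxBinPower, value & treeMergeMark
	}

	func main() {
		person, tick := unpack(pack(42, 1000))
		fmt.Println(person, tick) // 42 1000
	}
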
 
-func (analyser *BurndownAnalysis) onNewDay() {
-	if analyser.day > analyser.previousDay {
-		analyser.previousDay = analyser.day
+func (analyser *BurndownAnalysis) onNewTick() {
+	if analyser.tick > analyser.previousTick {
+		analyser.previousTick = analyser.tick
 	}
 	analyser.mergedAuthor = identity.AuthorMissing
 }
 
 func (analyser *BurndownAnalysis) updateGlobal(currentTime, previousTime, delta int) {
-	_, currentDay := analyser.unpackPersonWithDay(currentTime)
-	_, previousDay := analyser.unpackPersonWithDay(previousTime)
-	currentHistory := analyser.globalHistory[currentDay]
+	_, curTick := analyser.unpackPersonWithTick(currentTime)
+	_, prevTick := analyser.unpackPersonWithTick(previousTime)
+
+	currentHistory := analyser.globalHistory[curTick]
 	if currentHistory == nil {
 		currentHistory = map[int]int64{}
-		analyser.globalHistory[currentDay] = currentHistory
+		analyser.globalHistory[curTick] = currentHistory
 	}
-	currentHistory[previousDay] += int64(delta)
+	currentHistory[prevTick] += int64(delta)
 }
 
 // updateFile is bound to the specific `history` in the closure.
 func (analyser *BurndownAnalysis) updateFile(
 	history sparseHistory, currentTime, previousTime, delta int) {
 
-	_, currentDay := analyser.unpackPersonWithDay(currentTime)
-	_, previousDay := analyser.unpackPersonWithDay(previousTime)
+	_, curTick := analyser.unpackPersonWithTick(currentTime)
+	_, prevTick := analyser.unpackPersonWithTick(previousTime)
 
-	currentHistory := history[currentDay]
+	currentHistory := history[curTick]
 	if currentHistory == nil {
 		currentHistory = map[int]int64{}
-		history[currentDay] = currentHistory
+		history[curTick] = currentHistory
 	}
-	currentHistory[previousDay] += int64(delta)
+	currentHistory[prevTick] += int64(delta)
 }
 
 func (analyser *BurndownAnalysis) updateAuthor(currentTime, previousTime, delta int) {
-	previousAuthor, previousDay := analyser.unpackPersonWithDay(previousTime)
+	previousAuthor, prevTick := analyser.unpackPersonWithTick(previousTime)
 	if previousAuthor == identity.AuthorMissing {
 		return
 	}
-	_, currentDay := analyser.unpackPersonWithDay(currentTime)
+	_, curTick := analyser.unpackPersonWithTick(currentTime)
 	history := analyser.peopleHistories[previousAuthor]
 	if history == nil {
 		history = sparseHistory{}
 		analyser.peopleHistories[previousAuthor] = history
 	}
-	currentHistory := history[currentDay]
+	currentHistory := history[curTick]
 	if currentHistory == nil {
 		currentHistory = map[int]int64{}
-		history[currentDay] = currentHistory
+		history[curTick] = currentHistory
 	}
-	currentHistory[previousDay] += int64(delta)
+	currentHistory[prevTick] += int64(delta)
 }
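
updateGlobal, updateFile and updateAuthor all grow the same structure lazily: a sparseHistory maps the current tick to a map from the birth tick to a signed line count. A condensed sketch of that shared pattern (toy numbers, inside package leaves):

	history := sparseHistory{} // map[int]map[int]int64
	update := func(curTick, prevTick int, delta int64) {
		row := history[curTick]
		if row == nil {
			row = map[int]int64{}
			history[curTick] = row
		}
		row[prevTick] += delta
	}
	update(30, 30, 250) // tick 30: 250 fresh lines appear
	update(30, 0, -100) // tick 30: 100 lines born at tick 0 are deleted
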
 
 func (analyser *BurndownAnalysis) updateMatrix(currentTime, previousTime, delta int) {
-	newAuthor, _ := analyser.unpackPersonWithDay(currentTime)
-	oldAuthor, _ := analyser.unpackPersonWithDay(previousTime)
+	newAuthor, _ := analyser.unpackPersonWithTick(currentTime)
+	oldAuthor, _ := analyser.unpackPersonWithTick(previousTime)
 
 	if oldAuthor == identity.AuthorMissing {
 		return
@@ -1156,7 +1193,8 @@ func (analyser *BurndownAnalysis) updateMatrix(currentTime, previousTime, delta
 }
 
 func (analyser *BurndownAnalysis) newFile(
-	hash plumbing.Hash, name string, author int, day int, size int) (*burndown.File, error) {
+	hash plumbing.Hash, name string, author int, tick int, size int) (*burndown.File, error) {
+
 	updaters := make([]burndown.Updater, 1)
 	updaters[0] = analyser.updateGlobal
 	if analyser.TrackFiles {
@@ -1173,9 +1211,9 @@ func (analyser *BurndownAnalysis) newFile(
 	if analyser.PeopleNumber > 0 {
 		updaters = append(updaters, analyser.updateAuthor)
 		updaters = append(updaters, analyser.updateMatrix)
-		day = analyser.packPersonWithDay(author, day)
+		tick = analyser.packPersonWithTick(author, tick)
 	}
-	return burndown.NewFile(day, size, analyser.fileAllocator, updaters...), nil
+	return burndown.NewFile(tick, size, analyser.fileAllocator, updaters...), nil
 }
 
 func (analyser *BurndownAnalysis) handleInsertion(
@@ -1192,12 +1230,12 @@ func (analyser *BurndownAnalysis) handleInsertion(
 		return fmt.Errorf("file %s already exists", name)
 	}
 	var hash plumbing.Hash
-	if analyser.day != burndown.TreeMergeMark {
+	if analyser.tick != burndown.TreeMergeMark {
 		hash = blob.Hash
 	}
-	file, err = analyser.newFile(hash, name, author, analyser.day, lines)
+	file, err = analyser.newFile(hash, name, author, analyser.tick, lines)
 	analyser.files[name] = file
-	if analyser.day == burndown.TreeMergeMark {
+	if analyser.tick == burndown.TreeMergeMark {
 		analyser.mergedFiles[name] = true
 	}
 	return err
@@ -1222,7 +1260,7 @@ func (analyser *BurndownAnalysis) handleDeletion(
 	if !exists {
 		return nil
 	}
-	file.Update(analyser.packPersonWithDay(author, analyser.day), 0, 0, lines)
+	file.Update(analyser.packPersonWithTick(author, analyser.tick), 0, 0, lines)
 	file.Delete()
 	delete(analyser.files, name)
 	delete(analyser.fileHistories, name)
@@ -1237,7 +1275,7 @@ func (analyser *BurndownAnalysis) handleDeletion(
 			}
 		}
 	}
-	if analyser.day == burndown.TreeMergeMark {
+	if analyser.tick == burndown.TreeMergeMark {
 		analyser.mergedFiles[name] = false
 	}
 	return nil
@@ -1247,7 +1285,7 @@ func (analyser *BurndownAnalysis) handleModification(
 	change *object.Change, author int, cache map[plumbing.Hash]*items.CachedBlob,
 	diffs map[string]items.FileDiffData) error {
 
-	if analyser.day == burndown.TreeMergeMark {
+	if analyser.tick == burndown.TreeMergeMark {
 		analyser.mergedFiles[change.To.Name] = true
 	}
 	file, exists := analyser.files[change.From.Name]
@@ -1297,10 +1335,10 @@ func (analyser *BurndownAnalysis) handleModification(
 	apply := func(edit diffmatchpatch.Diff) {
 		length := utf8.RuneCountInString(edit.Text)
 		if edit.Type == diffmatchpatch.DiffInsert {
-			file.Update(analyser.packPersonWithDay(author, analyser.day), position, length, 0)
+			file.Update(analyser.packPersonWithTick(author, analyser.tick), position, length, 0)
 			position += length
 		} else {
-			file.Update(analyser.packPersonWithDay(author, analyser.day), position, 0, length)
+			file.Update(analyser.packPersonWithTick(author, analyser.tick), position, 0, length)
 		}
 		if analyser.Debug {
 			file.Validate()
@@ -1315,7 +1353,7 @@ func (analyser *BurndownAnalysis) handleModification(
 		length := utf8.RuneCountInString(edit.Text)
 		debugError := func() {
 			log.Printf("%s: internal diff error\n", change.To.Name)
-			log.Printf("Update(%d, %d, %d (0), %d (0))\n", analyser.day, position,
+			log.Printf("Update(%d, %d, %d (0), %d (0))\n", analyser.tick, position,
 				length, utf8.RuneCountInString(pending.Text))
 			if dumpBefore != "" {
 				log.Printf("====TREE BEFORE====\n%s====END====\n", dumpBefore)
@@ -1335,7 +1373,7 @@ func (analyser *BurndownAnalysis) handleModification(
 					debugError()
 					return errors.New("DiffInsert may not appear after DiffInsert")
 				}
-				file.Update(analyser.packPersonWithDay(author, analyser.day), position, length,
+				file.Update(analyser.packPersonWithTick(author, analyser.tick), position, length,
 					utf8.RuneCountInString(pending.Text))
 				if analyser.Debug {
 					file.Validate()
@@ -1378,7 +1416,7 @@ func (analyser *BurndownAnalysis) handleRename(from, to string) error {
 	}
 	delete(analyser.files, from)
 	analyser.files[to] = file
-	if analyser.day == burndown.TreeMergeMark {
+	if analyser.tick == burndown.TreeMergeMark {
 		analyser.mergedFiles[from] = false
 	}
 
@@ -1428,37 +1466,37 @@ func (analyser *BurndownAnalysis) handleRename(from, to string) error {
 }
 
 func (analyser *BurndownAnalysis) groupSparseHistory(
-	history sparseHistory, lastDay int) (DenseHistory, int) {
+	history sparseHistory, lastTick int) (DenseHistory, int) {
 
 	if len(history) == 0 {
 		panic("empty history")
 	}
-	var days []int
-	for day := range history {
-		days = append(days, day)
+	var ticks []int
+	for tick := range history {
+		ticks = append(ticks, tick)
 	}
-	sort.Ints(days)
-	if lastDay >= 0 {
-		if days[len(days)-1] < lastDay {
-			days = append(days, lastDay)
-		} else if days[len(days)-1] > lastDay {
-			panic("days corruption")
+	sort.Ints(ticks)
+	if lastTick >= 0 {
+		if ticks[len(ticks)-1] < lastTick {
+			ticks = append(ticks, lastTick)
+		} else if ticks[len(ticks)-1] > lastTick {
+			panic("ticks corruption")
 		}
 	} else {
-		lastDay = days[len(days)-1]
+		lastTick = ticks[len(ticks)-1]
 	}
 	// [y][x]
 	// y - sampling
 	// x - granularity
-	samples := lastDay/analyser.Sampling + 1
-	bands := lastDay/analyser.Granularity + 1
+	samples := lastTick/analyser.Sampling + 1
+	bands := lastTick/analyser.Granularity + 1
 	result := make(DenseHistory, samples)
 	for i := 0; i < samples; i++ {
 		result[i] = make([]int64, bands)
 	}
 	prevsi := 0
-	for _, day := range days {
-		si := day / analyser.Sampling
+	for _, tick := range ticks {
+		si := tick / analyser.Sampling
 		if si > prevsi {
 			state := result[prevsi]
 			for i := prevsi + 1; i <= si; i++ {
@@ -1467,11 +1505,11 @@ func (analyser *BurndownAnalysis) groupSparseHistory(
 			prevsi = si
 		}
 		sample := result[si]
-		for bday, value := range history[day] {
-			sample[bday/analyser.Granularity] += value
+		for t, value := range history[tick] {
+			sample[t/analyser.Granularity] += value
 		}
 	}
-	return result, lastDay
+	return result, lastTick
 }
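
The index arithmetic in groupSparseHistory is easiest to see with concrete numbers (a toy example, not taken from this commit):

	package main

	import "fmt"

	func main() {
		sampling, granularity, lastTick := 15, 20, 59
		fmt.Println(lastTick/sampling + 1)    // samples = 4 (rows, y)
		fmt.Println(lastTick/granularity + 1) // bands = 3 (columns, x)
		// A sparse entry history[32][17] is accumulated into
		// result[32/15][17/20], i.e. result[2][0].
	}
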
 
 func init() {

+ 69 - 18
leaves/burndown_test.go

@@ -2,11 +2,13 @@ package leaves
 
 import (
 	"bytes"
+	"errors"
 	"fmt"
 	"io"
 	"io/ioutil"
 	"path"
 	"testing"
+	"time"
 
 	"gopkg.in/src-d/hercules.v9/internal/burndown"
 	"gopkg.in/src-d/hercules.v9/internal/core"
@@ -38,7 +40,7 @@ func TestBurndownMeta(t *testing.T) {
 	assert.Len(t, bd.Provides(), 0)
 	required := [...]string{
 		items.DependencyFileDiff, items.DependencyTreeChanges, items.DependencyBlobCache,
-		items.DependencyDay, identity.DependencyAuthor}
+		items.DependencyTick, identity.DependencyAuthor}
 	for _, name := range required {
 		assert.Contains(t, bd.Requires(), name)
 	}
@@ -68,6 +70,7 @@ func TestBurndownConfigure(t *testing.T) {
 	facts[ConfigBurndownHibernationThreshold] = 100
 	facts[ConfigBurndownHibernationToDisk] = true
 	facts[ConfigBurndownHibernationDirectory] = "xxx"
+	facts[items.FactTickSize] = 24 * time.Hour
 	facts[identity.FactIdentityDetectorPeopleCount] = 5
 	facts[identity.FactIdentityDetectorReversedPeopleDict] = bd.Requires()
 	assert.Nil(t, bd.Configure(facts))
@@ -79,6 +82,7 @@ func TestBurndownConfigure(t *testing.T) {
 	assert.True(t, bd.HibernationToDisk)
 	assert.Equal(t, bd.HibernationDirectory, "xxx")
 	assert.Equal(t, bd.Debug, true)
+	assert.Equal(t, bd.tickSize, 24*time.Hour)
 	assert.Equal(t, bd.reversedPeopleDict, bd.Requires())
 	facts[ConfigBurndownTrackPeople] = false
 	facts[identity.FactIdentityDetectorPeopleCount] = 50
@@ -142,7 +146,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 
 	// stage 1
 	deps[identity.DependencyAuthor] = 0
-	deps[items.DependencyDay] = 0
+	deps[items.DependencyTick] = 0
 	cache := map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
 	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
@@ -202,7 +206,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 	result, err = bd.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
-	assert.Equal(t, bd.previousDay, 0)
+	assert.Equal(t, bd.previousTick, 0)
 	assert.Len(t, bd.files, 3)
 	assert.Equal(t, bd.files["cmd/hercules/main.go"].Len(), 207)
 	assert.Equal(t, bd.files["analyser.go"].Len(), 926)
@@ -237,7 +241,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 	// stage 2
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
 	deps[core.DependencyIsMerge] = false
-	deps[items.DependencyDay] = 30
+	deps[items.DependencyTick] = 30
 	cache = map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
 	AddHash(t, cache, "baa64828831d174f40140e4b3cfa77d1e917a2c1")
@@ -304,7 +308,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 	result, err = bd.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
-	assert.Equal(t, bd.previousDay, 30)
+	assert.Equal(t, bd.previousTick, 30)
 	assert.Len(t, bd.files, 2)
 	assert.Equal(t, bd.files["cmd/hercules/main.go"].Len(), 290)
 	assert.Equal(t, bd.files["burndown.go"].Len(), 543)
@@ -360,7 +364,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 
 func TestBurndownConsumeMergeAuthorMissing(t *testing.T) {
 	deps := map[string]interface{}{}
-	deps[items.DependencyDay] = 0
+	deps[items.DependencyTick] = 0
 	cache := map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
 	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
@@ -471,12 +475,13 @@ func bakeBurndownForSerialization(t *testing.T, firstAuthor, secondAuthor int) (
 		Sampling:     30,
 		PeopleNumber: 2,
 		TrackFiles:   true,
+		tickSize:     24 * time.Hour,
 	}
 	assert.Nil(t, bd.Initialize(test.Repository))
 	deps := map[string]interface{}{}
 	// stage 1
 	deps[identity.DependencyAuthor] = firstAuthor
-	deps[items.DependencyDay] = 0
+	deps[items.DependencyTick] = 0
 	cache := map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
 	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
@@ -537,7 +542,7 @@ func bakeBurndownForSerialization(t *testing.T, firstAuthor, secondAuthor int) (
 	// stage 2
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
 	deps[identity.DependencyAuthor] = secondAuthor
-	deps[items.DependencyDay] = 30
+	deps[items.DependencyTick] = 30
 	cache = map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
 	AddHash(t, cache, "baa64828831d174f40140e4b3cfa77d1e917a2c1")
@@ -615,6 +620,7 @@ func TestBurndownSerialize(t *testing.T) {
 	assert.Nil(t, bd.Serialize(out, false, buffer))
 	assert.Equal(t, buffer.String(), `  granularity: 30
   sampling: 30
+  tick_size: 24h0m0s
   "project": |-
     1145    0
      464  369
@@ -648,6 +654,7 @@ func TestBurndownSerialize(t *testing.T) {
 	bd.Serialize(out, true, buffer)
 	msg := pb.BurndownAnalysisResults{}
 	proto.Unmarshal(buffer.Bytes(), &msg)
+	assert.Equal(t, msg.TickSize, int64(24*time.Hour))
 	assert.Equal(t, msg.Granularity, int32(30))
 	assert.Equal(t, msg.Sampling, int32(30))
 	assert.Equal(t, msg.Project.Name, "project")
@@ -702,6 +709,7 @@ func TestBurndownSerializeAuthorMissing(t *testing.T) {
 	assert.Nil(t, bd.Serialize(out, false, buffer))
 	assert.Equal(t, buffer.String(), `  granularity: 30
   sampling: 30
+  tick_size: 24h0m0s
   "project": |-
     1145    0
      464  369
@@ -1050,6 +1058,7 @@ func TestBurndownMergeGlobalHistory(t *testing.T) {
 		reversedPeopleDict: people1[:],
 		sampling:           15,
 		granularity:        20,
+		TickSize:           24 * time.Hour,
 	}
 	c1 := core.CommonAnalysisResult{
 		BeginTime:     600566400, // 1989 Jan 12
@@ -1095,6 +1104,7 @@ func TestBurndownMergeGlobalHistory(t *testing.T) {
 		FileHistories:      map[string][][]int64{},
 		PeopleHistories:    nil,
 		PeopleMatrix:       nil,
+		TickSize:           24 * time.Hour,
 		reversedPeopleDict: people2[:],
 		sampling:           14,
 		granularity:        19,
@@ -1137,10 +1147,13 @@ func TestBurndownMergeGlobalHistory(t *testing.T) {
 	res2.PeopleMatrix[1][1] = 600
 	res2.PeopleMatrix[1][2] = 700
 	res2.PeopleMatrix[1][3] = 800
-	bd := BurndownAnalysis{}
+	bd := BurndownAnalysis{
+		tickSize: 24 * time.Hour,
+	}
 	merged := bd.MergeResults(res1, res2, &c1, &c2).(BurndownResult)
 	assert.Equal(t, merged.granularity, 19)
 	assert.Equal(t, merged.sampling, 14)
+	assert.Equal(t, merged.TickSize, 24*time.Hour)
 	assert.Len(t, merged.GlobalHistory, 5)
 	for _, row := range merged.GlobalHistory {
 		assert.Len(t, row, 4)
@@ -1174,12 +1187,40 @@ func TestBurndownMergeGlobalHistory(t *testing.T) {
 	assert.Nil(t, bd.serializeBinary(&merged, ioutil.Discard))
 }
 
+func TestBurndownMergeGlobalHistory_withDifferentTickSizes(t *testing.T) {
+	res1 := BurndownResult{
+		TickSize: 13 * time.Hour,
+	}
+	c1 := core.CommonAnalysisResult{
+		BeginTime:     600566400, // 1989 Jan 12
+		EndTime:       604713600, // 1989 March 1
+		CommitsNumber: 10,
+		RunTime:       100000,
+	}
+	res2 := BurndownResult{
+		TickSize: 24 * time.Hour,
+	}
+	c2 := core.CommonAnalysisResult{
+		BeginTime:     601084800, // 1989 Jan 18
+		EndTime:       605923200, // 1989 March 15
+		CommitsNumber: 10,
+		RunTime:       100000,
+	}
+	bd := BurndownAnalysis{
+		tickSize: 24 * time.Hour,
+	}
+	merged := bd.MergeResults(res1, res2, &c1, &c2)
+	assert.IsType(t, errors.New(""), merged)
+	assert.Contains(t, merged.(error).Error(), "mismatching tick sizes")
+}
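
The assertion only pins down the error substring, so the guard inside MergeResults presumably has roughly this shape (a hypothetical sketch; the variable names are invented):

	// Somewhere early in BurndownAnalysis.MergeResults:
	if bar1.TickSize != bar2.TickSize {
		return fmt.Errorf("mismatching tick sizes (%v and %v) during merge",
			bar1.TickSize, bar2.TickSize)
	}
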
+
 func TestBurndownMergeNils(t *testing.T) {
 	res1 := BurndownResult{
 		GlobalHistory:      nil,
 		FileHistories:      map[string][][]int64{},
 		PeopleHistories:    nil,
 		PeopleMatrix:       nil,
+		TickSize:           24 * time.Hour,
 		reversedPeopleDict: nil,
 		sampling:           15,
 		granularity:        20,
@@ -1195,6 +1236,7 @@ func TestBurndownMergeNils(t *testing.T) {
 		FileHistories:      nil,
 		PeopleHistories:    nil,
 		PeopleMatrix:       nil,
+		TickSize:           24 * time.Hour,
 		reversedPeopleDict: nil,
 		sampling:           14,
 		granularity:        19,
@@ -1205,10 +1247,13 @@ func TestBurndownMergeNils(t *testing.T) {
 		CommitsNumber: 10,
 		RunTime:       100000,
 	}
-	bd := BurndownAnalysis{}
+	bd := BurndownAnalysis{
+		tickSize: 24 * time.Hour,
+	}
 	merged := bd.MergeResults(res1, res2, &c1, &c2).(BurndownResult)
 	assert.Equal(t, merged.granularity, 19)
 	assert.Equal(t, merged.sampling, 14)
+	assert.Equal(t, merged.TickSize, 24*time.Hour)
 	assert.Nil(t, merged.GlobalHistory)
 	assert.Nil(t, merged.FileHistories)
 	assert.Nil(t, merged.PeopleHistories)
@@ -1287,6 +1332,7 @@ func TestBurndownDeserialize(t *testing.T) {
 	assert.True(t, len(result.PeopleMatrix) > 0)
 	assert.Equal(t, result.granularity, 30)
 	assert.Equal(t, result.sampling, 30)
+	assert.Equal(t, result.TickSize, 24*time.Hour)
 }
 
 func TestBurndownEmptyFileHistory(t *testing.T) {
@@ -1356,17 +1402,17 @@ func TestBurndownAddBurndownMatrix(t *testing.T) {
 		[]int64{7181, 18750, 55841, 0},
 		[]int64{6345, 16704, 17110, 55981},
 	}
-	daily := make([][]float32, 4*30)
-	for i := range daily {
-		daily[i] = make([]float32, 4*30)
+	perTick := make([][]float32, 4*30)
+	for i := range perTick {
+		perTick[i] = make([]float32, 4*30)
 	}
-	addBurndownMatrix(h, 30, 30, daily, 0)
+	addBurndownMatrix(h, 30, 30, perTick, 0)
 	sum := func(x, y int) int64 {
 		var accum float32
 		row := (y+1)*30 - 1
 		offset := x * 30
 		for i := offset; i < offset+30; i++ {
-			accum += daily[row][i]
+			accum += perTick[row][i]
 		}
 		return int64(accum)
 	}
@@ -1448,13 +1494,14 @@ func TestBurndownMergeMatrices(t *testing.T) {
 		CommitsNumber: 6982,
 		RunTime:       1567214,
 	}
-	nh := mergeMatrices(h, nil, 30, 30, 30, 30, cr, cr)
+	bd := BurndownAnalysis{tickSize: 24 * time.Hour}
+	nh := bd.mergeMatrices(h, nil, 30, 30, 30, 30, cr, cr)
 	for y, row := range nh {
 		for x, v := range row {
 			assert.InDelta(t, v, h[y][x], 1, fmt.Sprintf("y=%d x=%d", y, x))
 		}
 	}
-	nh = mergeMatrices(h, h, 30, 30, 30, 30, cr, cr)
+	nh = bd.mergeMatrices(h, h, 30, 30, 30, 30, cr, cr)
 	for y, row := range nh {
 		for x, v := range row {
 			assert.InDelta(t, v, h[y][x]*2, 1, fmt.Sprintf("y=%d x=%d", y, x))
@@ -1479,6 +1526,7 @@ func TestBurndownMergePeopleHistories(t *testing.T) {
 		FileHistories:      map[string][][]int64{},
 		PeopleHistories:    [][][]int64{h1, h1},
 		PeopleMatrix:       nil,
+		TickSize:           24 * time.Hour,
 		reversedPeopleDict: []string{"one", "three"},
 		sampling:           15, // 3
 		granularity:        20, // 3
@@ -1494,6 +1542,7 @@ func TestBurndownMergePeopleHistories(t *testing.T) {
 		FileHistories:      nil,
 		PeopleHistories:    [][][]int64{h2, h2},
 		PeopleMatrix:       nil,
+		TickSize:           24 * time.Hour,
 		reversedPeopleDict: []string{"one", "two"},
 		sampling:           14,
 		granularity:        19,
@@ -1504,7 +1553,9 @@ func TestBurndownMergePeopleHistories(t *testing.T) {
 		CommitsNumber: 10,
 		RunTime:       100000,
 	}
-	bd := BurndownAnalysis{}
+	bd := BurndownAnalysis{
+		tickSize: 24 * time.Hour,
+	}
 	merged := bd.MergeResults(res1, res2, &c1, &c2).(BurndownResult)
 	mh := [][]int64{
 		{560, 0, 0, 0},

+ 45 - 45
leaves/comment_sentiment.go

@@ -21,7 +21,7 @@ import (
 	"gopkg.in/src-d/hercules.v9/internal/pb"
 	items "gopkg.in/src-d/hercules.v9/internal/plumbing"
 	uast_items "gopkg.in/src-d/hercules.v9/internal/plumbing/uast"
-	"gopkg.in/vmarkovtsev/BiDiSentiment.v1"
+	sentiment "gopkg.in/vmarkovtsev/BiDiSentiment.v1"
 )
 
 // CommentSentimentAnalysis measures comment sentiment through time.
@@ -31,17 +31,17 @@ type CommentSentimentAnalysis struct {
 	MinCommentLength int
 	Gap              float32
 
-	commentsByDay map[int][]string
-	commitsByDay  map[int][]plumbing.Hash
-	xpather       *uast_items.ChangesXPather
+	commentsByTick map[int][]string
+	commitsByTick  map[int][]plumbing.Hash
+	xpather        *uast_items.ChangesXPather
 }
 
-// CommentSentimentResult contains the sentiment values per day, where 1 means very negative
+// CommentSentimentResult contains the sentiment values per tick, where 1 means very negative
 // and 0 means very positive.
 type CommentSentimentResult struct {
-	EmotionsByDay map[int]float32
-	CommentsByDay map[int][]string
-	commitsByDay  map[int][]plumbing.Hash
+	EmotionsByTick map[int]float32
+	CommentsByTick map[int][]string
+	commitsByTick  map[int][]plumbing.Hash
 }
 
 const (
@@ -80,7 +80,7 @@ func (sent *CommentSentimentAnalysis) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (sent *CommentSentimentAnalysis) Requires() []string {
-	arr := [...]string{uast_items.DependencyUastChanges, items.DependencyDay}
+	arr := [...]string{uast_items.DependencyUastChanges, items.DependencyTick}
 	return arr[:]
 }
 
@@ -123,7 +123,7 @@ func (sent *CommentSentimentAnalysis) Configure(facts map[string]interface{}) er
 		sent.MinCommentLength = val.(int)
 	}
 	sent.validate()
-	sent.commitsByDay = facts[items.FactCommitsByDay].(map[int][]plumbing.Hash)
+	sent.commitsByTick = facts[items.FactCommitsByTick].(map[int][]plumbing.Hash)
 	return nil
 }
 
@@ -143,7 +143,7 @@ func (sent *CommentSentimentAnalysis) validate() {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (sent *CommentSentimentAnalysis) Initialize(repository *git.Repository) error {
-	sent.commentsByDay = map[int][]string{}
+	sent.commentsByTick = map[int][]string{}
 	sent.xpather = &uast_items.ChangesXPather{XPath: "//uast:Comment"}
 	sent.validate()
 	sent.OneShotMergeProcessor.Initialize()
@@ -160,33 +160,33 @@ func (sent *CommentSentimentAnalysis) Consume(deps map[string]interface{}) (map[
 		return nil, nil
 	}
 	changes := deps[uast_items.DependencyUastChanges].([]uast_items.Change)
-	day := deps[items.DependencyDay].(int)
+	tick := deps[items.DependencyTick].(int)
 	commentNodes, _ := sent.xpather.Extract(changes)
 	comments := sent.mergeComments(commentNodes)
-	dayComments := sent.commentsByDay[day]
-	if dayComments == nil {
-		dayComments = []string{}
+	tickComments := sent.commentsByTick[tick]
+	if tickComments == nil {
+		tickComments = []string{}
 	}
-	dayComments = append(dayComments, comments...)
-	sent.commentsByDay[day] = dayComments
+	tickComments = append(tickComments, comments...)
+	sent.commentsByTick[tick] = tickComments
 	return nil, nil
 }
 
 // Finalize returns the result of the analysis. Further Consume() calls are not expected.
 func (sent *CommentSentimentAnalysis) Finalize() interface{} {
 	result := CommentSentimentResult{
-		EmotionsByDay: map[int]float32{},
-		CommentsByDay: map[int][]string{},
-		commitsByDay:  sent.commitsByDay,
+		EmotionsByTick: map[int]float32{},
+		CommentsByTick: map[int][]string{},
+		commitsByTick:  sent.commitsByTick,
 	}
-	days := make([]int, 0, len(sent.commentsByDay))
-	for day := range sent.commentsByDay {
-		days = append(days, day)
+	ticks := make([]int, 0, len(sent.commentsByTick))
+	for tick := range sent.commentsByTick {
+		ticks = append(ticks, tick)
 	}
-	sort.Ints(days)
+	sort.Ints(ticks)
 	var texts []string
-	for _, key := range days {
-		texts = append(texts, sent.commentsByDay[key]...)
+	for _, key := range ticks {
+		texts = append(texts, sent.commentsByTick[key]...)
 	}
 	session, err := sentiment.OpenSession()
 	if err != nil {
@@ -217,10 +217,10 @@ func (sent *CommentSentimentAnalysis) Finalize() interface{} {
 		panic(err)
 	}
 	pos := 0
-	for _, key := range days {
+	for _, key := range ticks {
 		sum := float32(0)
-		comments := make([]string, 0, len(sent.commentsByDay[key]))
-		for _, comment := range sent.commentsByDay[key] {
+		comments := make([]string, 0, len(sent.commentsByTick[key]))
+		for _, comment := range sent.commentsByTick[key] {
 			if weights[pos] < 0.5*(1-sent.Gap) || weights[pos] > 0.5*(1+sent.Gap) {
 				sum += weights[pos]
 				comments = append(comments, comment)
@@ -228,8 +228,8 @@ func (sent *CommentSentimentAnalysis) Finalize() interface{} {
 			pos++
 		}
 		if len(comments) > 0 {
-			result.EmotionsByDay[key] = sum / float32(len(comments))
-			result.CommentsByDay[key] = comments
+			result.EmotionsByTick[key] = sum / float32(len(comments))
+			result.CommentsByTick[key] = comments
 		}
 	}
 	return result
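
The Gap check above drops near-neutral comments: only weights outside the band (0.5*(1-Gap), 0.5*(1+Gap)) are kept, where 0 is very positive and 1 is very negative. A standalone sanity check of that arithmetic, with a hypothetical Gap value:

	package main

	import "fmt"

	// keep mirrors the filter: weights inside the neutral band are dropped.
	func keep(weight, gap float32) bool {
		return weight < 0.5*(1-gap) || weight > 0.5*(1+gap)
	}

	func main() {
		gap := float32(0.6) // band is (0.2, 0.8)
		fmt.Println(keep(0.15, gap)) // true: strongly positive
		fmt.Println(keep(0.50, gap)) // false: neutral, dropped
		fmt.Println(keep(0.90, gap)) // true: strongly negative
	}
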
@@ -252,36 +252,36 @@ func (sent *CommentSentimentAnalysis) Serialize(result interface{}, binary bool,
 }
 
 func (sent *CommentSentimentAnalysis) serializeText(result *CommentSentimentResult, writer io.Writer) {
-	days := make([]int, 0, len(result.EmotionsByDay))
-	for day := range result.EmotionsByDay {
-		days = append(days, day)
+	ticks := make([]int, 0, len(result.EmotionsByTick))
+	for tick := range result.EmotionsByTick {
+		ticks = append(ticks, tick)
 	}
-	sort.Ints(days)
-	for _, day := range days {
-		commits := result.commitsByDay[day]
+	sort.Ints(ticks)
+	for _, tick := range ticks {
+		commits := result.commitsByTick[tick]
 		hashes := make([]string, len(commits))
 		for i, hash := range commits {
 			hashes[i] = hash.String()
 		}
 		fmt.Fprintf(writer, "  %d: [%.4f, [%s], \"%s\"]\n",
-			day, result.EmotionsByDay[day], strings.Join(hashes, ","),
-			strings.Join(result.CommentsByDay[day], "|"))
+			tick, result.EmotionsByTick[tick], strings.Join(hashes, ","),
+			strings.Join(result.CommentsByTick[tick], "|"))
 	}
 }
 
 func (sent *CommentSentimentAnalysis) serializeBinary(
 	result *CommentSentimentResult, writer io.Writer) error {
 	message := pb.CommentSentimentResults{
-		SentimentByDay: map[int32]*pb.Sentiment{},
+		SentimentByTick: map[int32]*pb.Sentiment{},
 	}
-	for key, val := range result.EmotionsByDay {
-		commits := make([]string, len(result.commitsByDay[key]))
-		for i, commit := range result.commitsByDay[key] {
+	for key, val := range result.EmotionsByTick {
+		commits := make([]string, len(result.commitsByTick[key]))
+		for i, commit := range result.commitsByTick[key] {
 			commits[i] = commit.String()
 		}
-		message.SentimentByDay[int32(key)] = &pb.Sentiment{
+		message.SentimentByTick[int32(key)] = &pb.Sentiment{
 			Value:    val,
-			Comments: result.CommentsByDay[key],
+			Comments: result.CommentsByTick[key],
 			Commits:  commits,
 		}
 	}

+ 28 - 28
leaves/comment_sentiment_test.go

@@ -10,7 +10,7 @@ import (
 
 	"github.com/gogo/protobuf/proto"
 	"github.com/stretchr/testify/assert"
-	"gopkg.in/bblfsh/client-go.v3"
+	bblfsh "gopkg.in/bblfsh/client-go.v3"
 	"gopkg.in/bblfsh/client-go.v3/tools"
 	"gopkg.in/bblfsh/sdk.v2/uast"
 	"gopkg.in/bblfsh/sdk.v2/uast/nodes"
@@ -30,7 +30,7 @@ func fixtureCommentSentiment() *CommentSentimentAnalysis {
 		MinCommentLength: DefaultCommentSentimentCommentMinLength,
 	}
 	facts := map[string]interface{}{
-		items.FactCommitsByDay: map[int][]plumbing.Hash{},
+		items.FactCommitsByTick: map[int][]plumbing.Hash{},
 	}
 	sent.Configure(facts)
 	sent.Initialize(test.Repository)
@@ -41,7 +41,7 @@ func TestCommentSentimentMeta(t *testing.T) {
 	sent := CommentSentimentAnalysis{}
 	assert.Equal(t, sent.Name(), "Sentiment")
 	assert.Equal(t, len(sent.Provides()), 0)
-	required := [...]string{uast_items.DependencyUastChanges, items.DependencyDay}
+	required := [...]string{uast_items.DependencyUastChanges, items.DependencyTick}
 	for _, name := range required {
 		assert.Contains(t, sent.Requires(), name)
 	}
@@ -62,7 +62,7 @@ func TestCommentSentimentConfigure(t *testing.T) {
 	facts := map[string]interface{}{}
 	facts[ConfigCommentSentimentMinLength] = 77
 	facts[ConfigCommentSentimentGap] = float32(0.77)
-	facts[items.FactCommitsByDay] = map[int][]plumbing.Hash{}
+	facts[items.FactCommitsByTick] = map[int][]plumbing.Hash{}
 	sent.Configure(facts)
 	assert.Equal(t, sent.Gap, float32(0.77))
 	assert.Equal(t, sent.MinCommentLength, 77)
@@ -100,13 +100,13 @@ func TestCommentSentimentFork(t *testing.T) {
 func TestCommentSentimentSerializeText(t *testing.T) {
 	sent := fixtureCommentSentiment()
 	result := CommentSentimentResult{
-		EmotionsByDay: map[int]float32{},
-		CommentsByDay: map[int][]string{},
-		commitsByDay:  map[int][]plumbing.Hash{},
+		EmotionsByTick: map[int]float32{},
+		CommentsByTick: map[int][]string{},
+		commitsByTick:  map[int][]plumbing.Hash{},
 	}
-	result.EmotionsByDay[9] = 0.5
-	result.CommentsByDay[9] = []string{"test", "hello"}
-	result.commitsByDay[9] = []plumbing.Hash{plumbing.NewHash("4f7c7a154638a0f2468276c56188d90c9cef0dfc")}
+	result.EmotionsByTick[9] = 0.5
+	result.CommentsByTick[9] = []string{"test", "hello"}
+	result.commitsByTick[9] = []plumbing.Hash{plumbing.NewHash("4f7c7a154638a0f2468276c56188d90c9cef0dfc")}
 	buffer := &bytes.Buffer{}
 	sent.Serialize(result, false, buffer)
 	assert.Equal(t, buffer.String(), "  9: [0.5000, [4f7c7a154638a0f2468276c56188d90c9cef0dfc], \"test|hello\"]\n")
@@ -115,32 +115,32 @@ func TestCommentSentimentSerializeText(t *testing.T) {
 func TestCommentSentimentSerializeBinary(t *testing.T) {
 	sent := fixtureCommentSentiment()
 	result := CommentSentimentResult{
-		EmotionsByDay: map[int]float32{},
-		CommentsByDay: map[int][]string{},
-		commitsByDay:  map[int][]plumbing.Hash{},
+		EmotionsByTick: map[int]float32{},
+		CommentsByTick: map[int][]string{},
+		commitsByTick:  map[int][]plumbing.Hash{},
 	}
-	result.EmotionsByDay[9] = 0.5
-	result.CommentsByDay[9] = []string{"test", "hello"}
-	result.commitsByDay[9] = []plumbing.Hash{plumbing.NewHash("4f7c7a154638a0f2468276c56188d90c9cef0dfc")}
+	result.EmotionsByTick[9] = 0.5
+	result.CommentsByTick[9] = []string{"test", "hello"}
+	result.commitsByTick[9] = []plumbing.Hash{plumbing.NewHash("4f7c7a154638a0f2468276c56188d90c9cef0dfc")}
 	buffer := &bytes.Buffer{}
 	sent.Serialize(result, true, buffer)
 	msg := pb.CommentSentimentResults{}
 	proto.Unmarshal(buffer.Bytes(), &msg)
-	assert.Len(t, msg.SentimentByDay, 1)
-	assert.Equal(t, msg.SentimentByDay[int32(9)].Commits, []string{"4f7c7a154638a0f2468276c56188d90c9cef0dfc"})
-	assert.Equal(t, msg.SentimentByDay[int32(9)].Comments, []string{"test", "hello"})
-	assert.Equal(t, msg.SentimentByDay[int32(9)].Value, float32(0.5))
+	assert.Len(t, msg.SentimentByTick, 1)
+	assert.Equal(t, msg.SentimentByTick[int32(9)].Commits, []string{"4f7c7a154638a0f2468276c56188d90c9cef0dfc"})
+	assert.Equal(t, msg.SentimentByTick[int32(9)].Comments, []string{"test", "hello"})
+	assert.Equal(t, msg.SentimentByTick[int32(9)].Value, float32(0.5))
 }
 
 func TestCommentSentimentFinalize(t *testing.T) {
 	sent := fixtureCommentSentiment()
-	sent.commitsByDay = testSentimentCommits
-	sent.commentsByDay = testSentimentComments
+	sent.commitsByTick = testSentimentCommits
+	sent.commentsByTick = testSentimentComments
 	result := sent.Finalize().(CommentSentimentResult)
 	for key, vals := range testSentimentComments {
-		assert.Equal(t, vals, result.CommentsByDay[key])
-		assert.True(t, result.EmotionsByDay[key] >= 0)
-		assert.True(t, result.EmotionsByDay[key] <= 1)
+		assert.Equal(t, vals, result.CommentsByTick[key])
+		assert.True(t, result.EmotionsByTick[key] >= 0)
+		assert.True(t, result.EmotionsByTick[key] <= 1)
 	}
 }
 
@@ -166,7 +166,7 @@ func TestCommentSentimentConsume(t *testing.T) {
 	}
 	gitChange := test.FakeChangeForName("labours.py", hash1, hash2)
 	deps := map[string]interface{}{
-		items.DependencyDay: 0,
+		items.DependencyTick: 0,
 		uast_items.DependencyUastChanges: []uast_items.Change{
 			{Before: root1, After: root2, Change: gitChange},
 		},
@@ -176,8 +176,8 @@ func TestCommentSentimentConsume(t *testing.T) {
 	result, err := sent.Consume(deps)
 	assert.Nil(t, err)
 	assert.Nil(t, result)
-	assert.Len(t, sent.commentsByDay, 1)
-	assert.Len(t, sent.commentsByDay[0], 4)
+	assert.Len(t, sent.commentsByTick, 1)
+	assert.Len(t, sent.commentsByTick[0], 4)
 }
 
 var (

+ 52 - 52
leaves/devs.go

@@ -27,8 +27,8 @@ type DevsAnalysis struct {
 	// into account.
 	ConsiderEmptyCommits bool
 
-	// days maps days to developers to stats
-	days map[int]map[int]*DevDay
+	// ticks maps ticks to developers to stats
+	ticks map[int]map[int]*DevTick
 	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
 	reversedPeopleDict []string
 }
@@ -36,16 +36,16 @@ type DevsAnalysis struct {
 // DevsResult is returned by DevsAnalysis.Finalize() and carries the daily statistics
 // per developer.
 type DevsResult struct {
-	// Days is <day index> -> <developer index> -> daily stats
-	Days map[int]map[int]*DevDay
+	// Ticks is <tick index> -> <developer index> -> stats for that tick
+	Ticks map[int]map[int]*DevTick
 
 	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
 	reversedPeopleDict []string
 }
 
-// DevDay is the statistics for a development day and a particular developer.
-type DevDay struct {
-	// Commits is the number of commits made by a particular developer in a particular day.
+// DevTick is the statistics for a development tick and a particular developer.
+type DevTick struct {
+	// Commits is the number of commits made by a particular developer in a particular tick.
 	Commits int
 	items.LineStats
 	// LanguagesDetection carries fine-grained line stats per programming language.
@@ -74,7 +74,7 @@ func (devs *DevsAnalysis) Provides() []string {
 // entities are Provides() upstream.
 func (devs *DevsAnalysis) Requires() []string {
 	arr := [...]string{
-		identity.DependencyAuthor, items.DependencyTreeChanges, items.DependencyDay,
+		identity.DependencyAuthor, items.DependencyTreeChanges, items.DependencyTick,
 		items.DependencyLanguages, items.DependencyLineStats}
 	return arr[:]
 }
@@ -114,7 +114,7 @@ func (devs *DevsAnalysis) Description() string {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (devs *DevsAnalysis) Initialize(repository *git.Repository) error {
-	devs.days = map[int]map[int]*DevDay{}
+	devs.ticks = map[int]map[int]*DevTick{}
 	devs.OneShotMergeProcessor.Initialize()
 	return nil
 }
@@ -133,16 +133,16 @@ func (devs *DevsAnalysis) Consume(deps map[string]interface{}) (map[string]inter
 	if len(treeDiff) == 0 && !devs.ConsiderEmptyCommits {
 		return nil, nil
 	}
-	day := deps[items.DependencyDay].(int)
-	devsDay, exists := devs.days[day]
+	tick := deps[items.DependencyTick].(int)
+	devsTick, exists := devs.ticks[tick]
 	if !exists {
-		devsDay = map[int]*DevDay{}
-		devs.days[day] = devsDay
+		devsTick = map[int]*DevTick{}
+		devs.ticks[tick] = devsTick
 	}
-	dd, exists := devsDay[author]
+	dd, exists := devsTick[author]
 	if !exists {
-		dd = &DevDay{Languages: map[string]items.LineStats{}}
-		devsDay[author] = dd
+		dd = &DevTick{Languages: map[string]items.LineStats{}}
+		devsTick[author] = dd
 	}
 	dd.Commits++
 	if deps[core.DependencyIsMerge].(bool) {
@@ -170,7 +170,7 @@ func (devs *DevsAnalysis) Consume(deps map[string]interface{}) (map[string]inter
 // Finalize returns the result of the analysis. Further Consume() calls are not expected.
 func (devs *DevsAnalysis) Finalize() interface{} {
 	return DevsResult{
-		Days:               devs.days,
+		Ticks:              devs.ticks,
 		reversedPeopleDict: devs.reversedPeopleDict,
 	}
 }
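
Spelled out, the result carries a two-level map; a sketch with the same toy numbers the serialization tests below use (inside package leaves, where items is the plumbing import):

	res := DevsResult{Ticks: map[int]map[int]*DevTick{}}
	res.Ticks[1] = map[int]*DevTick{} // tick 1
	res.Ticks[1][0] = &DevTick{       // developer 0
		Commits:   10,
		LineStats: items.LineStats{Added: 20, Removed: 30, Changed: 40},
		Languages: map[string]items.LineStats{"Go": {Added: 2, Removed: 3, Changed: 4}},
	}
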
@@ -198,16 +198,16 @@ func (devs *DevsAnalysis) Deserialize(pbmessage []byte) (interface{}, error) {
 	if err != nil {
 		return nil, err
 	}
-	days := map[int]map[int]*DevDay{}
-	for day, dd := range message.Days {
-		rdd := map[int]*DevDay{}
-		days[int(day)] = rdd
+	ticks := map[int]map[int]*DevTick{}
+	for tick, dd := range message.Ticks {
+		rdd := map[int]*DevTick{}
+		ticks[int(tick)] = rdd
 		for dev, stats := range dd.Devs {
 			if dev == -1 {
 				dev = identity.AuthorMissing
 			}
 			languages := map[string]items.LineStats{}
-			rdd[int(dev)] = &DevDay{
+			rdd[int(dev)] = &DevTick{
 				Commits: int(stats.Commits),
 				LineStats: items.LineStats{
 					Added:   int(stats.Stats.Added),
@@ -226,7 +226,7 @@ func (devs *DevsAnalysis) Deserialize(pbmessage []byte) (interface{}, error) {
 		}
 	}
 	result := DevsResult{
-		Days:               days,
+		Ticks:              ticks,
 		reversedPeopleDict: message.DevIndex,
 	}
 	return result, nil
@@ -269,13 +269,13 @@ func (devs *DevsAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAn
 			invDevIndex2[pair.Index2-1] = i
 		}
 	}
-	newDays := map[int]map[int]*DevDay{}
-	merged.Days = newDays
-	for day, dd := range cr1.Days {
-		newdd, exists := newDays[day]
+	newTicks := map[int]map[int]*DevTick{}
+	merged.Ticks = newTicks
+	for tick, dd := range cr1.Ticks {
+		newdd, exists := newTicks[tick]
 		if !exists {
-			newdd = map[int]*DevDay{}
-			newDays[day] = newdd
+			newdd = map[int]*DevTick{}
+			newTicks[tick] = newdd
 		}
 		for dev, stats := range dd {
 			newdev := dev
@@ -284,7 +284,7 @@ func (devs *DevsAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAn
 			}
 			newstats, exists := newdd[newdev]
 			if !exists {
-				newstats = &DevDay{Languages: map[string]items.LineStats{}}
+				newstats = &DevTick{Languages: map[string]items.LineStats{}}
 				newdd[newdev] = newstats
 			}
 			newstats.Commits += stats.Commits
@@ -301,11 +301,11 @@ func (devs *DevsAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAn
 			}
 		}
 	}
-	for day, dd := range cr2.Days {
-		newdd, exists := newDays[day]
+	for tick, dd := range cr2.Ticks {
+		newdd, exists := newTicks[tick]
 		if !exists {
-			newdd = map[int]*DevDay{}
-			newDays[day] = newdd
+			newdd = map[int]*DevTick{}
+			newTicks[tick] = newdd
 		}
 		for dev, stats := range dd {
 			newdev := dev
@@ -314,7 +314,7 @@ func (devs *DevsAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAn
 			}
 			newstats, exists := newdd[newdev]
 			if !exists {
-				newstats = &DevDay{Languages: map[string]items.LineStats{}}
+				newstats = &DevTick{Languages: map[string]items.LineStats{}}
 				newdd[newdev] = newstats
 			}
 			newstats.Commits += stats.Commits
@@ -335,30 +335,30 @@ func (devs *DevsAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAn
 }
 
 func (devs *DevsAnalysis) serializeText(result *DevsResult, writer io.Writer) {
-	fmt.Fprintln(writer, "  days:")
-	days := make([]int, len(result.Days))
+	fmt.Fprintln(writer, "  ticks:")
+	ticks := make([]int, len(result.Ticks))
 	{
 		i := 0
-		for day := range result.Days {
-			days[i] = day
+		for tick := range result.Ticks {
+			ticks[i] = tick
 			i++
 		}
 	}
-	sort.Ints(days)
-	for _, day := range days {
-		fmt.Fprintf(writer, "    %d:\n", day)
-		rday := result.Days[day]
-		devseq := make([]int, len(rday))
+	sort.Ints(ticks)
+	for _, tick := range ticks {
+		fmt.Fprintf(writer, "    %d:\n", tick)
+		rtick := result.Ticks[tick]
+		devseq := make([]int, len(rtick))
 		{
 			i := 0
-			for dev := range rday {
+			for dev := range rtick {
 				devseq[i] = dev
 				i++
 			}
 		}
 		sort.Ints(devseq)
 		for _, dev := range devseq {
-			stats := rday[dev]
+			stats := rtick[dev]
 			if dev == identity.AuthorMissing {
 				dev = -1
 			}
@@ -385,17 +385,17 @@ func (devs *DevsAnalysis) serializeText(result *DevsResult, writer io.Writer) {
 func (devs *DevsAnalysis) serializeBinary(result *DevsResult, writer io.Writer) error {
 	message := pb.DevsAnalysisResults{}
 	message.DevIndex = result.reversedPeopleDict
-	message.Days = map[int32]*pb.DayDevs{}
-	for day, devs := range result.Days {
-		dd := &pb.DayDevs{}
-		message.Days[int32(day)] = dd
-		dd.Devs = map[int32]*pb.DevDay{}
+	message.Ticks = map[int32]*pb.TickDevs{}
+	for tick, devs := range result.Ticks {
+		dd := &pb.TickDevs{}
+		message.Ticks[int32(tick)] = dd
+		dd.Devs = map[int32]*pb.DevTick{}
 		for dev, stats := range devs {
 			if dev == identity.AuthorMissing {
 				dev = -1
 			}
 			languages := map[string]*pb.LineStats{}
-			dd.Devs[int32(dev)] = &pb.DevDay{
+			dd.Devs[int32(dev)] = &pb.DevTick{
 				Commits: int32(stats.Commits),
 				Stats: &pb.LineStats{
 					Added:   int32(stats.Added),

+ 62 - 62
leaves/devs_test.go

@@ -31,7 +31,7 @@ func TestDevsMeta(t *testing.T) {
 	assert.Equal(t, len(d.Requires()), 5)
 	assert.Equal(t, d.Requires()[0], identity.DependencyAuthor)
 	assert.Equal(t, d.Requires()[1], items.DependencyTreeChanges)
-	assert.Equal(t, d.Requires()[2], items.DependencyDay)
+	assert.Equal(t, d.Requires()[2], items.DependencyTick)
 	assert.Equal(t, d.Requires()[3], items.DependencyLanguages)
 	assert.Equal(t, d.Requires()[4], items.DependencyLineStats)
 	assert.Equal(t, d.Flag(), "devs")
@@ -68,7 +68,7 @@ func TestDevsConfigure(t *testing.T) {
 
 func TestDevsInitialize(t *testing.T) {
 	d := fixtureDevs()
-	assert.NotNil(t, d.days)
+	assert.NotNil(t, d.ticks)
 }
 
 func TestDevsConsumeFinalize(t *testing.T) {
@@ -77,7 +77,7 @@ func TestDevsConsumeFinalize(t *testing.T) {
 
 	// stage 1
 	deps[identity.DependencyAuthor] = 0
-	deps[items.DependencyDay] = 0
+	deps[items.DependencyTick] = 0
 	cache := map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
 	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
@@ -148,8 +148,8 @@ func TestDevsConsumeFinalize(t *testing.T) {
 	result, err = devs.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
-	assert.Len(t, devs.days, 1)
-	day := devs.days[0]
+	assert.Len(t, devs.ticks, 1)
+	day := devs.ticks[0]
 	assert.Len(t, day, 1)
 	dev := day[0]
 	assert.Equal(t, dev.Commits, 1)
@@ -167,8 +167,8 @@ func TestDevsConsumeFinalize(t *testing.T) {
 	result, err = devs.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
-	assert.Len(t, devs.days, 1)
-	day = devs.days[0]
+	assert.Len(t, devs.ticks, 1)
+	day = devs.ticks[0]
 	assert.Len(t, day, 1)
 	dev = day[0]
 	assert.Equal(t, dev.Commits, 2)
@@ -187,8 +187,8 @@ func TestDevsConsumeFinalize(t *testing.T) {
 	result, err = devs.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
-	assert.Len(t, devs.days, 1)
-	day = devs.days[0]
+	assert.Len(t, devs.ticks, 1)
+	day = devs.ticks[0]
 	assert.Len(t, day, 2)
 	for i := 0; i < 2; i++ {
 		dev = day[i]
@@ -208,8 +208,8 @@ func TestDevsConsumeFinalize(t *testing.T) {
 	result, err = devs.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
-	assert.Len(t, devs.days, 1)
-	day = devs.days[0]
+	assert.Len(t, devs.ticks, 1)
+	day = devs.ticks[0]
 	assert.Len(t, day, 2)
 	dev = day[0]
 	assert.Equal(t, dev.Commits, 2)
@@ -228,12 +228,12 @@ func TestDevsConsumeFinalize(t *testing.T) {
 	assert.Equal(t, dev.Languages["Go"].Removed, 9*2)
 	assert.Equal(t, dev.Languages["Go"].Changed, 67*2)
 
-	deps[items.DependencyDay] = 1
+	deps[items.DependencyTick] = 1
 	result, err = devs.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
-	assert.Len(t, devs.days, 2)
-	day = devs.days[0]
+	assert.Len(t, devs.ticks, 2)
+	day = devs.ticks[0]
 	assert.Len(t, day, 2)
 	dev = day[0]
 	assert.Equal(t, dev.Commits, 2)
@@ -251,7 +251,7 @@ func TestDevsConsumeFinalize(t *testing.T) {
 	assert.Equal(t, dev.Languages["Go"].Added, 847*2)
 	assert.Equal(t, dev.Languages["Go"].Removed, 9*2)
 	assert.Equal(t, dev.Languages["Go"].Changed, 67*2)
-	day = devs.days[1]
+	day = devs.ticks[1]
 	assert.Len(t, day, 1)
 	dev = day[1]
 	assert.Equal(t, dev.Commits, 1)
@@ -269,10 +269,10 @@ func ls(added, removed, changed int) items.LineStats {
 
 func TestDevsFinalize(t *testing.T) {
 	devs := fixtureDevs()
-	devs.days[1] = map[int]*DevDay{}
-	devs.days[1][1] = &DevDay{10, ls(20, 30, 40), nil}
+	devs.ticks[1] = map[int]*DevTick{}
+	devs.ticks[1][1] = &DevTick{10, ls(20, 30, 40), nil}
 	x := devs.Finalize().(DevsResult)
-	assert.Equal(t, x.Days, devs.days)
+	assert.Equal(t, x.Ticks, devs.ticks)
 	assert.Equal(t, x.reversedPeopleDict, devs.reversedPeopleDict)
 }
 
@@ -284,18 +284,18 @@ func TestDevsFork(t *testing.T) {
 
 func TestDevsSerialize(t *testing.T) {
 	devs := fixtureDevs()
-	devs.days[1] = map[int]*DevDay{}
-	devs.days[1][0] = &DevDay{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(2, 3, 4)}}
-	devs.days[1][1] = &DevDay{1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(25, 35, 45)}}
-	devs.days[10] = map[int]*DevDay{}
-	devs.days[10][0] = &DevDay{11, ls(21, 31, 41), map[string]items.LineStats{"": ls(12, 13, 14)}}
-	devs.days[10][identity.AuthorMissing] = &DevDay{
+	devs.ticks[1] = map[int]*DevTick{}
+	devs.ticks[1][0] = &DevTick{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(2, 3, 4)}}
+	devs.ticks[1][1] = &DevTick{1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(25, 35, 45)}}
+	devs.ticks[10] = map[int]*DevTick{}
+	devs.ticks[10][0] = &DevTick{11, ls(21, 31, 41), map[string]items.LineStats{"": ls(12, 13, 14)}}
+	devs.ticks[10][identity.AuthorMissing] = &DevTick{
 		100, ls(200, 300, 400), map[string]items.LineStats{"Go": ls(32, 33, 34)}}
 	res := devs.Finalize().(DevsResult)
 	buffer := &bytes.Buffer{}
 	err := devs.Serialize(res, false, buffer)
 	assert.Nil(t, err)
-	assert.Equal(t, `  days:
+	assert.Equal(t, `  ticks:
     1:
       0: [10, 20, 30, 40, {Go: [2, 3, 4]}]
       1: [1, 2, 3, 4, {Go: [25, 35, 45]}]
@@ -313,31 +313,31 @@ func TestDevsSerialize(t *testing.T) {
 	msg := pb.DevsAnalysisResults{}
 	assert.Nil(t, proto.Unmarshal(buffer.Bytes(), &msg))
 	assert.Equal(t, msg.DevIndex, devs.reversedPeopleDict)
-	assert.Len(t, msg.Days, 2)
-	assert.Len(t, msg.Days[1].Devs, 2)
-	assert.Equal(t, msg.Days[1].Devs[0], &pb.DevDay{
+	assert.Len(t, msg.Ticks, 2)
+	assert.Len(t, msg.Ticks[1].Devs, 2)
+	assert.Equal(t, msg.Ticks[1].Devs[0], &pb.DevTick{
 		Commits: 10, Stats: &pb.LineStats{Added: 20, Removed: 30, Changed: 40},
 		Languages: map[string]*pb.LineStats{"Go": {Added: 2, Removed: 3, Changed: 4}}})
-	assert.Equal(t, msg.Days[1].Devs[1], &pb.DevDay{
+	assert.Equal(t, msg.Ticks[1].Devs[1], &pb.DevTick{
 		Commits: 1, Stats: &pb.LineStats{Added: 2, Removed: 3, Changed: 4},
 		Languages: map[string]*pb.LineStats{"Go": {Added: 25, Removed: 35, Changed: 45}}})
-	assert.Len(t, msg.Days[10].Devs, 2)
-	assert.Equal(t, msg.Days[10].Devs[0], &pb.DevDay{
+	assert.Len(t, msg.Ticks[10].Devs, 2)
+	assert.Equal(t, msg.Ticks[10].Devs[0], &pb.DevTick{
 		Commits: 11, Stats: &pb.LineStats{Added: 21, Removed: 31, Changed: 41},
 		Languages: map[string]*pb.LineStats{"": {Added: 12, Removed: 13, Changed: 14}}})
-	assert.Equal(t, msg.Days[10].Devs[-1], &pb.DevDay{
+	assert.Equal(t, msg.Ticks[10].Devs[-1], &pb.DevTick{
 		Commits: 100, Stats: &pb.LineStats{Added: 200, Removed: 300, Changed: 400},
 		Languages: map[string]*pb.LineStats{"Go": {Added: 32, Removed: 33, Changed: 34}}})
 }
 
 func TestDevsDeserialize(t *testing.T) {
 	devs := fixtureDevs()
-	devs.days[1] = map[int]*DevDay{}
-	devs.days[1][0] = &DevDay{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(12, 13, 14)}}
-	devs.days[1][1] = &DevDay{1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(22, 23, 24)}}
-	devs.days[10] = map[int]*DevDay{}
-	devs.days[10][0] = &DevDay{11, ls(21, 31, 41), map[string]items.LineStats{"Go": ls(32, 33, 34)}}
-	devs.days[10][identity.AuthorMissing] = &DevDay{
+	devs.ticks[1] = map[int]*DevTick{}
+	devs.ticks[1][0] = &DevTick{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(12, 13, 14)}}
+	devs.ticks[1][1] = &DevTick{1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(22, 23, 24)}}
+	devs.ticks[10] = map[int]*DevTick{}
+	devs.ticks[10][0] = &DevTick{11, ls(21, 31, 41), map[string]items.LineStats{"Go": ls(32, 33, 34)}}
+	devs.ticks[10][identity.AuthorMissing] = &DevTick{
 		100, ls(200, 300, 400), map[string]items.LineStats{"Go": ls(42, 43, 44)}}
 	res := devs.Finalize().(DevsResult)
 	buffer := &bytes.Buffer{}
@@ -353,51 +353,51 @@ func TestDevsMergeResults(t *testing.T) {
 	people1 := [...]string{"1@srcd", "2@srcd"}
 	people2 := [...]string{"3@srcd", "1@srcd"}
 	r1 := DevsResult{
-		Days:               map[int]map[int]*DevDay{},
+		Ticks:              map[int]map[int]*DevTick{},
 		reversedPeopleDict: people1[:],
 	}
-	r1.Days[1] = map[int]*DevDay{}
-	r1.Days[1][0] = &DevDay{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(12, 13, 14)}}
-	r1.Days[1][1] = &DevDay{1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(22, 23, 24)}}
-	r1.Days[10] = map[int]*DevDay{}
-	r1.Days[10][0] = &DevDay{11, ls(21, 31, 41), nil}
-	r1.Days[10][identity.AuthorMissing] = &DevDay{
+	r1.Ticks[1] = map[int]*DevTick{}
+	r1.Ticks[1][0] = &DevTick{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(12, 13, 14)}}
+	r1.Ticks[1][1] = &DevTick{1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(22, 23, 24)}}
+	r1.Ticks[10] = map[int]*DevTick{}
+	r1.Ticks[10][0] = &DevTick{11, ls(21, 31, 41), nil}
+	r1.Ticks[10][identity.AuthorMissing] = &DevTick{
 		100, ls(200, 300, 400), map[string]items.LineStats{"Go": ls(32, 33, 34)}}
-	r1.Days[11] = map[int]*DevDay{}
-	r1.Days[11][1] = &DevDay{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(42, 43, 44)}}
+	r1.Ticks[11] = map[int]*DevTick{}
+	r1.Ticks[11][1] = &DevTick{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(42, 43, 44)}}
 	r2 := DevsResult{
-		Days:               map[int]map[int]*DevDay{},
+		Ticks:              map[int]map[int]*DevTick{},
 		reversedPeopleDict: people2[:],
 	}
-	r2.Days[1] = map[int]*DevDay{}
-	r2.Days[1][0] = &DevDay{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(12, 13, 14)}}
-	r2.Days[1][1] = &DevDay{1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(22, 23, 24)}}
-	r2.Days[2] = map[int]*DevDay{}
-	r2.Days[2][0] = &DevDay{11, ls(21, 31, 41), map[string]items.LineStats{"Go": ls(32, 33, 34)}}
-	r2.Days[2][identity.AuthorMissing] = &DevDay{
+	r2.Ticks[1] = map[int]*DevTick{}
+	r2.Ticks[1][0] = &DevTick{10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(12, 13, 14)}}
+	r2.Ticks[1][1] = &DevTick{1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(22, 23, 24)}}
+	r2.Ticks[2] = map[int]*DevTick{}
+	r2.Ticks[2][0] = &DevTick{11, ls(21, 31, 41), map[string]items.LineStats{"Go": ls(32, 33, 34)}}
+	r2.Ticks[2][identity.AuthorMissing] = &DevTick{
 		100, ls(200, 300, 400), map[string]items.LineStats{"Go": ls(42, 43, 44)}}
-	r2.Days[10] = map[int]*DevDay{}
-	r2.Days[10][0] = &DevDay{11, ls(21, 31, 41), map[string]items.LineStats{"Go": ls(52, 53, 54)}}
-	r2.Days[10][identity.AuthorMissing] = &DevDay{
+	r2.Ticks[10] = map[int]*DevTick{}
+	r2.Ticks[10][0] = &DevTick{11, ls(21, 31, 41), map[string]items.LineStats{"Go": ls(52, 53, 54)}}
+	r2.Ticks[10][identity.AuthorMissing] = &DevTick{
 		100, ls(200, 300, 400), map[string]items.LineStats{"Go": ls(62, 63, 64)}}
 
 	devs := fixtureDevs()
 	rm := devs.MergeResults(r1, r2, nil, nil).(DevsResult)
 	peoplerm := [...]string{"1@srcd", "2@srcd", "3@srcd"}
 	assert.Equal(t, rm.reversedPeopleDict, peoplerm[:])
-	assert.Len(t, rm.Days, 4)
-	assert.Equal(t, rm.Days[11], map[int]*DevDay{
+	assert.Len(t, rm.Ticks, 4)
+	assert.Equal(t, rm.Ticks[11], map[int]*DevTick{
 		1: {10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(42, 43, 44)}}})
-	assert.Equal(t, rm.Days[2], map[int]*DevDay{
+	assert.Equal(t, rm.Ticks[2], map[int]*DevTick{
 		identity.AuthorMissing: {100, ls(200, 300, 400), map[string]items.LineStats{"Go": ls(42, 43, 44)}},
 		2:                      {11, ls(21, 31, 41), map[string]items.LineStats{"Go": ls(32, 33, 34)}},
 	})
-	assert.Equal(t, rm.Days[1], map[int]*DevDay{
+	assert.Equal(t, rm.Ticks[1], map[int]*DevTick{
 		0: {11, ls(22, 33, 44), map[string]items.LineStats{"Go": ls(34, 36, 38)}},
 		1: {1, ls(2, 3, 4), map[string]items.LineStats{"Go": ls(22, 23, 24)}},
 		2: {10, ls(20, 30, 40), map[string]items.LineStats{"Go": ls(12, 13, 14)}},
 	})
-	assert.Equal(t, rm.Days[10], map[int]*DevDay{
+	assert.Equal(t, rm.Ticks[10], map[int]*DevTick{
 		0: {11, ls(21, 31, 41), map[string]items.LineStats{}},
 		2: {11, ls(21, 31, 41), map[string]items.LineStats{"Go": ls(52, 53, 54)}},
 		identity.AuthorMissing: {