Add tests for BlobCache and BurndownAnalysis

Vadim Markovtsev, 7 years ago
commit a34ee7e9ec
8 changed files with 346 additions and 24 deletions
  1. blob_cache_test.go (+22, -1)
  2. burndown.go (+10, -8)
  3. burndown_test.go (+278, -1)
  4. couples.go (+6, -6)
  5. pipeline.go (+2, -2)
  6. pipeline_test.go (+22, -0)
  7. stdout/utils.go (+1, -1)
  8. uast.go (+5, -5)

+ 22 - 1
blob_cache_test.go

@@ -17,9 +17,17 @@ func fixtureBlobCache() *BlobCache {
 	return cache
 }
 
-func TestBlobCacheInitialize(t *testing.T) {
+func TestBlobCacheConfigureInitialize(t *testing.T) {
 	cache := fixtureBlobCache()
 	assert.Equal(t, testRepository, cache.repository)
+	assert.False(t, cache.IgnoreMissingSubmodules)
+	facts := map[string]interface{}{}
+	facts[ConfigBlobCacheIgnoreMissingSubmodules] = true
+	cache.Configure(facts)
+	assert.True(t, cache.IgnoreMissingSubmodules)
+	facts = map[string]interface{}{}
+	cache.Configure(facts)
+	assert.True(t, cache.IgnoreMissingSubmodules)
 }
 
 func TestBlobCacheMetadata(t *testing.T) {
@@ -30,6 +38,19 @@ func TestBlobCacheMetadata(t *testing.T) {
 	assert.Equal(t, len(cache.Requires()), 1)
 	changes := &TreeDiff{}
 	assert.Equal(t, cache.Requires()[0], changes.Provides()[0])
+	opts := cache.ListConfigurationOptions()
+	assert.Len(t, opts, 1)
+	assert.Equal(t, opts[0].Name, ConfigBlobCacheIgnoreMissingSubmodules)
+}
+
+func TestBlobCacheRegistration(t *testing.T) {
+	tp, exists := Registry.registered[(&BlobCache{}).Name()]
+	assert.True(t, exists)
+	assert.Equal(t, tp.Elem().Name(), "BlobCache")
+	tps, exists := Registry.provided[(&BlobCache{}).Provides()[0]]
+	assert.True(t, exists)
+	assert.Len(t, tps, 1)
+	assert.Equal(t, tps[0].Elem().Name(), "BlobCache")
 }
 
 func TestBlobCacheConsumeModification(t *testing.T) {
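TestBlobCacheRegistration above only reads Registry.registered and Registry.provided, so it assumes BlobCache adds itself to the global Registry when the package loads. A minimal sketch of what that hook presumably looks like (the exact call site is not part of this diff; Register is the method pipeline.go documents as adding a PipelineItem to the registry):

	// Hypothetical registration hook, not shown in this commit: calling
	// Register is what populates the registered and provided maps that
	// TestBlobCacheRegistration inspects.
	func init() {
		Registry.Register(&BlobCache{})
	}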

+ 10 - 8
burndown.go

@@ -9,14 +9,14 @@ import (
 	"sort"
 	"unicode/utf8"
 
+	"github.com/gogo/protobuf/proto"
 	"github.com/sergi/go-diff/diffmatchpatch"
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/stdout"
 	"gopkg.in/src-d/hercules.v3/pb"
-	"github.com/gogo/protobuf/proto"
+	"gopkg.in/src-d/hercules.v3/stdout"
 )
 
 // BurndownAnalysis allows gathering line burndown statistics for a Git repository.
@@ -137,11 +137,13 @@ func (analyser *BurndownAnalysis) Configure(facts map[string]interface{}) {
 	if val, exists := facts[ConfigBurndownTrackFiles].(bool); exists {
 		analyser.TrackFiles = val
 	}
-	if people, _ := facts[ConfigBurndownTrackPeople].(bool); people {
+	if people, exists := facts[ConfigBurndownTrackPeople].(bool); people {
 		if val, exists := facts[FactIdentityDetectorPeopleCount].(int); exists {
 			analyser.PeopleNumber = val
 			analyser.reversedPeopleDict = facts[FactIdentityDetectorReversedPeopleDict].([]string)
 		}
+	} else if exists {
+		analyser.PeopleNumber = 0
 	}
 	if val, exists := facts[ConfigBurndownDebug].(bool); exists {
 		analyser.Debug = val
@@ -273,7 +275,7 @@ func (analyser *BurndownAnalysis) serializeText(result *BurndownResult, writer i
 	if len(result.PeopleHistories) > 0 {
 		fmt.Fprintln(writer, "  people_sequence:")
 		for key := range result.PeopleHistories {
-			fmt.Fprintln(writer, "    - " + stdout.SafeString(analyser.reversedPeopleDict[key]))
+			fmt.Fprintln(writer, "    - "+stdout.SafeString(analyser.reversedPeopleDict[key]))
 		}
 		fmt.Fprintln(writer, "  people:")
 		for key, val := range result.PeopleHistories {
@@ -287,8 +289,8 @@ func (analyser *BurndownAnalysis) serializeText(result *BurndownResult, writer i
 func (analyser *BurndownAnalysis) serializeBinary(result *BurndownResult, writer io.Writer) error {
 	message := pb.BurndownAnalysisResults{
 		Granularity: int32(analyser.Granularity),
-		Sampling: int32(analyser.Sampling),
-		Project: pb.ToBurndownSparseMatrix(result.GlobalHistory, "project"),
+		Sampling:    int32(analyser.Sampling),
+		Project:     pb.ToBurndownSparseMatrix(result.GlobalHistory, "project"),
 	}
 	if len(result.FileHistories) > 0 {
 		message.Files = make([]*pb.BurndownSparseMatrix, len(result.FileHistories))
@@ -303,7 +305,7 @@ func (analyser *BurndownAnalysis) serializeBinary(result *BurndownResult, writer
 
 	if len(result.PeopleHistories) > 0 {
 		message.People = make(
-		  []*pb.BurndownSparseMatrix, len(result.PeopleHistories))
+			[]*pb.BurndownSparseMatrix, len(result.PeopleHistories))
 		for key, val := range result.PeopleHistories {
 			message.People[key] = pb.ToBurndownSparseMatrix(val, analyser.reversedPeopleDict[key])
 		}
@@ -313,7 +315,7 @@ func (analyser *BurndownAnalysis) serializeBinary(result *BurndownResult, writer
 	if err != nil {
 		return err
 	}
-  writer.Write(serialized)
+	writer.Write(serialized)
 	return nil
 }
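The Configure fix above leans on Go's two-value type assertion: exists is true only when the ConfigBurndownTrackPeople fact is both present and a bool, so an absent fact leaves PeopleNumber alone while an explicit false now resets it to zero. A self-contained illustration of that behaviour (the string keys are placeholders, not the real fact names):

	// Standalone example of the comma-ok assertion used in Configure;
	// illustration only, the map keys are invented.
	package main

	import "fmt"

	func main() {
		facts := map[string]interface{}{"track_people": false}
		people, exists := facts["track_people"].(bool)
		fmt.Println(people, exists) // false true  -> explicitly disabled, reset counters
		people, exists = facts["missing_fact"].(bool)
		fmt.Println(people, exists) // false false -> fact absent, keep the previous value
	}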
 

+ 278 - 1
burndown_test.go

@@ -1,12 +1,15 @@
 package hercules
 
 import (
+	"bytes"
+	"io"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"io"
+	"github.com/gogo/protobuf/proto"
+	"gopkg.in/src-d/hercules.v3/pb"
 )
 
 func TestBurndownMeta(t *testing.T) {
@@ -17,6 +20,60 @@ func TestBurndownMeta(t *testing.T) {
 	for _, name := range required {
 		assert.Contains(t, burndown.Requires(), name)
 	}
+	opts := burndown.ListConfigurationOptions()
+	matches := 0
+	for _, opt := range opts {
+		switch opt.Name {
+		case ConfigBurndownGranularity,
+			ConfigBurndownSampling,
+			ConfigBurndownTrackFiles,
+			ConfigBurndownTrackPeople,
+			ConfigBurndownDebug:
+			matches++
+		}
+	}
+	assert.Len(t, opts, matches)
+	assert.Equal(t, burndown.Flag(), "burndown")
+}
+
+func TestBurndownConfigure(t *testing.T) {
+	burndown := BurndownAnalysis{}
+	facts := map[string]interface{}{}
+	facts[ConfigBurndownGranularity] = 100
+	facts[ConfigBurndownSampling] = 200
+	facts[ConfigBurndownTrackFiles] = true
+	facts[ConfigBurndownTrackPeople] = true
+	facts[ConfigBurndownDebug] = true
+	facts[FactIdentityDetectorPeopleCount] = 5
+	facts[FactIdentityDetectorReversedPeopleDict] = burndown.Requires()
+	burndown.Configure(facts)
+	assert.Equal(t, burndown.Granularity, 100)
+	assert.Equal(t, burndown.Sampling, 200)
+	assert.Equal(t, burndown.TrackFiles, true)
+	assert.Equal(t, burndown.PeopleNumber, 5)
+	assert.Equal(t, burndown.Debug, true)
+	assert.Equal(t, burndown.reversedPeopleDict, burndown.Requires())
+	facts[ConfigBurndownTrackPeople] = false
+	facts[FactIdentityDetectorPeopleCount] = 50
+	burndown.Configure(facts)
+	assert.Equal(t, burndown.PeopleNumber, 0)
+	facts = map[string]interface{}{}
+	burndown.Configure(facts)
+	assert.Equal(t, burndown.Granularity, 100)
+	assert.Equal(t, burndown.Sampling, 200)
+	assert.Equal(t, burndown.TrackFiles, true)
+	assert.Equal(t, burndown.PeopleNumber, 0)
+	assert.Equal(t, burndown.Debug, true)
+	assert.Equal(t, burndown.reversedPeopleDict, burndown.Requires())
+}
+
+func TestBurndownRegistration(t *testing.T) {
+	tp, exists := Registry.registered[(&BurndownAnalysis{}).Name()]
+	assert.True(t, exists)
+	assert.Equal(t, tp.Elem().Name(), "BurndownAnalysis")
+	tp, exists = Registry.flags[(&BurndownAnalysis{}).Flag()]
+	assert.True(t, exists)
+	assert.Equal(t, tp.Elem().Name(), "BurndownAnalysis")
 }
 
 func TestBurndownConsumeFinalize(t *testing.T) {
@@ -240,6 +297,226 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 	}
 }
 
+func TestBurndownAnalysisSerialize(t *testing.T) {
+	burndown := BurndownAnalysis{
+		Granularity:  30,
+		Sampling:     30,
+		PeopleNumber: 2,
+		TrackFiles:   true,
+	}
+	burndown.Initialize(testRepository)
+	deps := map[string]interface{}{}
+	// stage 1
+	deps["author"] = 0
+	deps["day"] = 0
+	cache := map[plumbing.Hash]*object.Blob{}
+	hash := plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	hash = plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	hash = plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	hash = plumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	deps["blob_cache"] = cache
+	changes := make(object.Changes, 3)
+	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
+	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+		"994eac1cd07235bb9815e547a75c84265dea00f5"))
+	changes[0] = &object.Change{From: object.ChangeEntry{
+		Name: "analyser.go",
+		Tree: treeFrom,
+		TreeEntry: object.TreeEntry{
+			Name: "analyser.go",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9"),
+		},
+	}, To: object.ChangeEntry{
+		Name: "analyser.go",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: "analyser.go",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1"),
+		},
+	}}
+	changes[1] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
+		Name: "cmd/hercules/main.go",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: "cmd/hercules/main.go",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9"),
+		},
+	},
+	}
+	changes[2] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
+		Name: ".travis.yml",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: ".travis.yml",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"),
+		},
+	},
+	}
+	deps["changes"] = changes
+	fd := fixtureFileDiff()
+	result, _ := fd.Consume(deps)
+	deps["file_diff"] = result["file_diff"]
+	burndown.Consume(deps)
+
+	// stage 2
+	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
+	deps["author"] = 1
+	deps["day"] = 30
+	cache = map[plumbing.Hash]*object.Blob{}
+	hash = plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	hash = plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	hash = plumbing.NewHash("29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	hash = plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	hash = plumbing.NewHash("f7d918ec500e2f925ecde79b51cc007bac27de72")
+	cache[hash], _ = testRepository.BlobObject(hash)
+	deps["blob_cache"] = cache
+	changes = make(object.Changes, 3)
+	treeFrom, _ = testRepository.TreeObject(plumbing.NewHash(
+		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
+	treeTo, _ = testRepository.TreeObject(plumbing.NewHash(
+		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
+	changes[0] = &object.Change{From: object.ChangeEntry{
+		Name: "analyser.go",
+		Tree: treeFrom,
+		TreeEntry: object.TreeEntry{
+			Name: "analyser.go",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1"),
+		},
+	}, To: object.ChangeEntry{
+		Name: "burndown.go",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: "burndown.go",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2"),
+		},
+	},
+	}
+	changes[1] = &object.Change{From: object.ChangeEntry{
+		Name: "cmd/hercules/main.go",
+		Tree: treeFrom,
+		TreeEntry: object.TreeEntry{
+			Name: "cmd/hercules/main.go",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9"),
+		},
+	}, To: object.ChangeEntry{
+		Name: "cmd/hercules/main.go",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: "cmd/hercules/main.go",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("f7d918ec500e2f925ecde79b51cc007bac27de72"),
+		},
+	},
+	}
+	changes[2] = &object.Change{From: object.ChangeEntry{
+		Name: ".travis.yml",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: ".travis.yml",
+			Mode: 0100644,
+			Hash: plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"),
+		},
+	}, To: object.ChangeEntry{},
+	}
+	deps["changes"] = changes
+	fd = fixtureFileDiff()
+	result, _ = fd.Consume(deps)
+	deps["file_diff"] = result["file_diff"]
+	burndown.Consume(deps)
+	out := burndown.Finalize().(BurndownResult)
+
+	people := [...]string{"one@srcd", "two@srcd"}
+	burndown.reversedPeopleDict = people[:]
+	buffer := &bytes.Buffer{}
+	burndown.Serialize(out, false, buffer)
+	assert.Equal(t, buffer.String(), `  granularity: 30
+  sampling: 30
+  "project": |-
+    1145    0
+     464  369
+  files:
+    "burndown.go": |-
+      0     0
+      293 250
+    "cmd/hercules/main.go": |-
+      207   0
+      171 119
+  people_sequence:
+    - "one@srcd"
+    - "two@srcd"
+  people:
+    "one@srcd": |-
+      1145    0
+       464    0
+    "two@srcd": |-
+      0     0
+        0 369
+  people_interaction: |-
+    1145    0    0 -681
+     369    0    0    0
+`)
+	buffer = &bytes.Buffer{}
+	burndown.Serialize(out, true, buffer)
+	msg := pb.BurndownAnalysisResults{}
+	proto.Unmarshal(buffer.Bytes(), &msg)
+	assert.Equal(t, msg.Granularity, int32(30))
+	assert.Equal(t, msg.Sampling, int32(30))
+	assert.Equal(t, msg.Project.Name, "project")
+	assert.Equal(t, msg.Project.NumberOfRows, int32(2))
+	assert.Equal(t, msg.Project.NumberOfColumns, int32(2))
+	assert.Len(t, msg.Project.Rows, 2)
+	assert.Len(t, msg.Project.Rows[0].Columns, 1)
+	assert.Equal(t, msg.Project.Rows[0].Columns[0], uint32(1145))
+	assert.Len(t, msg.Project.Rows[1].Columns, 2)
+	assert.Equal(t, msg.Project.Rows[1].Columns[0], uint32(464))
+	assert.Equal(t, msg.Project.Rows[1].Columns[1], uint32(369))
+	assert.Len(t, msg.Files, 2)
+	assert.Equal(t, msg.Files[0].Name, "burndown.go")
+	assert.Equal(t, msg.Files[1].Name, "cmd/hercules/main.go")
+	assert.Len(t, msg.Files[0].Rows, 2)
+	assert.Len(t, msg.Files[0].Rows[0].Columns, 0)
+	assert.Len(t, msg.Files[0].Rows[1].Columns, 2)
+	assert.Equal(t, msg.Files[0].Rows[1].Columns[0], uint32(293))
+	assert.Equal(t, msg.Files[0].Rows[1].Columns[1], uint32(250))
+	assert.Len(t, msg.People, 2)
+	assert.Equal(t, msg.People[0].Name, "one@srcd")
+	assert.Equal(t, msg.People[1].Name, "two@srcd")
+	assert.Len(t, msg.People[0].Rows, 2)
+	assert.Len(t, msg.People[0].Rows[0].Columns, 1)
+	assert.Len(t, msg.People[0].Rows[1].Columns, 1)
+	assert.Equal(t, msg.People[0].Rows[0].Columns[0], uint32(1145))
+	assert.Equal(t, msg.People[0].Rows[1].Columns[0], uint32(464))
+	assert.Len(t, msg.People[1].Rows, 2)
+	assert.Len(t, msg.People[1].Rows[0].Columns, 0)
+	assert.Len(t, msg.People[1].Rows[1].Columns, 2)
+	assert.Equal(t, msg.People[1].Rows[1].Columns[0], uint32(0))
+	assert.Equal(t, msg.People[1].Rows[1].Columns[1], uint32(369))
+	assert.Equal(t, msg.PeopleInteraction.NumberOfRows, int32(2))
+	assert.Equal(t, msg.PeopleInteraction.NumberOfColumns, int32(4))
+	data := [...]int64{1145, -681, 369}
+	assert.Equal(t, msg.PeopleInteraction.Data, data[:])
+	indices := [...]int32{0, 3, 0}
+	assert.Equal(t, msg.PeopleInteraction.Indices, indices[:])
+	indptr := [...]int64{0, 2, 3}
+	assert.Equal(t, msg.PeopleInteraction.Indptr, indptr[:])
+}
+
 type panickingCloser struct {
 }
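The protobuf assertions in TestBurndownAnalysisSerialize walk the sparse rows column by column. A hedged helper sketch, not part of the commit (toDense is a hypothetical name), that expands such a matrix into a dense slice using only the fields the test already exercises, so whole matrices could be compared with one assert.Equal:

	// toDense expands a pb.BurndownSparseMatrix into a dense [][]uint32,
	// padding the shorter leading rows with zeros; hypothetical helper built on
	// the NumberOfRows/NumberOfColumns/Rows[i].Columns fields asserted above.
	func toDense(m *pb.BurndownSparseMatrix) [][]uint32 {
		dense := make([][]uint32, m.NumberOfRows)
		for i, row := range m.Rows {
			dense[i] = make([]uint32, m.NumberOfColumns)
			copy(dense[i], row.Columns)
		}
		return dense
	}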
 

+ 6 - 6
couples.go

@@ -5,12 +5,12 @@ import (
 	"io"
 	"sort"
 
+	"github.com/gogo/protobuf/proto"
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/stdout"
 	"gopkg.in/src-d/hercules.v3/pb"
-	"github.com/gogo/protobuf/proto"
+	"gopkg.in/src-d/hercules.v3/stdout"
 )
 
 type Couples struct {
@@ -288,15 +288,15 @@ func (couples *Couples) serializeBinary(result *CouplesResult, writer io.Writer)
 	message := pb.CouplesResults{}
 
 	message.FileCouples = &pb.Couples{
-		Index: result.Files,
+		Index:  result.Files,
 		Matrix: pb.MapToCompressedSparseRowMatrix(result.FilesMatrix),
 	}
 	message.DeveloperCouples = &pb.Couples{
-		Index: couples.reversedPeopleDict,
+		Index:  couples.reversedPeopleDict,
 		Matrix: pb.MapToCompressedSparseRowMatrix(result.PeopleMatrix),
 	}
 	message.TouchedFiles = &pb.DeveloperTouchedFiles{
-    Developers: make([]*pb.TouchedFiles, len(couples.reversedPeopleDict)),
+		Developers: make([]*pb.TouchedFiles, len(couples.reversedPeopleDict)),
 	}
 	for key := range couples.reversedPeopleDict {
 		files := result.PeopleFiles[key]
@@ -313,7 +313,7 @@ func (couples *Couples) serializeBinary(result *CouplesResult, writer io.Writer)
 	if err != nil {
 		return err
 	}
-  writer.Write(serialized)
+	writer.Write(serialized)
 	return nil
 }
 

+ 2 - 2
pipeline.go

@@ -90,7 +90,7 @@ type LeafPipelineItem interface {
 type PipelineItemRegistry struct {
 	provided   map[string][]reflect.Type
 	registered map[string]reflect.Type
-	flags map[string]reflect.Type
+	flags      map[string]reflect.Type
 }
 
 // Register adds another PipelineItem to the registry.
@@ -196,7 +196,7 @@ func (registry *PipelineItemRegistry) AddFlags() (map[string]interface{}, map[st
 var Registry = &PipelineItemRegistry{
 	provided:   map[string][]reflect.Type{},
 	registered: map[string]reflect.Type{},
-	flags: map[string]reflect.Type{},
+	flags:      map[string]reflect.Type{},
 }
 
 type wrappedPipelineItem struct {
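The new flags map is what TestBurndownRegistration queries via Flag(). A hedged sketch of resolving a flag back to a fresh leaf item through reflection, assuming, as the tests do, that the registry stores pointer types (itemByFlag is a hypothetical name):

	// itemByFlag turns a command-line flag such as "burndown" into a new
	// LeafPipelineItem; the Elem()/New() round trip mirrors the
	// tp.Elem().Name() pattern used in the registration tests.
	func itemByFlag(flag string) LeafPipelineItem {
		tp, exists := Registry.flags[flag]
		if !exists {
			return nil
		}
		return reflect.New(tp.Elem()).Interface().(LeafPipelineItem)
	}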

+ 22 - 0
pipeline_test.go

@@ -39,6 +39,17 @@ func (item *testPipelineItem) Requires() []string {
 func (item *testPipelineItem) Configure(facts map[string]interface{}) {
 }
 
+func (item *testPipelineItem) ListConfigurationOptions() []ConfigurationOption {
+	options := [...]ConfigurationOption{{
+		Name:        "TestOption",
+		Description: "The option description.",
+		Flag:        "test-option",
+		Type:        IntConfigurationOption,
+		Default:     10,
+	}}
+	return options[:]
+}
+
 func (item *testPipelineItem) Initialize(repository *git.Repository) {
 	item.Initialized = repository != nil
 }
@@ -84,6 +95,17 @@ func (item *dependingTestPipelineItem) Requires() []string {
 	return arr[:]
 }
 
+func (item *dependingTestPipelineItem) ListConfigurationOptions() []ConfigurationOption {
+	options := [...]ConfigurationOption{{
+		Name:        "TestOption",
+		Description: "The option description.",
+		Flag:        "test-option",
+		Type:        IntConfigurationOption,
+		Default:     10,
+	}}
+	return options[:]
+}
+
 func (item *dependingTestPipelineItem) Configure(facts map[string]interface{}) {
 }
 

+ 1 - 1
stdout/utils.go

@@ -57,7 +57,7 @@ func PrintMatrix(writer io.Writer, matrix [][]int64, indent int, name string, fi
 				fmt.Fprintf(writer, " %[1]*[2]d", width, val)
 			} else {
 				first = false
-				fmt.Fprintf(writer, "%d%s", val, strings.Repeat(" ", width-len(strconv.FormatInt(val, 10))))
+				fmt.Fprintf(writer, " %d%s", val, strings.Repeat(" ", width-len(strconv.FormatInt(val, 10))))
 			}
 		}
 		fmt.Fprintln(writer)

+ 5 - 5
uast.go

@@ -13,6 +13,8 @@ import (
 	"sync"
 	"time"
 
+	"github.com/gogo/protobuf/proto"
+	"github.com/jeffail/tunny"
 	"gopkg.in/bblfsh/client-go.v1"
 	"gopkg.in/bblfsh/sdk.v1/protocol"
 	"gopkg.in/bblfsh/sdk.v1/uast"
@@ -23,8 +25,6 @@ import (
 	"gopkg.in/src-d/go-git.v4/utils/ioutil"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
 	"gopkg.in/src-d/hercules.v3/pb"
-	"github.com/jeffail/tunny"
-	"github.com/gogo/protobuf/proto"
 )
 
 type UASTExtractor struct {
@@ -361,7 +361,7 @@ type UASTChangesSaver struct {
 	OutputPath string
 
 	repository *git.Repository
-	result [][]UASTChange
+	result     [][]UASTChange
 }
 
 const (
@@ -472,12 +472,12 @@ func (saver *UASTChangesSaver) serializeText(result []*pb.UASTChange, writer io.
 }
 
 func (saver *UASTChangesSaver) serializeBinary(result []*pb.UASTChange, writer io.Writer) error {
-  message := pb.UASTChangesSaverResults{Changes: result}
+	message := pb.UASTChangesSaverResults{Changes: result}
 	serialized, err := proto.Marshal(&message)
 	if err != nil {
 		return err
 	}
-  writer.Write(serialized)
+	writer.Write(serialized)
 	return nil
 }
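serializeBinary now reads the same in uast.go, burndown.go and couples.go: marshal the message, write the bytes. A hedged refactoring sketch, not part of this commit, that factors out that shared tail and also propagates the Write error the current methods drop (writeMessage is a hypothetical name):

	// writeMessage marshals any protobuf message and writes it to the given
	// writer, returning either the marshalling error or the write error;
	// hypothetical helper sketching a common tail for the serializeBinary methods.
	func writeMessage(message proto.Message, writer io.Writer) error {
		serialized, err := proto.Marshal(message)
		if err != nil {
			return err
		}
		_, err = writer.Write(serialized)
		return err
	}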