
Merge pull request #76 from vmarkovtsev/master

v4 > master
Vadim Markovtsev · 7 years ago · commit 06716c2b39

81 changed files with 3135 additions and 2036 deletions
  1. .travis.yml (+10 -3)
  2. Dockerfile (+4 -4)
  3. Makefile (+9 -9)
  4. README.md (+3 -3)
  5. appveyor.yml (+5 -5)
  6. changes_xpather_test.go (+0 -70)
  7. cmd/hercules/combine.go (+2 -2)
  8. cmd/hercules/plugin.template (+1 -1)
  9. cmd/hercules/root.go (+2 -2)
  10. contrib/_plugin_example/churn_analysis.go (+2 -2)
  11. core.go (+130 -0)
  12. internal/__init__.py (+0 -0)
  13. file.go (+5 -46)
  14. file_test.go (+2 -13)
  15. internal/core/global_test.go (+93 -0)
  16. pipeline.go (+7 -4)
  17. pipeline_test.go (+19 -124)
  18. registry.go (+1 -1)
  19. registry_test.go (+3 -2)
  20. dummies.go (+5 -3)
  21. dummies_test.go (+4 -4)
  22. internal/math.go (+44 -0)
  23. internal/math_test.go (+18 -0)
  24. internal/pb/__init__.py (+0 -0)
  25. internal/pb/pb.pb.go (+685 -0)
  26. internal/pb/pb.proto (+0 -0)
  27. internal/pb/pb_pb2.py (+1127 -0)
  28. internal/pb/utils.go (+0 -0)
  29. blob_cache.go (+11 -9)
  30. blob_cache_test.go (+39 -39)
  31. day.go (+6 -5)
  32. day_test.go (+16 -15)
  33. diff.go (+14 -6)
  34. diff_test.go (+58 -60)
  35. identity.go (+26 -25)
  36. identity_test.go (+22 -21)
  37. mailmap.go (+1 -1)
  38. mailmap_test.go (+3 -2)
  39. renames.go (+11 -8)
  40. renames_test.go (+23 -21)
  41. tree_diff.go (+8 -7)
  42. tree_diff_test.go (+17 -16)
  43. changes_xpather.go (+3 -3)
  44. internal/plumbing/uast/changes_xpather_test.go (+33 -0)
  45. diff_refiner.go (+15 -13)
  46. diff_refiner_test.go (+35 -33)
  47. internal/plumbing/uast/test/utils.go (+39 -0)
  48. uast.go (+109 -109)
  49. uast_test.go (+58 -51)
  50. internal/rbtree/rbtree.go (+0 -0)
  51. internal/test/fixtures/fixtures.go (+13 -0)
  52. internal/test/repository.go (+51 -0)
  53. internal/test_data/1.java (+0 -0)
  54. internal/test_data/2.java (+0 -0)
  55. internal/test_data/blob (+0 -0)
  56. internal/test_data/burndown.pb (+0 -0)
  57. internal/test_data/couples.pb (+0 -0)
  58. internal/test_data/gitmodules (+0 -0)
  59. internal/test_data/gitmodules_empty (+0 -0)
  60. internal/test_data/identities (+0 -0)
  61. internal/test_data/uast1.pb (+0 -0)
  62. internal/test_data/uast2.pb (+0 -0)
  63. internal/toposort/toposort.go (+0 -0)
  64. internal/toposort/toposort_test.go (+0 -0)
  65. labours.py (+6 -5)
  66. burndown.go (+44 -39)
  67. burndown_test.go (+87 -73)
  68. comment_sentiment.go (+18 -15)
  69. comment_sentiment_test.go (+30 -19)
  70. couples.go (+19 -16)
  71. couples_test.go (+38 -28)
  72. file_history.go (+10 -8)
  73. file_history_test.go (+29 -20)
  74. shotness.go (+16 -13)
  75. shotness_test.go (+46 -35)
  76. vendor/github.com/jeffail/tunny/LICENSE (+0 -19)
  77. vendor/github.com/jeffail/tunny/README.md (+0 -229)
  78. vendor/github.com/jeffail/tunny/tunny.go (+0 -379)
  79. vendor/github.com/jeffail/tunny/tunny_logo.png (BIN)
  80. vendor/github.com/jeffail/tunny/tunny_test.go (+0 -286)
  81. vendor/github.com/jeffail/tunny/worker.go (+0 -110)

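Two themes run through the whole merge: the import path bumps from gopkg.in/src-d/hercules.v3 to gopkg.in/src-d/hercules.v4, and the shared plumbing moves under internal/, leaving a façade at the repository root. A minimal sketch of what a downstream consumer looks like after the bump; the clone URL comes from the tests below, the rest is plain go-git:

    package main

    import (
    	git "gopkg.in/src-d/go-git.v4"
    	"gopkg.in/src-d/go-git.v4/storage/memory"
    	hercules "gopkg.in/src-d/hercules.v4" // was: gopkg.in/src-d/hercules.v3
    )

    func main() {
    	// Clone into memory, as the removed pipeline_test.go init() used to do.
    	repo, err := git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
    		URL: "https://github.com/src-d/hercules",
    	})
    	if err != nil {
    		panic(err)
    	}
    	pipeline := hercules.NewPipeline(repo)
    	pipeline.Initialize(nil) // nil facts: defaults, mirroring TestPipelineResolveIntegration
    }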
+ 10 - 3
.travis.yml

@@ -17,11 +17,15 @@ addons:
     - gcc-6
     - g++-6
 
-go_import_path: gopkg.in/src-d/hercules.v3
+go_import_path: gopkg.in/src-d/hercules.v4
 go:
   - 1.9.x
   - 1.10.x
   - tip
+cache:
+  directories:
+    - $HOME/.cache/pip
+    - $HOME/gopath/src
 
 matrix:
   fast_finish: true
@@ -48,7 +52,9 @@ before_install:
   - pip3 --version
   - pip3 install --user --no-build-isolation -r requirements.txt tensorflow
   - docker run -d --privileged -p 9432:9432 --name bblfshd bblfsh/bblfshd
-  - docker exec -it bblfshd bblfshctl driver install --all
+  - docker exec -it bblfshd bblfshctl driver install python bblfsh/python-driver:latest
+  - docker exec -it bblfshd bblfshctl driver install go bblfsh/go-driver:latest
+  - docker exec -it bblfshd bblfshctl driver install java bblfsh/java-driver:latest
   - curl -L "https://storage.googleapis.com/tensorflow/libtensorflow/libtensorflow-cpu-$(go env GOOS)-x86_64-$TENSORFLOW_VERSION.tar.gz" | sudo tar -C /usr/local -xz
   - sudo ldconfig
 install:
@@ -57,7 +63,8 @@ script:
   - set -e
   - go vet -tags tensorflow ./...
   - golint -set_exit_status ./...
-  - go test -tags tensorflow -v -cpu=1,2 -coverprofile=coverage.txt -covermode=count gopkg.in/src-d/hercules.v3
+  - if [[ $TRAVIS_GO_VERSION != 1.9.* ]]; then go test -coverpkg=all -v -cpu=1,2 -coverprofile=coverage.txt -covermode=count gopkg.in/src-d/hercules.v4/... && sed -i '/cmd\/hercules\|core.go/d' coverage.txt; fi
+  - if [[ $TRAVIS_GO_VERSION = 1.9.* ]]; then go test -v -cpu=1,2 gopkg.in/src-d/hercules.v4/...; fi
   - $GOPATH/bin/hercules version
   - $GOPATH/bin/hercules --burndown --couples --quiet --pb https://github.com/src-d/hercules > 1.pb
   - cp 1.pb 2.pb

+ 4 - 4
Dockerfile

@@ -12,14 +12,14 @@ RUN apt-get update && \
     unzip -d /usr/local protoc.zip && rm protoc.zip && \
     locale-gen en_US.UTF-8 && \
     export PATH=/usr/lib/go-1.10/bin:$PATH && \
-    go get -v -d gopkg.in/src-d/hercules.v3/... && \
-    cd /root/src/gopkg.in/src-d/hercules.v3 && \
+    go get -v -d gopkg.in/src-d/hercules.v4/... && \
+    cd /root/src/gopkg.in/src-d/hercules.v4 && \
     export CGO_CXXFLAGS=-std=c++14 && \
     curl -L "https://storage.googleapis.com/tensorflow/libtensorflow/libtensorflow-cpu-$(go env GOOS)-x86_64-1.7.0.tar.gz" | tar -C /usr/local -xz && \
     make && \
     rm /usr/local/bin/protoc && rm /usr/local/readme.txt && rm -rf /usr/local/include/google && \
     cp /root/bin/hercules /usr/local/bin && \
-    cp -r /root/src/gopkg.in/src-d/hercules.v3/*.py /root/src/gopkg.in/src-d/hercules.v3/pb /usr/local/bin && \
+    cp -r /root/src/gopkg.in/src-d/hercules.v4/*.py /root/src/gopkg.in/src-d/hercules.v4/internal/pb /usr/local/bin && \
     sed -i 's/parser.add_argument("--backend",/parser.add_argument("--backend", default="Agg",/' /usr/local/bin/labours.py && \
     echo '#!/bin/bash\n\
 \n\
@@ -28,7 +28,7 @@ echo "	$@"\n\
 echo\n\' > /browser && \
     chmod +x /browser && \
     curl https://bootstrap.pypa.io/get-pip.py | python3 && \
-    pip3 install --no-cache-dir --no-build-isolation -r /root/src/gopkg.in/src-d/hercules.v3/requirements.txt https://github.com/mind/wheels/releases/download/tf1.7-cpu/tensorflow-1.7.0-cp36-cp36m-linux_x86_64.whl && \
+    pip3 install --no-cache-dir --no-build-isolation -r /root/src/gopkg.in/src-d/hercules.v4/requirements.txt https://github.com/mind/wheels/releases/download/tf1.7-cpu/tensorflow-1.7.0-cp36-cp36m-linux_x86_64.whl && \
     rm -rf /root/* && \
     apt-get remove -y software-properties-common golang-1.10-go python3-dev libyaml-dev libxml2-dev curl git make unzip g++ && \
     apt-get remove -y *-doc *-man && \

+ 9 - 9
Makefile

@@ -12,22 +12,22 @@ endif
 all: ${GOPATH}/bin/hercules${EXE}
 
 test: all
-	go test gopkg.in/src-d/hercules.v3
+	go test gopkg.in/src-d/hercules.v4
 
 ${GOPATH}/bin/protoc-gen-gogo${EXE}:
 	go get -v github.com/gogo/protobuf/protoc-gen-gogo
 
 ifneq ($(OS),Windows_NT)
-pb/pb.pb.go: pb/pb.proto ${GOPATH}/bin/protoc-gen-gogo
-	PATH=${PATH}:${GOPATH}/bin protoc --gogo_out=pb --proto_path=pb pb/pb.proto
+internal/pb/pb.pb.go: internal/pb/pb.proto ${GOPATH}/bin/protoc-gen-gogo
+	PATH=${PATH}:${GOPATH}/bin protoc --gogo_out=internal/pb --proto_path=internal/pb internal/pb/pb.proto
 else
-pb/pb.pb.go: pb/pb.proto ${GOPATH}/bin/protoc-gen-gogo.exe
+internal/pb/pb.pb.go: internal/pb/pb.proto ${GOPATH}/bin/protoc-gen-gogo.exe
 	set "PATH=${PATH};${GOPATH}\bin" && \
-	call protoc --gogo_out=pb --proto_path=pb pb/pb.proto
+	call protoc --gogo_out=internal/pb --proto_path=internal/pb internal/pb/pb.proto
 endif
 
-pb/pb_pb2.py: pb/pb.proto
-	protoc --python_out pb --proto_path=pb pb/pb.proto
+internal/pb/pb_pb2.py: internal/pb/pb.proto
+	protoc --python_out internal/pb --proto_path=internal/pb internal/pb/pb.proto
 
 cmd/hercules/plugin_template_source.go: cmd/hercules/plugin.template
 	cd cmd/hercules && go generate
@@ -39,5 +39,5 @@ ${GOPATH}/pkg/$(PKG)/gopkg.in/bblfsh/client-go.v2: ${GOPATH}/src/gopkg.in/bblfsh
 	cd ${GOPATH}/src/gopkg.in/bblfsh/client-go.v2 && \
 	make dependencies
 
-${GOPATH}/bin/hercules${EXE}: *.go cmd/hercules/*.go rbtree/*.go yaml/*.go toposort/*.go pb/*.go ${GOPATH}/pkg/$(PKG)/gopkg.in/bblfsh/client-go.v2 pb/pb.pb.go pb/pb_pb2.py cmd/hercules/plugin_template_source.go
-	go get -tags "$(TAGS)" -ldflags "-X gopkg.in/src-d/hercules.v3.BinaryGitHash=$(shell git rev-parse HEAD)" gopkg.in/src-d/hercules.v3/cmd/hercules
+${GOPATH}/bin/hercules${EXE}: *.go */*.go */*/*.go ${GOPATH}/pkg/$(PKG)/gopkg.in/bblfsh/client-go.v2 internal/pb/pb.pb.go internal/pb/pb_pb2.py cmd/hercules/plugin_template_source.go
+	go get -tags "$(TAGS)" -ldflags "-X gopkg.in/src-d/hercules.v4.BinaryGitHash=$(shell git rev-parse HEAD)" gopkg.in/src-d/hercules.v4/cmd/hercules

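The binary target now depends on the globs *.go */*.go */*/*.go instead of naming each subpackage, and keeps stamping the build via -ldflags -X. A sketch of the package-level variable that the -X flag overwrites; only the name BinaryGitHash is visible in the diff, so the declaration below is an assumption (the -X flag can only set package-level string variables):

    package hercules

    // BinaryGitHash is replaced at link time by the Makefile:
    //   -ldflags "-X gopkg.in/src-d/hercules.v4.BinaryGitHash=$(git rev-parse HEAD)"
    var BinaryGitHash = "<unknown>"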
+ 3 - 3
README.md

File diff suppressed because it is too large

+ 5 - 5
appveyor.yml

@@ -2,7 +2,7 @@ version: "{build}"
 platform: x64
 image: Visual Studio 2017
 
-clone_folder: c:\gopath\src\gopkg.in\src-d\hercules.v3
+clone_folder: c:\gopath\src\gopkg.in\src-d\hercules.v4
 
 environment:
   GOPATH: c:\gopath
@@ -17,14 +17,14 @@ install:
 build_script:
   - set PATH=%PATH:C:\Program Files\Git\usr\bin;=%
   - set PATH=C:\msys64\mingw64\bin;%PATH%
-  - cd %GOPATH%\src\gopkg.in\src-d\hercules.v3
+  - cd %GOPATH%\src\gopkg.in\src-d\hercules.v4
   - set DISABLE_TENSORFLOW=1
   - make
-  - 7z a c:\gopath\src\gopkg.in\src-d\hercules.v3\hercules.win64.zip %GOPATH%\bin\hercules.exe
+  - 7z a c:\gopath\src\gopkg.in\src-d\hercules.v4\hercules.win64.zip %GOPATH%\bin\hercules.exe
 
 test_script:
-  - go get -v -t -d gopkg.in/src-d/hercules.v3/...
-  - go test -v -tags disable_babelfish gopkg.in/src-d/hercules.v3
+  - go get -v -t -d gopkg.in/src-d/hercules.v4/...
+  - go test -v -tags disable_babelfish gopkg.in/src-d/hercules.v4/...
 
 artifacts:
   - name: hercules.win64.zip

+ 0 - 70
changes_xpather_test.go

@@ -1,70 +0,0 @@
-// +build !disable_babelfish
-
-package hercules
-
-import (
-	"io/ioutil"
-	"log"
-	"testing"
-
-	"github.com/stretchr/testify/assert"
-	"gopkg.in/bblfsh/client-go.v2"
-	"gopkg.in/bblfsh/sdk.v1/uast"
-	"gopkg.in/src-d/go-git.v4/plumbing"
-	"gopkg.in/src-d/go-git.v4/plumbing/object"
-)
-
-func TestChangesXPatherExtractChanged(t *testing.T) {
-	client, err := bblfsh.NewClient("0.0.0.0:9432")
-	if err != nil {
-		log.Panicf("Failed to connect to the Babelfish server at 0.0.0.0:9432: %v", err)
-	}
-	hash1 := "a98a6940eb4cfb1eb635c3232485a75c4b63fff3"
-	hash2 := "42457dc695fa73ec9621b47832d5711f6325410d"
-	root1 := parseBlobFromTestRepo(hash1, "burndown.go", client)
-	root2 := parseBlobFromTestRepo(hash2, "burndown.go", client)
-	gitChange := fakeChangeForName("burndown.go", hash1, hash2)
-	uastChanges := []UASTChange{
-		{Before: root1, After: root2, Change: gitChange},
-		{Before: nil, After: root2, Change: gitChange},
-		{Before: root1, After: nil, Change: gitChange},
-	}
-	xpather := ChangesXPather{XPath: "//*[@roleComment]"}
-	nodes := xpather.Extract(uastChanges)
-	assert.True(t, len(nodes) > 0)
-}
-
-func parseBlobFromTestRepo(hash, name string, client *bblfsh.Client) *uast.Node {
-	blob, err := testRepository.BlobObject(plumbing.NewHash(hash))
-	if err != nil {
-		panic(err)
-	}
-	reader, err := blob.Reader()
-	if err != nil {
-		panic(err)
-	}
-	defer reader.Close()
-	data, err := ioutil.ReadAll(reader)
-	if err != nil {
-		panic(err)
-	}
-	request := client.NewParseRequest()
-	request.Content(string(data))
-	request.Filename(name)
-	response, err := request.Do()
-	if err != nil {
-		panic(err)
-	}
-	return response.UAST
-}
-
-func fakeChangeForName(name string, hashFrom string, hashTo string) *object.Change {
-	return &object.Change{
-		From: object.ChangeEntry{Name: name, TreeEntry: object.TreeEntry{
-			Name: name, Hash: plumbing.NewHash(hashFrom),
-		}},
-		To: object.ChangeEntry{Name: name, TreeEntry: object.TreeEntry{
-			Name: name, Hash: plumbing.NewHash(hashTo),
-		}},
-	}
-}

+ 2 - 2
cmd/hercules/combine.go

@@ -12,8 +12,8 @@ import (
 
 	"github.com/gogo/protobuf/proto"
 	"github.com/spf13/cobra"
-	"gopkg.in/src-d/hercules.v3"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 // combineCmd represents the combine command

+ 1 - 1
cmd/hercules/plugin.template

@@ -24,7 +24,7 @@ import (
 
   "github.com/gogo/protobuf/proto"
   "gopkg.in/src-d/go-git.v4"
-  "gopkg.in/src-d/hercules.v3"
+  "gopkg.in/src-d/hercules.v4"
 )
 
 // {{.name}} contains the intermediate state which is mutated by Consume(). It should implement

+ 2 - 2
cmd/hercules/root.go

@@ -25,8 +25,8 @@ import (
 	"gopkg.in/src-d/go-git.v4/storage"
 	"gopkg.in/src-d/go-git.v4/storage/filesystem"
 	"gopkg.in/src-d/go-git.v4/storage/memory"
-	"gopkg.in/src-d/hercules.v3"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 // oneLineWriter splits the output data by lines and outputs one on top of another using '\r'.

+ 2 - 2
contrib/_plugin_example/churn_analysis.go

@@ -13,8 +13,8 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3"
-	"gopkg.in/src-d/hercules.v3/yaml"
+	"gopkg.in/src-d/hercules.v4"
+	"gopkg.in/src-d/hercules.v4/yaml"
 )
 
 // ChurnAnalysis contains the intermediate state which is mutated by Consume(). It should implement

+ 130 - 0
core.go

@@ -0,0 +1,130 @@
+package hercules
+
+import (
+	git "gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
+	"gopkg.in/src-d/hercules.v4/leaves"
+)
+
+// ConfigurationOptionType represents the possible types of a ConfigurationOption's value.
+type ConfigurationOptionType = core.ConfigurationOptionType
+
+const (
+	// BoolConfigurationOption reflects the boolean value type.
+	BoolConfigurationOption = core.BoolConfigurationOption
+	// IntConfigurationOption reflects the integer value type.
+	IntConfigurationOption = core.IntConfigurationOption
+	// StringConfigurationOption reflects the string value type.
+	StringConfigurationOption = core.StringConfigurationOption
+	// FloatConfigurationOption reflects a floating point value type.
+	FloatConfigurationOption = core.FloatConfigurationOption
+	// StringsConfigurationOption reflects the array of strings value type.
+	StringsConfigurationOption = core.StringsConfigurationOption
+)
+
+// ConfigurationOption allows for the unified, retrospective way to setup PipelineItem-s.
+type ConfigurationOption = core.ConfigurationOption
+
+// PipelineItem is the interface for all the units in the Git commits analysis pipeline.
+type PipelineItem = core.PipelineItem
+
+// FeaturedPipelineItem enables switching the automatic insertion of pipeline items on or off.
+type FeaturedPipelineItem = core.FeaturedPipelineItem
+
+// LeafPipelineItem corresponds to the top level pipeline items which produce the end results.
+type LeafPipelineItem = core.LeafPipelineItem
+
+// MergeablePipelineItem specifies the methods to combine several analysis results together.
+type MergeablePipelineItem = core.MergeablePipelineItem
+
+// CommonAnalysisResult holds the information which is always extracted at Pipeline.Run().
+type CommonAnalysisResult = core.CommonAnalysisResult
+
+// MetadataToCommonAnalysisResult copies the data from a Protobuf message.
+func MetadataToCommonAnalysisResult(meta *core.Metadata) *CommonAnalysisResult {
+	return core.MetadataToCommonAnalysisResult(meta)
+}
+
+// Pipeline is the core Hercules entity which carries several PipelineItems and executes them.
+// See the extended example of how a Pipeline works in doc.go
+type Pipeline = core.Pipeline
+
+const (
+	// ConfigPipelineDumpPath is the name of the Pipeline configuration option (Pipeline.Initialize())
+	// which enables saving the items DAG to the specified file.
+	ConfigPipelineDumpPath = core.ConfigPipelineDumpPath
+	// ConfigPipelineDryRun is the name of the Pipeline configuration option (Pipeline.Initialize())
+	// which disables Configure() and Initialize() invocation on each PipelineItem during the
+	// Pipeline initialization.
+	// Subsequent Run() calls are going to fail. Useful with ConfigPipelineDumpPath=true.
+	ConfigPipelineDryRun = core.ConfigPipelineDryRun
+	// ConfigPipelineCommits is the name of the Pipeline configuration option (Pipeline.Initialize())
+	// which allows to specify the custom commit sequence. By default, Pipeline.Commits() is used.
+	ConfigPipelineCommits = core.ConfigPipelineCommits
+)
+
+// NewPipeline initializes a new instance of Pipeline struct.
+func NewPipeline(repository *git.Repository) *Pipeline {
+	return core.NewPipeline(repository)
+}
+
+// LoadCommitsFromFile reads the file by the specified FS path and generates the sequence of commits
+// by interpreting each line as a Git commit hash.
+func LoadCommitsFromFile(path string, repository *git.Repository) ([]*object.Commit, error) {
+	return core.LoadCommitsFromFile(path, repository)
+}
+
+// PipelineItemRegistry contains all the known PipelineItem-s.
+type PipelineItemRegistry = core.PipelineItemRegistry
+
+// Registry contains all known pipeline item types.
+var Registry = core.Registry
+
+const (
+	// DependencyAuthor is the name of the dependency provided by identity.Detector.
+	DependencyAuthor = identity.DependencyAuthor
+	// DependencyBlobCache identifies the dependency provided by BlobCache.
+	DependencyBlobCache = plumbing.DependencyBlobCache
+	// DependencyDay is the name of the dependency which DaysSinceStart provides - the number
+	// of days since the first commit in the analysed sequence.
+	DependencyDay = plumbing.DependencyDay
+	// DependencyFileDiff is the name of the dependency provided by FileDiff.
+	DependencyFileDiff = plumbing.DependencyFileDiff
+	// DependencyTreeChanges is the name of the dependency provided by TreeDiff.
+	DependencyTreeChanges = plumbing.DependencyTreeChanges
+	// DependencyUastChanges is the name of the dependency provided by Changes.
+	DependencyUastChanges = uast.DependencyUastChanges
+	// DependencyUasts is the name of the dependency provided by Extractor.
+	DependencyUasts = uast.DependencyUasts
+	// FactCommitsByDay contains the mapping between day indices and the corresponding commits.
+	FactCommitsByDay = plumbing.FactCommitsByDay
+	// FactIdentityDetectorPeopleCount is the name of the fact which is inserted in
+	// identity.Detector.Configure(). It is equal to the overall number of unique authors
+	// (the length of ReversedPeopleDict).
+	FactIdentityDetectorPeopleCount = identity.FactIdentityDetectorPeopleCount
+	// FactIdentityDetectorPeopleDict is the name of the fact which is inserted in
+	// identity.Detector.Configure(). It corresponds to identity.Detector.PeopleDict - the mapping
+	// from the signatures to the author indices.
+	FactIdentityDetectorPeopleDict = identity.FactIdentityDetectorPeopleDict
+	// FactIdentityDetectorReversedPeopleDict is the name of the fact which is inserted in
+	// identity.Detector.Configure(). It corresponds to identity.Detector.ReversedPeopleDict -
+	// the mapping from the author indices to the main signature.
+	FactIdentityDetectorReversedPeopleDict = identity.FactIdentityDetectorReversedPeopleDict
+)
+
+// FileDiffData is the type of the dependency provided by plumbing.FileDiff.
+type FileDiffData = plumbing.FileDiffData
+
+// CountLines returns the number of lines in a *object.Blob.
+func CountLines(file *object.Blob) (int, error) {
+	return plumbing.CountLines(file)
+}
+
+func init() {
+	// hack to link with .leaves
+	_ = leaves.BurndownAnalysis{}
+}

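core.go is new and consists almost entirely of Go 1.9 type aliases (type Pipeline = core.Pipeline) plus one-line wrappers for functions, which cannot be aliased. A self-contained sketch of why aliases, not type definitions, are required for the façade (names are illustrative):

    package main

    import "fmt"

    type internalPipeline struct{ items int }

    // Alias: facadePipeline and internalPipeline are one and the same type,
    // so values cross the façade boundary with no conversions.
    type facadePipeline = internalPipeline

    // Definition: distinctPipeline is a new type; it would break assignability
    // between the root package and internal/core.
    type distinctPipeline internalPipeline

    func main() {
    	var p internalPipeline = facadePipeline{items: 16} // identical types
    	q := distinctPipeline(p)                           // explicit conversion needed
    	fmt.Println(p.items, q.items)
    }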
pb/__init__.py → internal/__init__.py


+ 5 - 46
file.go

@@ -1,8 +1,10 @@
-package hercules
+package burndown
 
 import (
 	"fmt"
-	"gopkg.in/src-d/hercules.v3/rbtree"
+
+	"gopkg.in/src-d/hercules.v4/internal"
+	"gopkg.in/src-d/hercules.v4/internal/rbtree"
 )
 
 // Status is the something we would like to keep track of in File.Update().
@@ -36,49 +38,6 @@ func NewStatus(data interface{}, update func(interface{}, int, int, int)) Status
 // TreeEnd denotes the value of the last leaf in the tree.
 const TreeEnd int = -1
 
-// The ugly side of Go.
-// template <typename T> please!
-
-// min calculates the minimum of two 32-bit integers.
-func min(a int, b int) int {
-	if a < b {
-		return a
-	}
-	return b
-}
-
-// min64 calculates the minimum of two 64-bit integers.
-func min64(a int64, b int64) int64 {
-	if a < b {
-		return a
-	}
-	return b
-}
-
-// max calculates the maximum of two 32-bit integers.
-func max(a int, b int) int {
-	if a < b {
-		return b
-	}
-	return a
-}
-
-// max64 calculates the maximum of two 64-bit integers.
-func max64(a int64, b int64) int64 {
-	if a < b {
-		return b
-	}
-	return a
-}
-
-// abs64 calculates the absolute value of a 64-bit integer.
-func abs64(v int64) int64 {
-	if v <= 0 {
-		return -v
-	}
-	return v
-}
-
 func (file *File) updateTime(currentTime int, previousTime int, delta int) {
 	for _, status := range file.statuses {
 		status.update(status.data, currentTime, previousTime, delta)
@@ -199,7 +158,7 @@ func (file *File) Update(time int, pos int, insLength int, delLength int) {
 			}
 			break
 		}
-		delta := min(nextIter.Item().Key, pos+delLength) - max(node.Key, pos)
+		delta := internal.Min(nextIter.Item().Key, pos+delLength) - internal.Max(node.Key, pos)
 		if delta <= 0 {
 			break
 		}

+ 2 - 13
file_test.go

@@ -1,10 +1,10 @@
-package hercules
+package burndown
 
 import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
-	"gopkg.in/src-d/hercules.v3/rbtree"
+	"gopkg.in/src-d/hercules.v4/internal/rbtree"
 )
 
 func updateStatusFile(
@@ -369,17 +369,6 @@ func TestBug5File(t *testing.T) {
 	assert.Equal(t, "0 0\n14 157\n16 -1\n", dump)
 }
 
-func TestMinMaxAbs64Funcs(t *testing.T) {
-	var a int64 = 1
-	var b int64 = -1
-	assert.Equal(t, min64(a, b), b)
-	assert.Equal(t, max64(a, b), a)
-	assert.Equal(t, min64(b, a), b)
-	assert.Equal(t, max64(b, a), a)
-	assert.Equal(t, abs64(a), a)
-	assert.Equal(t, abs64(b), a)
-}
-
 func TestNewFileFromTreeInvalidSize(t *testing.T) {
 	keys := [...]int{1, 2, 3}
 	vals := [...]int{4, 5}

+ 93 - 0
internal/core/global_test.go

@@ -0,0 +1,93 @@
+package core_test
+
+import (
+	"io/ioutil"
+	"os"
+	"path"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	uast_items "gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
+	"gopkg.in/src-d/hercules.v4/internal/test"
+	"gopkg.in/src-d/hercules.v4/leaves"
+)
+
+func TestPipelineSerialize(t *testing.T) {
+	pipeline := core.NewPipeline(test.Repository)
+	pipeline.SetFeature(uast_items.FeatureUast)
+	pipeline.DeployItem(&leaves.BurndownAnalysis{})
+	facts := map[string]interface{}{}
+	facts["Pipeline.DryRun"] = true
+	tmpdir, _ := ioutil.TempDir("", "hercules-")
+	defer os.RemoveAll(tmpdir)
+	dotpath := path.Join(tmpdir, "graph.dot")
+	facts["Pipeline.DumpPath"] = dotpath
+	pipeline.Initialize(facts)
+	bdot, _ := ioutil.ReadFile(dotpath)
+	dot := string(bdot)
+	assert.Equal(t, `digraph Hercules {
+  "6 BlobCache" -> "7 [blob_cache]"
+  "0 DaysSinceStart" -> "3 [day]"
+  "9 FileDiff" -> "11 [file_diff]"
+  "15 FileDiffRefiner" -> "16 Burndown"
+  "1 IdentityDetector" -> "4 [author]"
+  "8 RenameAnalysis" -> "16 Burndown"
+  "8 RenameAnalysis" -> "9 FileDiff"
+  "8 RenameAnalysis" -> "10 UAST"
+  "8 RenameAnalysis" -> "13 UASTChanges"
+  "2 TreeDiff" -> "5 [changes]"
+  "10 UAST" -> "12 [uasts]"
+  "13 UASTChanges" -> "14 [changed_uasts]"
+  "4 [author]" -> "16 Burndown"
+  "7 [blob_cache]" -> "16 Burndown"
+  "7 [blob_cache]" -> "9 FileDiff"
+  "7 [blob_cache]" -> "8 RenameAnalysis"
+  "7 [blob_cache]" -> "10 UAST"
+  "14 [changed_uasts]" -> "15 FileDiffRefiner"
+  "5 [changes]" -> "6 BlobCache"
+  "5 [changes]" -> "8 RenameAnalysis"
+  "3 [day]" -> "16 Burndown"
+  "11 [file_diff]" -> "15 FileDiffRefiner"
+  "12 [uasts]" -> "13 UASTChanges"
+}`, dot)
+}
+
+func TestPipelineSerializeNoUast(t *testing.T) {
+	pipeline := core.NewPipeline(test.Repository)
+	// pipeline.SetFeature(FeatureUast)
+	pipeline.DeployItem(&leaves.BurndownAnalysis{})
+	facts := map[string]interface{}{}
+	facts["Pipeline.DryRun"] = true
+	tmpdir, _ := ioutil.TempDir("", "hercules-")
+	defer os.RemoveAll(tmpdir)
+	dotpath := path.Join(tmpdir, "graph.dot")
+	facts["Pipeline.DumpPath"] = dotpath
+	pipeline.Initialize(facts)
+	bdot, _ := ioutil.ReadFile(dotpath)
+	dot := string(bdot)
+	assert.Equal(t, `digraph Hercules {
+  "6 BlobCache" -> "7 [blob_cache]"
+  "0 DaysSinceStart" -> "3 [day]"
+  "9 FileDiff" -> "10 [file_diff]"
+  "1 IdentityDetector" -> "4 [author]"
+  "8 RenameAnalysis" -> "11 Burndown"
+  "8 RenameAnalysis" -> "9 FileDiff"
+  "2 TreeDiff" -> "5 [changes]"
+  "4 [author]" -> "11 Burndown"
+  "7 [blob_cache]" -> "11 Burndown"
+  "7 [blob_cache]" -> "9 FileDiff"
+  "7 [blob_cache]" -> "8 RenameAnalysis"
+  "5 [changes]" -> "6 BlobCache"
+  "5 [changes]" -> "8 RenameAnalysis"
+  "3 [day]" -> "11 Burndown"
+  "10 [file_diff]" -> "11 Burndown"
+}`, dot)
+}
+
+func TestPipelineResolveIntegration(t *testing.T) {
+	pipeline := core.NewPipeline(test.Repository)
+	pipeline.DeployItem(&leaves.BurndownAnalysis{})
+	pipeline.DeployItem(&leaves.CouplesAnalysis{})
+	pipeline.Initialize(nil)
+}

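Note the package clause: core_test, not core. The resurrected DAG tests live in an external test package, so they can import leaves (which itself imports internal/core) without an import cycle, and they exercise only the exported API. A minimal sketch of the pattern, reusing identifiers from the diff:

    // internal/core/smoke_test.go: a hypothetical file, compiled as a separate package.
    package core_test

    import (
    	"testing"

    	"gopkg.in/src-d/hercules.v4/internal/core"
    	"gopkg.in/src-d/hercules.v4/internal/test"
    	"gopkg.in/src-d/hercules.v4/leaves" // would be a cycle inside package core
    )

    // Only exported names of core are reachable from here.
    func TestExternalPackageSmoke(t *testing.T) {
    	pipeline := core.NewPipeline(test.Repository)
    	pipeline.DeployItem(&leaves.BurndownAnalysis{})
    	pipeline.Initialize(nil) // fails loudly if the DAG cannot resolve
    }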
+ 7 - 4
pipeline.go

@@ -1,4 +1,4 @@
-package hercules
+package core
 
 import (
 	"bufio"
@@ -16,8 +16,8 @@ import (
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
-	"gopkg.in/src-d/hercules.v3/toposort"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	"gopkg.in/src-d/hercules.v4/internal/toposort"
 )
 
 // ConfigurationOptionType represents the possible types of a ConfigurationOption's value.
@@ -180,8 +180,11 @@ func (car *CommonAnalysisResult) FillMetadata(meta *pb.Metadata) *pb.Metadata {
 	return meta
 }
 
+// Metadata is defined in internal/pb/pb.pb.go - header of the binary file.
+type Metadata = pb.Metadata
+
 // MetadataToCommonAnalysisResult copies the data from a Protobuf message.
-func MetadataToCommonAnalysisResult(meta *pb.Metadata) *CommonAnalysisResult {
+func MetadataToCommonAnalysisResult(meta *Metadata) *CommonAnalysisResult {
 	return &CommonAnalysisResult{
 		BeginTime:     meta.BeginUnixTime,
 		EndTime:       meta.EndUnixTime,

+ 19 - 124
pipeline_test.go

@@ -1,19 +1,18 @@
-package hercules
+package core
 
 import (
 	"errors"
 	"io"
 	"io/ioutil"
 	"os"
-	"path"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/go-git.v4/storage/memory"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 type testPipelineItem struct {
@@ -148,13 +147,13 @@ func (item *dependingTestPipelineItem) Serialize(result interface{}, binary bool
 }
 
 func TestPipelineFacts(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	pipeline.SetFact("fact", "value")
 	assert.Equal(t, pipeline.GetFact("fact"), "value")
 }
 
 func TestPipelineFeatures(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	pipeline.SetFeature("feat")
 	val, _ := pipeline.GetFeature("feat")
 	assert.True(t, val)
@@ -174,13 +173,13 @@ func TestPipelineFeatures(t *testing.T) {
 }
 
 func TestPipelineRun(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	item := &testPipelineItem{}
 	pipeline.AddItem(item)
 	pipeline.Initialize(map[string]interface{}{})
 	assert.True(t, item.Initialized)
 	commits := make([]*object.Commit, 1)
-	commits[0], _ = testRepository.CommitObject(plumbing.NewHash(
+	commits[0], _ = test.Repository.CommitObject(plumbing.NewHash(
 		"af9ddc0db70f09f3f27b4b98e415592a7485171c"))
 	result, err := pipeline.Run(commits)
 	assert.Nil(t, err)
@@ -201,7 +200,7 @@ func TestPipelineRun(t *testing.T) {
 }
 
 func TestPipelineOnProgress(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	var progressOk1, progressOk2 bool
 
 	onProgress := func(step int, total int) {
@@ -215,7 +214,7 @@ func TestPipelineOnProgress(t *testing.T) {
 
 	pipeline.OnProgress = onProgress
 	commits := make([]*object.Commit, 1)
-	commits[0], _ = testRepository.CommitObject(plumbing.NewHash(
+	commits[0], _ = test.Repository.CommitObject(plumbing.NewHash(
 		"af9ddc0db70f09f3f27b4b98e415592a7485171c"))
 	result, err := pipeline.Run(commits)
 	assert.Nil(t, err)
@@ -225,7 +224,7 @@ func TestPipelineOnProgress(t *testing.T) {
 }
 
 func TestPipelineCommits(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	commits := pipeline.Commits()
 	assert.True(t, len(commits) >= 90)
 	assert.Equal(t, commits[0].Hash, plumbing.NewHash(
@@ -241,14 +240,14 @@ func TestLoadCommitsFromFile(t *testing.T) {
 	tmp.WriteString("cce947b98a050c6d356bc6ba95030254914027b1\n6db8065cdb9bb0758f36a7e75fc72ab95f9e8145")
 	tmp.Close()
 	defer os.Remove(tmp.Name())
-	commits, err := LoadCommitsFromFile(tmp.Name(), testRepository)
+	commits, err := LoadCommitsFromFile(tmp.Name(), test.Repository)
 	assert.Nil(t, err)
 	assert.Equal(t, len(commits), 2)
 	assert.Equal(t, commits[0].Hash, plumbing.NewHash(
 		"cce947b98a050c6d356bc6ba95030254914027b1"))
 	assert.Equal(t, commits[1].Hash, plumbing.NewHash(
 		"6db8065cdb9bb0758f36a7e75fc72ab95f9e8145"))
-	commits, err = LoadCommitsFromFile("/WAT?xxx!", testRepository)
+	commits, err = LoadCommitsFromFile("/WAT?xxx!", test.Repository)
 	assert.Nil(t, commits)
 	assert.NotNil(t, err)
 	tmp, err = ioutil.TempFile("", "hercules-test-")
@@ -256,7 +255,7 @@ func TestLoadCommitsFromFile(t *testing.T) {
 	tmp.WriteString("WAT")
 	tmp.Close()
 	defer os.Remove(tmp.Name())
-	commits, err = LoadCommitsFromFile(tmp.Name(), testRepository)
+	commits, err = LoadCommitsFromFile(tmp.Name(), test.Repository)
 	assert.Nil(t, commits)
 	assert.NotNil(t, err)
 	tmp, err = ioutil.TempFile("", "hercules-test-")
@@ -264,13 +263,13 @@ func TestLoadCommitsFromFile(t *testing.T) {
 	tmp.WriteString("ffffffffffffffffffffffffffffffffffffffff")
 	tmp.Close()
 	defer os.Remove(tmp.Name())
-	commits, err = LoadCommitsFromFile(tmp.Name(), testRepository)
+	commits, err = LoadCommitsFromFile(tmp.Name(), test.Repository)
 	assert.Nil(t, commits)
 	assert.NotNil(t, err)
 }
 
 func TestPipelineDeps(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	item1 := &dependingTestPipelineItem{}
 	item2 := &testPipelineItem{}
 	pipeline.AddItem(item1)
@@ -278,7 +277,7 @@ func TestPipelineDeps(t *testing.T) {
 	assert.Equal(t, pipeline.Len(), 2)
 	pipeline.Initialize(map[string]interface{}{})
 	commits := make([]*object.Commit, 1)
-	commits[0], _ = testRepository.CommitObject(plumbing.NewHash(
+	commits[0], _ = test.Repository.CommitObject(plumbing.NewHash(
 		"af9ddc0db70f09f3f27b4b98e415592a7485171c"))
 	result, err := pipeline.Run(commits)
 	assert.Nil(t, err)
@@ -289,98 +288,26 @@ func TestPipelineDeps(t *testing.T) {
 }
 
 func TestPipelineDeployFeatures(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	pipeline.DeployItem(&testPipelineItem{})
 	f, _ := pipeline.GetFeature("power")
 	assert.True(t, f)
 }
 
 func TestPipelineError(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	item := &testPipelineItem{}
 	item.TestError = true
 	pipeline.AddItem(item)
 	pipeline.Initialize(map[string]interface{}{})
 	commits := make([]*object.Commit, 1)
-	commits[0], _ = testRepository.CommitObject(plumbing.NewHash(
+	commits[0], _ = test.Repository.CommitObject(plumbing.NewHash(
 		"af9ddc0db70f09f3f27b4b98e415592a7485171c"))
 	result, err := pipeline.Run(commits)
 	assert.Nil(t, result)
 	assert.NotNil(t, err)
 }
 
-func TestPipelineSerialize(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
-	pipeline.SetFeature(FeatureUast)
-	pipeline.DeployItem(&BurndownAnalysis{})
-	facts := map[string]interface{}{}
-	facts["Pipeline.DryRun"] = true
-	tmpdir, _ := ioutil.TempDir("", "hercules-")
-	defer os.RemoveAll(tmpdir)
-	dotpath := path.Join(tmpdir, "graph.dot")
-	facts["Pipeline.DumpPath"] = dotpath
-	pipeline.Initialize(facts)
-	bdot, _ := ioutil.ReadFile(dotpath)
-	dot := string(bdot)
-	assert.Equal(t, `digraph Hercules {
-  "6 BlobCache" -> "7 [blob_cache]"
-  "0 DaysSinceStart" -> "3 [day]"
-  "9 FileDiff" -> "11 [file_diff]"
-  "15 FileDiffRefiner" -> "16 Burndown"
-  "1 IdentityDetector" -> "4 [author]"
-  "8 RenameAnalysis" -> "16 Burndown"
-  "8 RenameAnalysis" -> "9 FileDiff"
-  "8 RenameAnalysis" -> "10 UAST"
-  "8 RenameAnalysis" -> "13 UASTChanges"
-  "2 TreeDiff" -> "5 [changes]"
-  "10 UAST" -> "12 [uasts]"
-  "13 UASTChanges" -> "14 [changed_uasts]"
-  "4 [author]" -> "16 Burndown"
-  "7 [blob_cache]" -> "16 Burndown"
-  "7 [blob_cache]" -> "9 FileDiff"
-  "7 [blob_cache]" -> "8 RenameAnalysis"
-  "7 [blob_cache]" -> "10 UAST"
-  "14 [changed_uasts]" -> "15 FileDiffRefiner"
-  "5 [changes]" -> "6 BlobCache"
-  "5 [changes]" -> "8 RenameAnalysis"
-  "3 [day]" -> "16 Burndown"
-  "11 [file_diff]" -> "15 FileDiffRefiner"
-  "12 [uasts]" -> "13 UASTChanges"
-}`, dot)
-}
-
-func TestPipelineSerializeNoUast(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
-	// pipeline.SetFeature(FeatureUast)
-	pipeline.DeployItem(&BurndownAnalysis{})
-	facts := map[string]interface{}{}
-	facts["Pipeline.DryRun"] = true
-	tmpdir, _ := ioutil.TempDir("", "hercules-")
-	defer os.RemoveAll(tmpdir)
-	dotpath := path.Join(tmpdir, "graph.dot")
-	facts["Pipeline.DumpPath"] = dotpath
-	pipeline.Initialize(facts)
-	bdot, _ := ioutil.ReadFile(dotpath)
-	dot := string(bdot)
-	assert.Equal(t, `digraph Hercules {
-  "6 BlobCache" -> "7 [blob_cache]"
-  "0 DaysSinceStart" -> "3 [day]"
-  "9 FileDiff" -> "10 [file_diff]"
-  "1 IdentityDetector" -> "4 [author]"
-  "8 RenameAnalysis" -> "11 Burndown"
-  "8 RenameAnalysis" -> "9 FileDiff"
-  "2 TreeDiff" -> "5 [changes]"
-  "4 [author]" -> "11 Burndown"
-  "7 [blob_cache]" -> "11 Burndown"
-  "7 [blob_cache]" -> "9 FileDiff"
-  "7 [blob_cache]" -> "8 RenameAnalysis"
-  "5 [changes]" -> "6 BlobCache"
-  "5 [changes]" -> "8 RenameAnalysis"
-  "3 [day]" -> "11 Burndown"
-  "10 [file_diff]" -> "11 Burndown"
-}`, dot)
-}
-
 func TestCommonAnalysisResultMerge(t *testing.T) {
 	c1 := CommonAnalysisResult{
 		BeginTime: 1513620635, EndTime: 1513720635, CommitsNumber: 1, RunTime: 100}
@@ -406,13 +333,6 @@ func TestCommonAnalysisResultMetadata(t *testing.T) {
 	assert.Equal(t, c1.RunTime.Nanoseconds(), int64(100*1e6))
 }
 
-func TestPipelineResolveIntegration(t *testing.T) {
-	pipeline := NewPipeline(testRepository)
-	pipeline.DeployItem(&BurndownAnalysis{})
-	pipeline.DeployItem(&CouplesAnalysis{})
-	pipeline.Initialize(nil)
-}
-
 func TestConfigurationOptionTypeString(t *testing.T) {
 	opt := ConfigurationOptionType(0)
 	assert.Equal(t, opt.String(), "")
@@ -438,28 +358,3 @@ func TestConfigurationOptionFormatDefault(t *testing.T) {
 	opt = ConfigurationOption{Type: FloatConfigurationOption, Default: 0.5}
 	assert.Equal(t, opt.FormatDefault(), "0.5")
 }
-
-func init() {
-	cwd, err := os.Getwd()
-	if err == nil {
-		testRepository, err = git.PlainOpen(cwd)
-		if err == nil {
-			iter, err := testRepository.CommitObjects()
-			if err == nil {
-				commits := -1
-				for ; err != io.EOF; _, err = iter.Next() {
-					if err != nil {
-						panic(err)
-					}
-					commits++
-					if commits >= 100 {
-						return
-					}
-				}
-			}
-		}
-	}
-	testRepository, _ = git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
-		URL: "https://github.com/src-d/hercules",
-	})
-}

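The init() deleted here (open the working tree as a repository, fall back to cloning src-d/hercules into memory, and stop scanning after 100 commits) is what internal/test/repository.go (+51 -0 in the file list) presumably now provides as test.Repository. A hedged reconstruction, assuming the logic moved over verbatim:

    // internal/test/repository.go (hypothetical reconstruction)
    package test

    import (
    	"io"
    	"os"

    	git "gopkg.in/src-d/go-git.v4"
    	"gopkg.in/src-d/go-git.v4/storage/memory"
    )

    // Repository is the fixture shared by the test suites of every package.
    var Repository *git.Repository

    func init() {
    	cwd, err := os.Getwd()
    	if err == nil {
    		Repository, err = git.PlainOpen(cwd)
    		if err == nil {
    			iter, err := Repository.CommitObjects()
    			if err == nil {
    				commits := -1
    				for ; err != io.EOF; _, err = iter.Next() {
    					if err != nil {
    						panic(err)
    					}
    					commits++
    					if commits >= 100 {
    						return
    					}
    				}
    			}
    		}
    	}
    	Repository, _ = git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
    		URL: "https://github.com/src-d/hercules",
    	})
    }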
+ 1 - 1
registry.go

@@ -1,4 +1,4 @@
-package hercules
+package core
 
 import (
 	"fmt"

+ 3 - 2
registry_test.go

@@ -1,4 +1,4 @@
-package hercules
+package core
 
 import (
 	"reflect"
@@ -7,6 +7,7 @@ import (
 	"github.com/spf13/cobra"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func getRegistry() *PipelineItemRegistry {
@@ -146,7 +147,7 @@ func TestRegistryFeatures(t *testing.T) {
 	reg.AddFlags(testCmd.Flags())
 	args := [...]string{"--feature", "other", "--feature", "power"}
 	testCmd.ParseFlags(args[:])
-	pipeline := NewPipeline(testRepository)
+	pipeline := NewPipeline(test.Repository)
 	val, _ := pipeline.GetFeature("power")
 	assert.False(t, val)
 	val, _ = pipeline.GetFeature("other")

+ 5 - 3
dummies.go

@@ -1,4 +1,4 @@
-package hercules
+package internal
 
 import (
 	"io"
@@ -60,9 +60,11 @@ func (obj dummyEncodedObject) Writer() (io.WriteCloser, error) {
 	return nil, errors.New("dummy failure")
 }
 
-func createDummyBlob(hash plumbing.Hash, fails ...bool) (*object.Blob, error) {
+// CreateDummyBlob constructs a fake object.Blob with empty contents.
+// Optionally returns an error if read or written.
+func CreateDummyBlob(hash plumbing.Hash, fails ...bool) (*object.Blob, error) {
 	if len(fails) > 1 {
-		panic("invalid usage of createDummyBlob() - this is a bug")
+		panic("invalid usage of CreateDummyBlob() - this is a bug")
 	}
 	var realFails bool
 	if len(fails) == 1 {

+ 4 - 4
dummies_test.go

@@ -1,4 +1,4 @@
-package hercules
+package internal
 
 import (
 	"io"
@@ -9,7 +9,7 @@ import (
 )
 
 func TestCreateDummyBlob(t *testing.T) {
-	dummy, err := createDummyBlob(plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485"))
+	dummy, err := CreateDummyBlob(plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485"))
 	assert.Nil(t, err)
 	assert.Equal(t, dummy.Hash.String(), "334cde09da4afcb74f8d2b3e6fd6cce61228b485")
 	assert.Equal(t, dummy.Size, int64(0))
@@ -25,13 +25,13 @@ func TestCreateDummyBlob(t *testing.T) {
 }
 
 func TestCreateDummyBlobFails(t *testing.T) {
-	dummy, err := createDummyBlob(plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485"), true)
+	dummy, err := CreateDummyBlob(plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485"), true)
 	assert.Nil(t, err)
 	reader, err := dummy.Reader()
 	assert.Nil(t, reader)
 	assert.NotNil(t, err)
 	assert.Panics(t, func() {
-		createDummyBlob(plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485"), true, true)
+		CreateDummyBlob(plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485"), true, true)
 	})
 }
 

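CreateDummyBlob, exported so that other internal packages can reuse it, emulates an optional argument with a variadic tail: zero or one boolean is accepted, anything more panics. A minimal usage sketch from inside the module, with the hash and the expected values taken from the test above:

    package internal_test

    import (
    	"fmt"

    	"gopkg.in/src-d/go-git.v4/plumbing"
    	"gopkg.in/src-d/hercules.v4/internal"
    )

    func ExampleCreateDummyBlob() {
    	hash := plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485")
    	healthy, _ := internal.CreateDummyBlob(hash)      // empty, readable blob
    	broken, _ := internal.CreateDummyBlob(hash, true) // Reader() returns an error
    	_, err := broken.Reader()
    	fmt.Println(healthy.Size, err != nil)
    	// Output: 0 true
    }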
+ 44 - 0
internal/math.go

@@ -0,0 +1,44 @@
+package internal
+
+// The ugly side of Go.
+// template <typename T> please!
+
+// Min calculates the minimum of two 32-bit integers.
+func Min(a int, b int) int {
+	if a < b {
+		return a
+	}
+	return b
+}
+
+// Min64 calculates the minimum of two 64-bit integers.
+func Min64(a int64, b int64) int64 {
+	if a < b {
+		return a
+	}
+	return b
+}
+
+// Max calculates the maximum of two 32-bit integers.
+func Max(a int, b int) int {
+	if a < b {
+		return b
+	}
+	return a
+}
+
+// Max64 calculates the maximum of two 64-bit integers.
+func Max64(a int64, b int64) int64 {
+	if a < b {
+		return b
+	}
+	return a
+}
+
+// Abs64 calculates the absolute value of a 64-bit integer.
+func Abs64(v int64) int64 {
+	if v <= 0 {
+		return -v
+	}
+	return v
+}

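The header comment wishes for C++ templates; Go 1.18 type parameters, which arrived four years after this commit, would indeed fold each int/int64 pair into one function. A forward-looking sketch, not part of this change:

    package internal

    type integer interface {
    	~int | ~int64
    }

    // MinOf subsumes Min and Min64 under a single type-parameterized function.
    func MinOf[T integer](a, b T) T {
    	if a < b {
    		return a
    	}
    	return b
    }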
+ 18 - 0
internal/math_test.go

@@ -0,0 +1,18 @@
+package internal
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestMinMaxAbs64Funcs(t *testing.T) {
+	var a int64 = 1
+	var b int64 = -1
+	assert.Equal(t, Min64(a, b), b)
+	assert.Equal(t, Max64(a, b), a)
+	assert.Equal(t, Min64(b, a), b)
+	assert.Equal(t, Max64(b, a), a)
+	assert.Equal(t, Abs64(a), a)
+	assert.Equal(t, Abs64(b), a)
+}

test_data/gitmodules_empty → internal/pb/__init__.py


+ 685 - 0
internal/pb/pb.pb.go

@@ -0,0 +1,685 @@
+// Code generated by protoc-gen-gogo. DO NOT EDIT.
+// source: pb.proto
+
+/*
+Package pb is a generated protocol buffer package.
+
+It is generated from these files:
+	pb.proto
+
+It has these top-level messages:
+	Metadata
+	BurndownSparseMatrixRow
+	BurndownSparseMatrix
+	BurndownAnalysisResults
+	CompressedSparseRowMatrix
+	Couples
+	TouchedFiles
+	CouplesAnalysisResults
+	UASTChange
+	UASTChangesSaverResults
+	ShotnessRecord
+	ShotnessAnalysisResults
+	FileHistory
+	FileHistoryResultMessage
+	Sentiment
+	CommentSentimentResults
+	AnalysisResults
+*/
+package pb
+
+import proto "github.com/gogo/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
+
+type Metadata struct {
+	// this format is versioned
+	Version int32 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"`
+	// git hash of the revision from which Hercules is built
+	Hash string `protobuf:"bytes,2,opt,name=hash,proto3" json:"hash,omitempty"`
+	// repository's name
+	Repository string `protobuf:"bytes,3,opt,name=repository,proto3" json:"repository,omitempty"`
+	// UNIX timestamp of the first analysed commit
+	BeginUnixTime int64 `protobuf:"varint,4,opt,name=begin_unix_time,json=beginUnixTime,proto3" json:"begin_unix_time,omitempty"`
+	// UNIX timestamp of the last analysed commit
+	EndUnixTime int64 `protobuf:"varint,5,opt,name=end_unix_time,json=endUnixTime,proto3" json:"end_unix_time,omitempty"`
+	// number of processed commits
+	Commits int32 `protobuf:"varint,6,opt,name=commits,proto3" json:"commits,omitempty"`
+	// duration of the analysis in milliseconds
+	RunTime int64 `protobuf:"varint,7,opt,name=run_time,json=runTime,proto3" json:"run_time,omitempty"`
+}
+
+func (m *Metadata) Reset()                    { *m = Metadata{} }
+func (m *Metadata) String() string            { return proto.CompactTextString(m) }
+func (*Metadata) ProtoMessage()               {}
+func (*Metadata) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{0} }
+
+func (m *Metadata) GetVersion() int32 {
+	if m != nil {
+		return m.Version
+	}
+	return 0
+}
+
+func (m *Metadata) GetHash() string {
+	if m != nil {
+		return m.Hash
+	}
+	return ""
+}
+
+func (m *Metadata) GetRepository() string {
+	if m != nil {
+		return m.Repository
+	}
+	return ""
+}
+
+func (m *Metadata) GetBeginUnixTime() int64 {
+	if m != nil {
+		return m.BeginUnixTime
+	}
+	return 0
+}
+
+func (m *Metadata) GetEndUnixTime() int64 {
+	if m != nil {
+		return m.EndUnixTime
+	}
+	return 0
+}
+
+func (m *Metadata) GetCommits() int32 {
+	if m != nil {
+		return m.Commits
+	}
+	return 0
+}
+
+func (m *Metadata) GetRunTime() int64 {
+	if m != nil {
+		return m.RunTime
+	}
+	return 0
+}
+
+type BurndownSparseMatrixRow struct {
+	// the first `len(column)` elements are stored,
+	// the rest `number_of_columns - len(column)` values are zeros
+	Columns []uint32 `protobuf:"varint,1,rep,packed,name=columns" json:"columns,omitempty"`
+}
+
+func (m *BurndownSparseMatrixRow) Reset()                    { *m = BurndownSparseMatrixRow{} }
+func (m *BurndownSparseMatrixRow) String() string            { return proto.CompactTextString(m) }
+func (*BurndownSparseMatrixRow) ProtoMessage()               {}
+func (*BurndownSparseMatrixRow) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{1} }
+
+func (m *BurndownSparseMatrixRow) GetColumns() []uint32 {
+	if m != nil {
+		return m.Columns
+	}
+	return nil
+}
+
+type BurndownSparseMatrix struct {
+	Name            string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+	NumberOfRows    int32  `protobuf:"varint,2,opt,name=number_of_rows,json=numberOfRows,proto3" json:"number_of_rows,omitempty"`
+	NumberOfColumns int32  `protobuf:"varint,3,opt,name=number_of_columns,json=numberOfColumns,proto3" json:"number_of_columns,omitempty"`
+	// `len(row)` matches `number_of_rows`
+	Rows []*BurndownSparseMatrixRow `protobuf:"bytes,4,rep,name=rows" json:"rows,omitempty"`
+}
+
+func (m *BurndownSparseMatrix) Reset()                    { *m = BurndownSparseMatrix{} }
+func (m *BurndownSparseMatrix) String() string            { return proto.CompactTextString(m) }
+func (*BurndownSparseMatrix) ProtoMessage()               {}
+func (*BurndownSparseMatrix) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{2} }
+
+func (m *BurndownSparseMatrix) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *BurndownSparseMatrix) GetNumberOfRows() int32 {
+	if m != nil {
+		return m.NumberOfRows
+	}
+	return 0
+}
+
+func (m *BurndownSparseMatrix) GetNumberOfColumns() int32 {
+	if m != nil {
+		return m.NumberOfColumns
+	}
+	return 0
+}
+
+func (m *BurndownSparseMatrix) GetRows() []*BurndownSparseMatrixRow {
+	if m != nil {
+		return m.Rows
+	}
+	return nil
+}
+
+type BurndownAnalysisResults struct {
+	// how many days are in each band [burndown_project, burndown_file, burndown_developer]
+	Granularity int32 `protobuf:"varint,1,opt,name=granularity,proto3" json:"granularity,omitempty"`
+	// how frequently we measure the state of each band [burndown_project, burndown_file, burndown_developer]
+	Sampling int32 `protobuf:"varint,2,opt,name=sampling,proto3" json:"sampling,omitempty"`
+	// always exists
+	Project *BurndownSparseMatrix `protobuf:"bytes,3,opt,name=project" json:"project,omitempty"`
+	// this is included if `-burndown-files` was specified
+	Files []*BurndownSparseMatrix `protobuf:"bytes,4,rep,name=files" json:"files,omitempty"`
+	// these two are included if `-burndown-people` was specified
+	People []*BurndownSparseMatrix `protobuf:"bytes,5,rep,name=people" json:"people,omitempty"`
+	// rows and cols order correspond to `burndown_developer`
+	PeopleInteraction *CompressedSparseRowMatrix `protobuf:"bytes,6,opt,name=people_interaction,json=peopleInteraction" json:"people_interaction,omitempty"`
+}
+
+func (m *BurndownAnalysisResults) Reset()                    { *m = BurndownAnalysisResults{} }
+func (m *BurndownAnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*BurndownAnalysisResults) ProtoMessage()               {}
+func (*BurndownAnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{3} }
+
+func (m *BurndownAnalysisResults) GetGranularity() int32 {
+	if m != nil {
+		return m.Granularity
+	}
+	return 0
+}
+
+func (m *BurndownAnalysisResults) GetSampling() int32 {
+	if m != nil {
+		return m.Sampling
+	}
+	return 0
+}
+
+func (m *BurndownAnalysisResults) GetProject() *BurndownSparseMatrix {
+	if m != nil {
+		return m.Project
+	}
+	return nil
+}
+
+func (m *BurndownAnalysisResults) GetFiles() []*BurndownSparseMatrix {
+	if m != nil {
+		return m.Files
+	}
+	return nil
+}
+
+func (m *BurndownAnalysisResults) GetPeople() []*BurndownSparseMatrix {
+	if m != nil {
+		return m.People
+	}
+	return nil
+}
+
+func (m *BurndownAnalysisResults) GetPeopleInteraction() *CompressedSparseRowMatrix {
+	if m != nil {
+		return m.PeopleInteraction
+	}
+	return nil
+}
+
+type CompressedSparseRowMatrix struct {
+	NumberOfRows    int32 `protobuf:"varint,1,opt,name=number_of_rows,json=numberOfRows,proto3" json:"number_of_rows,omitempty"`
+	NumberOfColumns int32 `protobuf:"varint,2,opt,name=number_of_columns,json=numberOfColumns,proto3" json:"number_of_columns,omitempty"`
+	// https://en.wikipedia.org/wiki/Sparse_matrix#Compressed_sparse_row_.28CSR.2C_CRS_or_Yale_format.29
+	Data    []int64 `protobuf:"varint,3,rep,packed,name=data" json:"data,omitempty"`
+	Indices []int32 `protobuf:"varint,4,rep,packed,name=indices" json:"indices,omitempty"`
+	Indptr  []int64 `protobuf:"varint,5,rep,packed,name=indptr" json:"indptr,omitempty"`
+}
+
+func (m *CompressedSparseRowMatrix) Reset()                    { *m = CompressedSparseRowMatrix{} }
+func (m *CompressedSparseRowMatrix) String() string            { return proto.CompactTextString(m) }
+func (*CompressedSparseRowMatrix) ProtoMessage()               {}
+func (*CompressedSparseRowMatrix) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{4} }
+
+func (m *CompressedSparseRowMatrix) GetNumberOfRows() int32 {
+	if m != nil {
+		return m.NumberOfRows
+	}
+	return 0
+}
+
+func (m *CompressedSparseRowMatrix) GetNumberOfColumns() int32 {
+	if m != nil {
+		return m.NumberOfColumns
+	}
+	return 0
+}
+
+func (m *CompressedSparseRowMatrix) GetData() []int64 {
+	if m != nil {
+		return m.Data
+	}
+	return nil
+}
+
+func (m *CompressedSparseRowMatrix) GetIndices() []int32 {
+	if m != nil {
+		return m.Indices
+	}
+	return nil
+}
+
+func (m *CompressedSparseRowMatrix) GetIndptr() []int64 {
+	if m != nil {
+		return m.Indptr
+	}
+	return nil
+}
+
+type Couples struct {
+	// name of each `matrix`'s row and column
+	Index []string `protobuf:"bytes,1,rep,name=index" json:"index,omitempty"`
+	// is always square
+	Matrix *CompressedSparseRowMatrix `protobuf:"bytes,2,opt,name=matrix" json:"matrix,omitempty"`
+}
+
+func (m *Couples) Reset()                    { *m = Couples{} }
+func (m *Couples) String() string            { return proto.CompactTextString(m) }
+func (*Couples) ProtoMessage()               {}
+func (*Couples) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{5} }
+
+func (m *Couples) GetIndex() []string {
+	if m != nil {
+		return m.Index
+	}
+	return nil
+}
+
+func (m *Couples) GetMatrix() *CompressedSparseRowMatrix {
+	if m != nil {
+		return m.Matrix
+	}
+	return nil
+}
+
+type TouchedFiles struct {
+	Files []int32 `protobuf:"varint,1,rep,packed,name=files" json:"files,omitempty"`
+}
+
+func (m *TouchedFiles) Reset()                    { *m = TouchedFiles{} }
+func (m *TouchedFiles) String() string            { return proto.CompactTextString(m) }
+func (*TouchedFiles) ProtoMessage()               {}
+func (*TouchedFiles) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{6} }
+
+func (m *TouchedFiles) GetFiles() []int32 {
+	if m != nil {
+		return m.Files
+	}
+	return nil
+}
+
+type CouplesAnalysisResults struct {
+	FileCouples   *Couples `protobuf:"bytes,6,opt,name=file_couples,json=fileCouples" json:"file_couples,omitempty"`
+	PeopleCouples *Couples `protobuf:"bytes,7,opt,name=people_couples,json=peopleCouples" json:"people_couples,omitempty"`
+	// order corresponds to `people_couples::index`
+	PeopleFiles []*TouchedFiles `protobuf:"bytes,8,rep,name=people_files,json=peopleFiles" json:"people_files,omitempty"`
+}
+
+func (m *CouplesAnalysisResults) Reset()                    { *m = CouplesAnalysisResults{} }
+func (m *CouplesAnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*CouplesAnalysisResults) ProtoMessage()               {}
+func (*CouplesAnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{7} }
+
+func (m *CouplesAnalysisResults) GetFileCouples() *Couples {
+	if m != nil {
+		return m.FileCouples
+	}
+	return nil
+}
+
+func (m *CouplesAnalysisResults) GetPeopleCouples() *Couples {
+	if m != nil {
+		return m.PeopleCouples
+	}
+	return nil
+}
+
+func (m *CouplesAnalysisResults) GetPeopleFiles() []*TouchedFiles {
+	if m != nil {
+		return m.PeopleFiles
+	}
+	return nil
+}
+
+type UASTChange struct {
+	FileName   string `protobuf:"bytes,1,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"`
+	SrcBefore  string `protobuf:"bytes,2,opt,name=src_before,json=srcBefore,proto3" json:"src_before,omitempty"`
+	SrcAfter   string `protobuf:"bytes,3,opt,name=src_after,json=srcAfter,proto3" json:"src_after,omitempty"`
+	UastBefore string `protobuf:"bytes,4,opt,name=uast_before,json=uastBefore,proto3" json:"uast_before,omitempty"`
+	UastAfter  string `protobuf:"bytes,5,opt,name=uast_after,json=uastAfter,proto3" json:"uast_after,omitempty"`
+}
+
+func (m *UASTChange) Reset()                    { *m = UASTChange{} }
+func (m *UASTChange) String() string            { return proto.CompactTextString(m) }
+func (*UASTChange) ProtoMessage()               {}
+func (*UASTChange) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{8} }
+
+func (m *UASTChange) GetFileName() string {
+	if m != nil {
+		return m.FileName
+	}
+	return ""
+}
+
+func (m *UASTChange) GetSrcBefore() string {
+	if m != nil {
+		return m.SrcBefore
+	}
+	return ""
+}
+
+func (m *UASTChange) GetSrcAfter() string {
+	if m != nil {
+		return m.SrcAfter
+	}
+	return ""
+}
+
+func (m *UASTChange) GetUastBefore() string {
+	if m != nil {
+		return m.UastBefore
+	}
+	return ""
+}
+
+func (m *UASTChange) GetUastAfter() string {
+	if m != nil {
+		return m.UastAfter
+	}
+	return ""
+}
+
+type UASTChangesSaverResults struct {
+	Changes []*UASTChange `protobuf:"bytes,1,rep,name=changes" json:"changes,omitempty"`
+}
+
+func (m *UASTChangesSaverResults) Reset()                    { *m = UASTChangesSaverResults{} }
+func (m *UASTChangesSaverResults) String() string            { return proto.CompactTextString(m) }
+func (*UASTChangesSaverResults) ProtoMessage()               {}
+func (*UASTChangesSaverResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{9} }
+
+func (m *UASTChangesSaverResults) GetChanges() []*UASTChange {
+	if m != nil {
+		return m.Changes
+	}
+	return nil
+}
+
+type ShotnessRecord struct {
+	InternalRole string          `protobuf:"bytes,1,opt,name=internal_role,json=internalRole,proto3" json:"internal_role,omitempty"`
+	Roles        []int32         `protobuf:"varint,2,rep,packed,name=roles" json:"roles,omitempty"`
+	Name         string          `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
+	File         string          `protobuf:"bytes,4,opt,name=file,proto3" json:"file,omitempty"`
+	Counters     map[int32]int32 `protobuf:"bytes,5,rep,name=counters" json:"counters,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
+}
+
+func (m *ShotnessRecord) Reset()                    { *m = ShotnessRecord{} }
+func (m *ShotnessRecord) String() string            { return proto.CompactTextString(m) }
+func (*ShotnessRecord) ProtoMessage()               {}
+func (*ShotnessRecord) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{10} }
+
+func (m *ShotnessRecord) GetInternalRole() string {
+	if m != nil {
+		return m.InternalRole
+	}
+	return ""
+}
+
+func (m *ShotnessRecord) GetRoles() []int32 {
+	if m != nil {
+		return m.Roles
+	}
+	return nil
+}
+
+func (m *ShotnessRecord) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *ShotnessRecord) GetFile() string {
+	if m != nil {
+		return m.File
+	}
+	return ""
+}
+
+func (m *ShotnessRecord) GetCounters() map[int32]int32 {
+	if m != nil {
+		return m.Counters
+	}
+	return nil
+}
+
+type ShotnessAnalysisResults struct {
+	Records []*ShotnessRecord `protobuf:"bytes,1,rep,name=records" json:"records,omitempty"`
+}
+
+func (m *ShotnessAnalysisResults) Reset()                    { *m = ShotnessAnalysisResults{} }
+func (m *ShotnessAnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*ShotnessAnalysisResults) ProtoMessage()               {}
+func (*ShotnessAnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{11} }
+
+func (m *ShotnessAnalysisResults) GetRecords() []*ShotnessRecord {
+	if m != nil {
+		return m.Records
+	}
+	return nil
+}
+
+type FileHistory struct {
+	Commits []string `protobuf:"bytes,1,rep,name=commits" json:"commits,omitempty"`
+}
+
+func (m *FileHistory) Reset()                    { *m = FileHistory{} }
+func (m *FileHistory) String() string            { return proto.CompactTextString(m) }
+func (*FileHistory) ProtoMessage()               {}
+func (*FileHistory) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{12} }
+
+func (m *FileHistory) GetCommits() []string {
+	if m != nil {
+		return m.Commits
+	}
+	return nil
+}
+
+type FileHistoryResultMessage struct {
+	Files map[string]*FileHistory `protobuf:"bytes,1,rep,name=files" json:"files,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
+}
+
+func (m *FileHistoryResultMessage) Reset()                    { *m = FileHistoryResultMessage{} }
+func (m *FileHistoryResultMessage) String() string            { return proto.CompactTextString(m) }
+func (*FileHistoryResultMessage) ProtoMessage()               {}
+func (*FileHistoryResultMessage) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{13} }
+
+func (m *FileHistoryResultMessage) GetFiles() map[string]*FileHistory {
+	if m != nil {
+		return m.Files
+	}
+	return nil
+}
+
+type Sentiment struct {
+	Value    float32  `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"`
+	Comments []string `protobuf:"bytes,2,rep,name=comments" json:"comments,omitempty"`
+	Commits  []string `protobuf:"bytes,3,rep,name=commits" json:"commits,omitempty"`
+}
+
+func (m *Sentiment) Reset()                    { *m = Sentiment{} }
+func (m *Sentiment) String() string            { return proto.CompactTextString(m) }
+func (*Sentiment) ProtoMessage()               {}
+func (*Sentiment) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{14} }
+
+func (m *Sentiment) GetValue() float32 {
+	if m != nil {
+		return m.Value
+	}
+	return 0
+}
+
+func (m *Sentiment) GetComments() []string {
+	if m != nil {
+		return m.Comments
+	}
+	return nil
+}
+
+func (m *Sentiment) GetCommits() []string {
+	if m != nil {
+		return m.Commits
+	}
+	return nil
+}
+
+type CommentSentimentResults struct {
+	SentimentByDay map[int32]*Sentiment `protobuf:"bytes,1,rep,name=sentiment_by_day,json=sentimentByDay" json:"sentiment_by_day,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
+}
+
+func (m *CommentSentimentResults) Reset()                    { *m = CommentSentimentResults{} }
+func (m *CommentSentimentResults) String() string            { return proto.CompactTextString(m) }
+func (*CommentSentimentResults) ProtoMessage()               {}
+func (*CommentSentimentResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{15} }
+
+func (m *CommentSentimentResults) GetSentimentByDay() map[int32]*Sentiment {
+	if m != nil {
+		return m.SentimentByDay
+	}
+	return nil
+}
+
+type AnalysisResults struct {
+	Header *Metadata `protobuf:"bytes,1,opt,name=header" json:"header,omitempty"`
+	// The mapped values are dynamic messages which require a second parsing pass.
+	Contents map[string][]byte `protobuf:"bytes,2,rep,name=contents" json:"contents,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+}
+
+func (m *AnalysisResults) Reset()                    { *m = AnalysisResults{} }
+func (m *AnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*AnalysisResults) ProtoMessage()               {}
+func (*AnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{16} }
+
+func (m *AnalysisResults) GetHeader() *Metadata {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *AnalysisResults) GetContents() map[string][]byte {
+	if m != nil {
+		return m.Contents
+	}
+	return nil
+}
+
+func init() {
+	proto.RegisterType((*Metadata)(nil), "Metadata")
+	proto.RegisterType((*BurndownSparseMatrixRow)(nil), "BurndownSparseMatrixRow")
+	proto.RegisterType((*BurndownSparseMatrix)(nil), "BurndownSparseMatrix")
+	proto.RegisterType((*BurndownAnalysisResults)(nil), "BurndownAnalysisResults")
+	proto.RegisterType((*CompressedSparseRowMatrix)(nil), "CompressedSparseRowMatrix")
+	proto.RegisterType((*Couples)(nil), "Couples")
+	proto.RegisterType((*TouchedFiles)(nil), "TouchedFiles")
+	proto.RegisterType((*CouplesAnalysisResults)(nil), "CouplesAnalysisResults")
+	proto.RegisterType((*UASTChange)(nil), "UASTChange")
+	proto.RegisterType((*UASTChangesSaverResults)(nil), "UASTChangesSaverResults")
+	proto.RegisterType((*ShotnessRecord)(nil), "ShotnessRecord")
+	proto.RegisterType((*ShotnessAnalysisResults)(nil), "ShotnessAnalysisResults")
+	proto.RegisterType((*FileHistory)(nil), "FileHistory")
+	proto.RegisterType((*FileHistoryResultMessage)(nil), "FileHistoryResultMessage")
+	proto.RegisterType((*Sentiment)(nil), "Sentiment")
+	proto.RegisterType((*CommentSentimentResults)(nil), "CommentSentimentResults")
+	proto.RegisterType((*AnalysisResults)(nil), "AnalysisResults")
+}
+
+func init() { proto.RegisterFile("pb.proto", fileDescriptorPb) }
+
+var fileDescriptorPb = []byte{
+	// 1053 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0xdf, 0x6e, 0x1b, 0xc5,
+	0x17, 0xd6, 0x66, 0xfd, 0xf7, 0xac, 0x9d, 0xb4, 0xf3, 0xeb, 0xaf, 0xd9, 0x06, 0xb5, 0x98, 0x25,
+	0x80, 0xa1, 0x65, 0x8b, 0xdc, 0x1b, 0x08, 0x37, 0x24, 0x2e, 0x15, 0xbd, 0x08, 0x48, 0xe3, 0x14,
+	0x2e, 0xad, 0xf1, 0xee, 0x24, 0x5e, 0x58, 0xcf, 0xac, 0x66, 0x76, 0x93, 0xf8, 0x65, 0xb8, 0x43,
+	0x42, 0x48, 0x88, 0x0b, 0x5e, 0x80, 0xd7, 0xe0, 0x19, 0x78, 0x09, 0x34, 0xff, 0xec, 0xb5, 0xe5,
+	0x54, 0xdc, 0xcd, 0x39, 0xe7, 0xfb, 0xce, 0x9c, 0xf9, 0xce, 0x99, 0xd9, 0x85, 0x4e, 0x31, 0x8b,
+	0x0b, 0xc1, 0x4b, 0x1e, 0xfd, 0xed, 0x41, 0xe7, 0x9c, 0x96, 0x24, 0x25, 0x25, 0x41, 0x21, 0xb4,
+	0xaf, 0xa9, 0x90, 0x19, 0x67, 0xa1, 0x37, 0xf0, 0x86, 0x4d, 0xec, 0x4c, 0x84, 0xa0, 0x31, 0x27,
+	0x72, 0x1e, 0xee, 0x0d, 0xbc, 0x61, 0x17, 0xeb, 0x35, 0x7a, 0x02, 0x20, 0x68, 0xc1, 0x65, 0x56,
+	0x72, 0xb1, 0x0c, 0x7d, 0x1d, 0xa9, 0x79, 0xd0, 0x87, 0x70, 0x30, 0xa3, 0x57, 0x19, 0x9b, 0x56,
+	0x2c, 0xbb, 0x9d, 0x96, 0xd9, 0x82, 0x86, 0x8d, 0x81, 0x37, 0xf4, 0x71, 0x5f, 0xbb, 0xdf, 0xb0,
+	0xec, 0xf6, 0x22, 0x5b, 0x50, 0x14, 0x41, 0x9f, 0xb2, 0xb4, 0x86, 0x6a, 0x6a, 0x54, 0x40, 0x59,
+	0xba, 0xc2, 0x84, 0xd0, 0x4e, 0xf8, 0x62, 0x91, 0x95, 0x32, 0x6c, 0x99, 0xca, 0xac, 0x89, 0x1e,
+	0x41, 0x47, 0x54, 0xcc, 0x10, 0xdb, 0x9a, 0xd8, 0x16, 0x15, 0x53, 0xa4, 0xe8, 0x05, 0x1c, 0x9e,
+	0x55, 0x82, 0xa5, 0xfc, 0x86, 0x4d, 0x0a, 0x22, 0x24, 0x3d, 0x27, 0xa5, 0xc8, 0x6e, 0x31, 0xbf,
+	0x31, 0xf9, 0xf2, 0x6a, 0xc1, 0x64, 0xe8, 0x0d, 0xfc, 0x61, 0x1f, 0x3b, 0x33, 0xfa, 0xcd, 0x83,
+	0x07, 0xbb, 0x58, 0x4a, 0x02, 0x46, 0x16, 0x54, 0x2b, 0xd3, 0xc5, 0x7a, 0x8d, 0x8e, 0x61, 0x9f,
+	0x55, 0x8b, 0x19, 0x15, 0x53, 0x7e, 0x39, 0x15, 0xfc, 0x46, 0x6a, 0x81, 0x9a, 0xb8, 0x67, 0xbc,
+	0xdf, 0x5d, 0x62, 0x7e, 0x23, 0xd1, 0x27, 0x70, 0x7f, 0x8d, 0x72, 0xdb, 0xfa, 0x1a, 0x78, 0xe0,
+	0x80, 0x63, 0xe3, 0x46, 0xcf, 0xa0, 0xa1, 0xf3, 0x34, 0x06, 0xfe, 0x30, 0x18, 0x85, 0xf1, 0x1d,
+	0x07, 0xc0, 0x1a, 0x15, 0xfd, 0xb1, 0xb7, 0x3e, 0xe2, 0x29, 0x23, 0xf9, 0x52, 0x66, 0x12, 0x53,
+	0x59, 0xe5, 0xa5, 0x44, 0x03, 0x08, 0xae, 0x04, 0x61, 0x55, 0x4e, 0x44, 0x56, 0x2e, 0x6d, 0x43,
+	0xeb, 0x2e, 0x74, 0x04, 0x1d, 0x49, 0x16, 0x45, 0x9e, 0xb1, 0x2b, 0x5b, 0xf7, 0xca, 0x46, 0xcf,
+	0xa1, 0x5d, 0x08, 0xfe, 0x23, 0x4d, 0x4a, 0x5d, 0x69, 0x30, 0xfa, 0xff, 0xee, 0x52, 0x1c, 0x0a,
+	0x3d, 0x85, 0xe6, 0x65, 0x96, 0x53, 0x57, 0xf9, 0x1d, 0x70, 0x83, 0x41, 0x9f, 0x42, 0xab, 0xa0,
+	0xbc, 0xc8, 0x55, 0xaf, 0xdf, 0x82, 0xb6, 0x20, 0xf4, 0x1a, 0x90, 0x59, 0x4d, 0x33, 0x56, 0x52,
+	0x41, 0x92, 0x52, 0x8d, 0x68, 0x4b, 0xd7, 0x75, 0x14, 0x8f, 0xf9, 0xa2, 0x10, 0x54, 0x4a, 0x9a,
+	0x1a, 0x32, 0xe6, 0x37, 0x96, 0x7f, 0xdf, 0xb0, 0x5e, 0xaf, 0x49, 0xd1, 0x9f, 0x1e, 0x3c, 0xba,
+	0x93, 0xb0, 0xa3, 0x9f, 0xde, 0x7f, 0xed, 0xe7, 0xde, 0xee, 0x7e, 0x22, 0x68, 0xa8, 0xab, 0x15,
+	0xfa, 0x03, 0x7f, 0xe8, 0xe3, 0x86, 0xbb, 0x66, 0x19, 0x4b, 0xb3, 0xc4, 0x8a, 0xd5, 0xc4, 0xce,
+	0x44, 0x0f, 0xa1, 0x95, 0xb1, 0xb4, 0x28, 0x85, 0xd6, 0xc5, 0xc7, 0xd6, 0x8a, 0x26, 0xd0, 0x1e,
+	0xf3, 0xaa, 0x50, 0xd2, 0x3d, 0x80, 0x66, 0xc6, 0x52, 0x7a, 0xab, 0xe7, 0xb6, 0x8b, 0x8d, 0x81,
+	0x46, 0xd0, 0x5a, 0xe8, 0x23, 0xe8, 0x3a, 0xde, 0xae, 0x8a, 0x45, 0x46, 0xc7, 0xd0, 0xbb, 0xe0,
+	0x55, 0x32, 0xa7, 0xe9, 0xab, 0xcc, 0x66, 0x36, 0x1d, 0xf4, 0x74, 0x51, 0xc6, 0x88, 0x7e, 0xf5,
+	0xe0, 0xa1, 0xdd, 0x7b, 0x7b, 0xc2, 0x9e, 0x42, 0x4f, 0x61, 0xa6, 0x89, 0x09, 0xdb, 0x86, 0x74,
+	0x62, 0x0b, 0xc7, 0x81, 0x8a, 0xba, 0xba, 0x9f, 0xc3, 0xbe, 0xed, 0xa1, 0x83, 0xb7, 0xb7, 0xe0,
+	0x7d, 0x13, 0x77, 0x84, 0xcf, 0xa0, 0x67, 0x09, 0xa6, 0xaa, 0x8e, 0x9e, 0x94, 0x7e, 0x5c, 0xaf,
+	0x19, 0x07, 0x06, 0xa2, 0x8d, 0xe8, 0x17, 0x0f, 0xe0, 0xcd, 0xe9, 0xe4, 0x62, 0x3c, 0x27, 0xec,
+	0x8a, 0xa2, 0x77, 0xa0, 0xab, 0xcb, 0xab, 0xdd, 0xda, 0x8e, 0x72, 0x7c, 0xab, 0x6e, 0xee, 0x63,
+	0x00, 0x29, 0x92, 0xe9, 0x8c, 0x5e, 0x72, 0x41, 0xed, 0xb3, 0xd6, 0x95, 0x22, 0x39, 0xd3, 0x0e,
+	0xc5, 0x55, 0x61, 0x72, 0x59, 0x52, 0x61, 0x9f, 0xb6, 0x8e, 0x14, 0xc9, 0xa9, 0xb2, 0xd1, 0xbb,
+	0x10, 0x54, 0x44, 0x96, 0x8e, 0xdc, 0x30, 0x2f, 0x9f, 0x72, 0x59, 0xf6, 0x63, 0xd0, 0x96, 0xa5,
+	0x37, 0x4d, 0x72, 0xe5, 0xd1, 0xfc, 0xe8, 0x2b, 0x38, 0x5c, 0x97, 0x29, 0x27, 0xe4, 0x9a, 0x0a,
+	0x27, 0xe9, 0x07, 0xd0, 0x4e, 0x8c, 0x5b, 0x77, 0x21, 0x18, 0x05, 0xf1, 0x1a, 0x8a, 0x5d, 0x2c,
+	0xfa, 0xc7, 0x83, 0xfd, 0xc9, 0x9c, 0x97, 0x8c, 0x4a, 0x89, 0x69, 0xc2, 0x45, 0x8a, 0xde, 0x87,
+	0xbe, 0xbe, 0x1c, 0x8c, 0xe4, 0x53, 0xc1, 0x73, 0x77, 0xe2, 0x9e, 0x73, 0x62, 0x9e, 0x53, 0xd5,
+	0x62, 0x15, 0x53, 0xd3, 0xaa, 0x5b, 0xac, 0x8d, 0xd5, 0xcb, 0xe6, 0xd7, 0x5e, 0x36, 0x04, 0x0d,
+	0xa5, 0x95, 0x3d, 0x9c, 0x5e, 0xa3, 0x2f, 0xa0, 0x93, 0xf0, 0x4a, 0xe5, 0x93, 0xf6, 0xde, 0x3e,
+	0x8e, 0x37, 0xab, 0x50, 0xbd, 0xd4, 0xf1, 0xaf, 0x59, 0x29, 0x96, 0x78, 0x05, 0x3f, 0xfa, 0x12,
+	0xfa, 0x1b, 0x21, 0x74, 0x0f, 0xfc, 0x9f, 0xa8, 0x7b, 0x95, 0xd4, 0x52, 0xd5, 0x76, 0x4d, 0xf2,
+	0x8a, 0xda, 0x9b, 0x64, 0x8c, 0x93, 0xbd, 0xcf, 0xbd, 0xe8, 0x25, 0x1c, 0xba, 0x6d, 0xb6, 0x47,
+	0xf0, 0x63, 0x68, 0x0b, 0xbd, 0xb3, 0xd3, 0xeb, 0x60, 0xab, 0x22, 0xec, 0xe2, 0xd1, 0x47, 0x10,
+	0xa8, 0x31, 0xf9, 0x26, 0x93, 0xfa, 0xeb, 0x54, 0xfb, 0xa2, 0x98, 0x9b, 0xe4, 0xcc, 0xe8, 0x67,
+	0x0f, 0xc2, 0x1a, 0xd2, 0x6c, 0x75, 0x4e, 0xa5, 0x24, 0x57, 0x14, 0x9d, 0xd4, 0x2f, 0x49, 0x30,
+	0x3a, 0x8e, 0xef, 0x42, 0xea, 0x80, 0xd5, 0xc1, 0x50, 0x8e, 0x5e, 0x01, 0xac, 0x9d, 0x75, 0x05,
+	0xba, 0x46, 0x81, 0xa8, 0xae, 0x40, 0x30, 0xea, 0x6d, 0xe4, 0xae, 0xe9, 0xf1, 0x03, 0x74, 0x27,
+	0x94, 0xa9, 0x2f, 0x1e, 0x2b, 0xd7, 0xb2, 0xa9, 0x44, 0x7b, 0x16, 0xa6, 0x9e, 0x76, 0x75, 0x1c,
+	0xca, 0x4a, 0xd3, 0xeb, 0x2e, 0x5e, 0xd9, 0xf5, 0x93, 0xfb, 0x9b, 0x27, 0xff, 0xcb, 0x83, 0xc3,
+	0xb1, 0x81, 0xad, 0x36, 0x70, 0x4a, 0x7f, 0x0f, 0xf7, 0xa4, 0xf3, 0x4d, 0x67, 0xcb, 0x69, 0x4a,
+	0x96, 0x56, 0x83, 0x67, 0xf1, 0x1d, 0x9c, 0x78, 0xe5, 0x38, 0x5b, 0xbe, 0x24, 0x4b, 0xa3, 0xc5,
+	0xbe, 0xdc, 0x70, 0x1e, 0x9d, 0xc3, 0xff, 0x76, 0xc0, 0x76, 0xcc, 0xc7, 0x60, 0x53, 0x1d, 0x58,
+	0x67, 0xaf, 0x6b, 0xf3, 0xbb, 0x07, 0x07, 0xdb, 0x43, 0xf2, 0x1e, 0xb4, 0xe6, 0x94, 0xa4, 0x54,
+	0xe8, 0x74, 0xc1, 0xa8, 0x1b, 0xbb, 0x3f, 0x1e, 0x6c, 0x03, 0xe8, 0x44, 0xe9, 0xc5, 0xca, 0x95,
+	0x5e, 0xc1, 0xe8, 0x49, 0xbc, 0x95, 0x26, 0x1e, 0x5b, 0xc0, 0x6a, 0xb6, 0x8d, 0x69, 0x66, 0xbb,
+	0x16, 0xda, 0xd1, 0xd9, 0x8d, 0xd9, 0xee, 0xd5, 0xea, 0x9d, 0xb5, 0xf4, 0x6f, 0xd8, 0x8b, 0x7f,
+	0x03, 0x00, 0x00, 0xff, 0xff, 0x7e, 0x55, 0x2d, 0x51, 0x92, 0x09, 0x00, 0x00,
+}
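
The comment on AnalysisResults.Contents above describes the intended decoding flow: the envelope is parsed first, and each mapped value is then parsed as its own message. A minimal sketch of that two-pass read, assuming the gogo/protobuf runtime and a "Burndown" contents key (both are assumptions, not fixed by this diff):

package main

import (
	"io/ioutil"
	"log"

	"github.com/gogo/protobuf/proto"
	"gopkg.in/src-d/hercules.v4/internal/pb"
)

func main() {
	data, err := ioutil.ReadFile("result.pb") // hypothetical analysis output
	if err != nil {
		log.Fatal(err)
	}
	// First pass: decode the envelope with the metadata header.
	results := &pb.AnalysisResults{}
	if err := proto.Unmarshal(data, results); err != nil {
		log.Fatal(err)
	}
	// Second pass: every Contents value is itself a serialized message.
	if raw, exists := results.Contents["Burndown"]; exists {
		burndown := &pb.BurndownAnalysisResults{}
		if err := proto.Unmarshal(raw, burndown); err != nil {
			log.Fatal(err)
		}
		log.Println("burndown payload decoded")
	}
}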

pb/pb.proto → internal/pb/pb.proto


+ 1127 - 0
internal/pb/pb_pb2.py

File diff suppressed because it is too large


pb/utils.go → internal/pb/utils.go


+ 11 - 9
blob_cache.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"log"
@@ -8,6 +8,8 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
+	"gopkg.in/src-d/hercules.v4/internal"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // BlobCache loads the blobs which correspond to the changed files in a commit.
@@ -39,7 +41,7 @@ func (blobCache *BlobCache) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (blobCache *BlobCache) Provides() []string {
 	arr := [...]string{DependencyBlobCache}
 	return arr[:]
@@ -54,14 +56,14 @@ func (blobCache *BlobCache) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (blobCache *BlobCache) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (blobCache *BlobCache) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name: ConfigBlobCacheIgnoreMissingSubmodules,
 		Description: "Specifies whether to panic if some referenced submodules do not exist and thus" +
 			" the corresponding Git objects cannot be loaded. Override this if you know that the " +
 			"history is dirty and you want to get things done.",
 		Flag:    "ignore-missing-submodules",
-		Type:    BoolConfigurationOption,
+		Type:    core.BoolConfigurationOption,
 		Default: false}}
 	return options[:]
 }
@@ -116,7 +118,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 						log.Printf("file from %s %s\n", change.From.Name,
 							change.From.TreeEntry.Hash)
 					} else {
-						cache[change.From.TreeEntry.Hash], err = createDummyBlob(
+						cache[change.From.TreeEntry.Hash], err = internal.CreateDummyBlob(
 							change.From.TreeEntry.Hash)
 					}
 				}
@@ -163,7 +165,7 @@ func (blobCache *BlobCache) getBlob(entry *object.ChangeEntry, fileGetter FileGe
 			// this is not a submodule
 			return nil, err
 		} else if blobCache.IgnoreMissingSubmodules {
-			return createDummyBlob(entry.TreeEntry.Hash)
+			return internal.CreateDummyBlob(entry.TreeEntry.Hash)
 		}
 		file, errModules := fileGetter(".gitmodules")
 		if errModules != nil {
@@ -181,7 +183,7 @@ func (blobCache *BlobCache) getBlob(entry *object.ChangeEntry, fileGetter FileGe
 		_, exists := modules.Submodules[entry.Name]
 		if exists {
 			// we found that this is a submodule
-			return createDummyBlob(entry.TreeEntry.Hash)
+			return internal.CreateDummyBlob(entry.TreeEntry.Hash)
 		}
 		return nil, err
 	}
@@ -189,5 +191,5 @@ func (blobCache *BlobCache) getBlob(entry *object.ChangeEntry, fileGetter FileGe
 }
 
 func init() {
-	Registry.Register(&BlobCache{})
+	core.Registry.Register(&BlobCache{})
 }
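
The ignore-missing-submodules option surfaced above is set through the same facts mechanism the tests use. A short sketch of configuring the relocated BlobCache programmatically, under the v4 import paths introduced in this diff:

import (
	"gopkg.in/src-d/go-git.v4"
	"gopkg.in/src-d/hercules.v4/internal/plumbing"
)

func newTolerantBlobCache(repository *git.Repository) *plumbing.BlobCache {
	cache := &plumbing.BlobCache{}
	// Substitute dummy blobs for submodule entries which cannot be resolved.
	cache.Configure(map[string]interface{}{
		plumbing.ConfigBlobCacheIgnoreMissingSubmodules: true,
	})
	cache.Initialize(repository)
	return cache
}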

+ 39 - 39
blob_cache_test.go

@@ -1,25 +1,25 @@
-package hercules
+package plumbing
 
 import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
-	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
-var testRepository *git.Repository
-
 func fixtureBlobCache() *BlobCache {
 	cache := &BlobCache{}
-	cache.Initialize(testRepository)
+	cache.Initialize(test.Repository)
 	return cache
 }
 
 func TestBlobCacheConfigureInitialize(t *testing.T) {
 	cache := fixtureBlobCache()
-	assert.Equal(t, testRepository, cache.repository)
+	assert.Equal(t, test.Repository, cache.repository)
 	assert.False(t, cache.IgnoreMissingSubmodules)
 	facts := map[string]interface{}{}
 	facts[ConfigBlobCacheIgnoreMissingSubmodules] = true
@@ -44,22 +44,21 @@ func TestBlobCacheMetadata(t *testing.T) {
 }
 
 func TestBlobCacheRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&BlobCache{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "BlobCache")
-	tps, exists := Registry.provided[(&BlobCache{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.Len(t, tps, 1)
-	assert.Equal(t, tps[0].Elem().Name(), "BlobCache")
+	summoned := core.Registry.Summon((&BlobCache{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "BlobCache")
+	summoned = core.Registry.Summon((&BlobCache{}).Provides()[0])
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "BlobCache")
 }
 
 func TestBlobCacheConsumeModification(t *testing.T) {
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"af2d8db70f287b52d2428d9887a69a10bc4d1f46"))
 	changes := make(object.Changes, 1)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"80fe25955b8e725feee25c08ea5759d74f8b670d"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"63076fa0dfd93e94b6d2ef0fc8b1fdf9092f83c4"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "labours.py",
@@ -97,12 +96,12 @@ func TestBlobCacheConsumeModification(t *testing.T) {
 }
 
 func TestBlobCacheConsumeInsertionDeletion(t *testing.T) {
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	changes := make(object.Changes, 2)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -143,12 +142,12 @@ func TestBlobCacheConsumeInsertionDeletion(t *testing.T) {
 }
 
 func TestBlobCacheConsumeNoAction(t *testing.T) {
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"af2d8db70f287b52d2428d9887a69a10bc4d1f46"))
 	changes := make(object.Changes, 1)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"80fe25955b8e725feee25c08ea5759d74f8b670d"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"63076fa0dfd93e94b6d2ef0fc8b1fdf9092f83c4"))
 	changes[0] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{}}
 	deps := map[string]interface{}{}
@@ -172,12 +171,12 @@ func TestBlobCacheConsumeNoAction(t *testing.T) {
 }
 
 func TestBlobCacheConsumeBadHashes(t *testing.T) {
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"af2d8db70f287b52d2428d9887a69a10bc4d1f46"))
 	changes := make(object.Changes, 1)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"80fe25955b8e725feee25c08ea5759d74f8b670d"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"63076fa0dfd93e94b6d2ef0fc8b1fdf9092f83c4"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name:      "labours.py",
@@ -215,12 +214,12 @@ func TestBlobCacheConsumeBadHashes(t *testing.T) {
 }
 
 func TestBlobCacheConsumeInvalidHash(t *testing.T) {
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"af2d8db70f287b52d2428d9887a69a10bc4d1f46"))
 	changes := make(object.Changes, 1)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"80fe25955b8e725feee25c08ea5759d74f8b670d"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"63076fa0dfd93e94b6d2ef0fc8b1fdf9092f83c4"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "labours.py",
@@ -245,7 +244,7 @@ func TestBlobCacheConsumeInvalidHash(t *testing.T) {
 
 func TestBlobCacheGetBlob(t *testing.T) {
 	cache := fixtureBlobCache()
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"80fe25955b8e725feee25c08ea5759d74f8b670d"))
 	entry := object.ChangeEntry{
 		Name: "labours.py",
@@ -258,7 +257,7 @@ func TestBlobCacheGetBlob(t *testing.T) {
 	}
 	getter := func(path string) (*object.File, error) {
 		assert.Equal(t, path, ".gitmodules")
-		commit, _ := testRepository.CommitObject(plumbing.NewHash(
+		commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 			"13272b66c55e1ba1237a34104f30b84d7f6e4082"))
 		return commit.File("test_data/gitmodules")
 	}
@@ -268,7 +267,7 @@ func TestBlobCacheGetBlob(t *testing.T) {
 	assert.Equal(t, err.Error(), plumbing.ErrObjectNotFound.Error())
 	getter = func(path string) (*object.File, error) {
 		assert.Equal(t, path, ".gitmodules")
-		commit, _ := testRepository.CommitObject(plumbing.NewHash(
+		commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 			"13272b66c55e1ba1237a34104f30b84d7f6e4082"))
 		return commit.File("test_data/gitmodules_empty")
 	}
@@ -279,10 +278,10 @@ func TestBlobCacheGetBlob(t *testing.T) {
 }
 
 func TestBlobCacheDeleteInvalidBlob(t *testing.T) {
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	changes := make(object.Changes, 1)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -310,10 +309,10 @@ func TestBlobCacheDeleteInvalidBlob(t *testing.T) {
 }
 
 func TestBlobCacheInsertInvalidBlob(t *testing.T) {
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	changes := make(object.Changes, 1)
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
 	changes[0] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
 		Name: "pipeline.go",
@@ -336,7 +335,7 @@ func TestBlobCacheInsertInvalidBlob(t *testing.T) {
 func TestBlobCacheGetBlobIgnoreMissing(t *testing.T) {
 	cache := fixtureBlobCache()
 	cache.IgnoreMissingSubmodules = true
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"80fe25955b8e725feee25c08ea5759d74f8b670d"))
 	entry := object.ChangeEntry{
 		Name: "commit",
@@ -357,7 +356,7 @@ func TestBlobCacheGetBlobIgnoreMissing(t *testing.T) {
 	cache.IgnoreMissingSubmodules = false
 	getter = func(path string) (*object.File, error) {
 		assert.Equal(t, path, ".gitmodules")
-		commit, _ := testRepository.CommitObject(plumbing.NewHash(
+		commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 			"13272b66c55e1ba1237a34104f30b84d7f6e4082"))
 		return commit.File("test_data/gitmodules")
 	}
@@ -385,7 +384,8 @@ func TestBlobCacheGetBlobGitModulesErrors(t *testing.T) {
 	assert.NotNil(t, err)
 	assert.Equal(t, err.Error(), plumbing.ErrInvalidType.Error())
 	getter = func(path string) (*object.File, error) {
-		blob, _ := createDummyBlob(plumbing.NewHash("ffffffffffffffffffffffffffffffffffffffff"), true)
+		blob, _ := internal.CreateDummyBlob(
+			plumbing.NewHash("ffffffffffffffffffffffffffffffffffffffff"), true)
 		return &object.File{Name: "fake", Blob: *blob}, nil
 	}
 	blob, err = cache.getBlob(&entry, getter)
@@ -393,7 +393,7 @@ func TestBlobCacheGetBlobGitModulesErrors(t *testing.T) {
 	assert.NotNil(t, err)
 	assert.Equal(t, err.Error(), "dummy failure")
 	getter = func(path string) (*object.File, error) {
-		blob, _ := testRepository.BlobObject(plumbing.NewHash(
+		blob, _ := test.Repository.BlobObject(plumbing.NewHash(
 			"4434197c2b0509d990f09d53a3cabb910bfd34b7"))
 		return &object.File{Name: ".gitmodules", Blob: *blob}, nil
 	}

+ 6 - 5
day.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"time"
@@ -6,6 +6,7 @@ import (
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // DaysSinceStart provides the relative date information for every commit.
@@ -32,7 +33,7 @@ func (days *DaysSinceStart) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (days *DaysSinceStart) Provides() []string {
 	arr := [...]string{DependencyDay}
 	return arr[:]
@@ -46,8 +47,8 @@ func (days *DaysSinceStart) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (days *DaysSinceStart) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (days *DaysSinceStart) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
@@ -103,5 +104,5 @@ func (days *DaysSinceStart) Consume(deps map[string]interface{}) (map[string]int
 }
 
 func init() {
-	Registry.Register(&DaysSinceStart{})
+	core.Registry.Register(&DaysSinceStart{})
 }
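
The init() above is the registration side of the new core.Registry API; the reworked tests below resolve items through Summon instead of reading the registry's private maps. A minimal lookup sketch (assuming DependencyDay is the provided entity name, as Provides() above suggests):

import (
	"log"

	"gopkg.in/src-d/hercules.v4/internal/core"
	"gopkg.in/src-d/hercules.v4/internal/plumbing"
)

func findDayProviders() {
	// Summon accepts either an item name or a provided entity name.
	for _, item := range core.Registry.Summon(plumbing.DependencyDay) {
		log.Println(item.Name()) // expected: "DaysSinceStart"
	}
}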

+ 16 - 15
day_test.go

@@ -1,16 +1,18 @@
-package hercules
+package plumbing
 
 import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func fixtureDaysSinceStart() *DaysSinceStart {
 	dss := DaysSinceStart{}
 	dss.Configure(map[string]interface{}{})
-	dss.Initialize(testRepository)
+	dss.Initialize(test.Repository)
 	return &dss
 }
 
@@ -25,19 +27,18 @@ func TestDaysSinceStartMeta(t *testing.T) {
 }
 
 func TestDaysSinceStartRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&DaysSinceStart{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "DaysSinceStart")
-	tps, exists := Registry.provided[(&DaysSinceStart{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.Len(t, tps, 1)
-	assert.Equal(t, tps[0].Elem().Name(), "DaysSinceStart")
+	summoned := core.Registry.Summon((&DaysSinceStart{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "DaysSinceStart")
+	summoned = core.Registry.Summon((&DaysSinceStart{}).Provides()[0])
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "DaysSinceStart")
 }
 
 func TestDaysSinceStartConsume(t *testing.T) {
 	dss := fixtureDaysSinceStart()
 	deps := map[string]interface{}{}
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"cce947b98a050c6d356bc6ba95030254914027b1"))
 	deps["commit"] = commit
 	deps["index"] = 0
@@ -49,7 +50,7 @@ func TestDaysSinceStartConsume(t *testing.T) {
 	assert.Equal(t, dss.day0.Minute(), 0) // 30
 	assert.Equal(t, dss.day0.Second(), 0) // 29
 
-	commit, _ = testRepository.CommitObject(plumbing.NewHash(
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
 		"fc9ceecb6dabcb2aab60e8619d972e8d8208a7df"))
 	deps["commit"] = commit
 	deps["index"] = 10
@@ -58,7 +59,7 @@ func TestDaysSinceStartConsume(t *testing.T) {
 	assert.Equal(t, res[DependencyDay].(int), 1)
 	assert.Equal(t, dss.previousDay, 1)
 
-	commit, _ = testRepository.CommitObject(plumbing.NewHash(
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
 		"a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3"))
 	deps["commit"] = commit
 	deps["index"] = 20
@@ -67,7 +68,7 @@ func TestDaysSinceStartConsume(t *testing.T) {
 	assert.Equal(t, res[DependencyDay].(int), 1)
 	assert.Equal(t, dss.previousDay, 1)
 
-	commit, _ = testRepository.CommitObject(plumbing.NewHash(
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
 		"a8b665a65d7aced63f5ba2ff6d9b71dac227f8cf"))
 	deps["commit"] = commit
 	deps["index"] = 20
@@ -76,7 +77,7 @@ func TestDaysSinceStartConsume(t *testing.T) {
 	assert.Equal(t, res[DependencyDay].(int), 2)
 	assert.Equal(t, dss.previousDay, 2)
 
-	commit, _ = testRepository.CommitObject(plumbing.NewHash(
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
 		"186ff0d7e4983637bb3762a24d6d0a658e7f4712"))
 	deps["commit"] = commit
 	deps["index"] = 30
@@ -101,7 +102,7 @@ func TestDaysCommits(t *testing.T) {
 	dss.commits[0] = []plumbing.Hash{plumbing.NewHash(
 		"cce947b98a050c6d356bc6ba95030254914027b1")}
 	commits := dss.commits
-	dss.Initialize(testRepository)
+	dss.Initialize(test.Repository)
 	assert.Len(t, dss.commits, 0)
 	assert.Equal(t, dss.commits, commits)
 }

+ 14 - 6
diff.go

@@ -1,9 +1,10 @@
-package hercules
+package plumbing
 
 import (
 	"bufio"
 	"bytes"
 	"errors"
+	"io"
 	"unicode/utf8"
 
 	"github.com/sergi/go-diff/diffmatchpatch"
@@ -11,6 +12,7 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // FileDiff calculates the difference of files which were modified.
@@ -43,7 +45,7 @@ func (diff *FileDiff) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (diff *FileDiff) Provides() []string {
 	arr := [...]string{DependencyFileDiff}
 	return arr[:]
@@ -58,12 +60,12 @@ func (diff *FileDiff) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (diff *FileDiff) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (diff *FileDiff) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigFileDiffDisableCleanup,
 		Description: "Do not apply additional heuristics to improve diffs.",
 		Flag:        "no-diff-cleanup",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false},
 	}
 	return options[:]
@@ -170,6 +172,12 @@ func BlobToString(file *object.Blob) (string, error) {
 	return buf.String(), nil
 }
 
+func checkClose(c io.Closer) {
+	if err := c.Close(); err != nil {
+		panic(err)
+	}
+}
+
 func init() {
-	Registry.Register(&FileDiff{})
+	core.Registry.Register(&FileDiff{})
 }
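
The new checkClose helper exists so that a deferred Close() cannot silently drop an error: in a defer position there is no caller left to return the error to, so it panics instead. A sketch of the intended call site within the same plumbing package (readBlob is illustrative, not part of this diff; it assumes the "bytes" and go-git object imports):

func readBlob(blob *object.Blob) (string, error) {
	reader, err := blob.Reader()
	if err != nil {
		return "", err
	}
	// Panics if Close() fails, instead of ignoring the error.
	defer checkClose(reader)
	buf := new(bytes.Buffer)
	if _, err := buf.ReadFrom(reader); err != nil {
		return "", err
	}
	return buf.String(), nil
}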

+ 58 - 60
diff_test.go

@@ -1,66 +1,64 @@
-package hercules
+package plumbing_test
 
 import (
 	"testing"
+	"unicode/utf8"
 
 	"github.com/sergi/go-diff/diffmatchpatch"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"unicode/utf8"
+	"gopkg.in/src-d/hercules.v4/internal"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/test"
+	"gopkg.in/src-d/hercules.v4/internal/test/fixtures"
 )
 
-func fixtureFileDiff() *FileDiff {
-	fd := &FileDiff{}
-	fd.Initialize(testRepository)
-	return fd
-}
-
 func TestFileDiffMeta(t *testing.T) {
-	fd := fixtureFileDiff()
+	fd := fixtures.FileDiff()
 	assert.Equal(t, fd.Name(), "FileDiff")
 	assert.Equal(t, len(fd.Provides()), 1)
-	assert.Equal(t, fd.Provides()[0], DependencyFileDiff)
+	assert.Equal(t, fd.Provides()[0], items.DependencyFileDiff)
 	assert.Equal(t, len(fd.Requires()), 2)
-	assert.Equal(t, fd.Requires()[0], DependencyTreeChanges)
-	assert.Equal(t, fd.Requires()[1], DependencyBlobCache)
+	assert.Equal(t, fd.Requires()[0], items.DependencyTreeChanges)
+	assert.Equal(t, fd.Requires()[1], items.DependencyBlobCache)
 	assert.Len(t, fd.ListConfigurationOptions(), 1)
-	assert.Equal(t, fd.ListConfigurationOptions()[0].Name, ConfigFileDiffDisableCleanup)
+	assert.Equal(t, fd.ListConfigurationOptions()[0].Name, items.ConfigFileDiffDisableCleanup)
 	facts := map[string]interface{}{}
-	facts[ConfigFileDiffDisableCleanup] = true
+	facts[items.ConfigFileDiffDisableCleanup] = true
 	fd.Configure(facts)
 	assert.True(t, fd.CleanupDisabled)
 }
 
 func TestFileDiffRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&FileDiff{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "FileDiff")
-	tps, exists := Registry.provided[(&FileDiff{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.True(t, len(tps) >= 1)
+	summoned := core.Registry.Summon((&items.FileDiff{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "FileDiff")
+	summoned = core.Registry.Summon((&items.FileDiff{}).Provides()[0])
+	assert.True(t, len(summoned) >= 1)
 	matched := false
-	for _, tp := range tps {
-		matched = matched || tp.Elem().Name() == "FileDiff"
+	for _, tp := range summoned {
+		matched = matched || tp.Name() == "FileDiff"
 	}
 	assert.True(t, matched)
 }
 
 func TestFileDiffConsume(t *testing.T) {
-	fd := fixtureFileDiff()
+	fd := fixtures.FileDiff()
 	deps := map[string]interface{}{}
 	cache := map[plumbing.Hash]*object.Blob{}
 	hash := plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9")
-	cache[hash], _ = testRepository.BlobObject(hash)
-	deps[DependencyBlobCache] = cache
+	cache[hash], _ = test.Repository.BlobObject(hash)
+	deps[items.DependencyBlobCache] = cache
 	changes := make(object.Changes, 3)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -99,10 +97,10 @@ func TestFileDiffConsume(t *testing.T) {
 		},
 	}, To: object.ChangeEntry{},
 	}
-	deps[DependencyTreeChanges] = changes
+	deps[items.DependencyTreeChanges] = changes
 	res, err := fd.Consume(deps)
 	assert.Nil(t, err)
-	diffs := res[DependencyFileDiff].(map[string]FileDiffData)
+	diffs := res[items.DependencyFileDiff].(map[string]items.FileDiffData)
 	assert.Equal(t, len(diffs), 1)
 	diff := diffs["analyser.go"]
 	assert.Equal(t, diff.OldLinesOfCode, 307)
@@ -124,20 +122,20 @@ func TestFileDiffConsume(t *testing.T) {
 }
 
 func TestFileDiffConsumeInvalidBlob(t *testing.T) {
-	fd := fixtureFileDiff()
+	fd := fixtures.FileDiff()
 	deps := map[string]interface{}{}
 	cache := map[plumbing.Hash]*object.Blob{}
 	hash := plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9")
-	cache[hash], _ = testRepository.BlobObject(hash)
-	deps[DependencyBlobCache] = cache
+	cache[hash], _ = test.Repository.BlobObject(hash)
+	deps[items.DependencyBlobCache] = cache
 	changes := make(object.Changes, 1)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -156,7 +154,7 @@ func TestFileDiffConsumeInvalidBlob(t *testing.T) {
 			Hash: plumbing.NewHash("334cde09da4afcb74f8d2b3e6fd6cce61228b485"),
 		},
 	}}
-	deps[DependencyTreeChanges] = changes
+	deps[items.DependencyTreeChanges] = changes
 	res, err := fd.Consume(deps)
 	assert.Nil(t, res)
 	assert.NotNil(t, err)
@@ -183,32 +181,32 @@ func TestFileDiffConsumeInvalidBlob(t *testing.T) {
 }
 
 func TestCountLines(t *testing.T) {
-	blob, _ := testRepository.BlobObject(
+	blob, _ := test.Repository.BlobObject(
 		plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"))
-	lines, err := CountLines(blob)
+	lines, err := items.CountLines(blob)
 	assert.Equal(t, lines, 12)
 	assert.Nil(t, err)
-	lines, err = CountLines(nil)
+	lines, err = items.CountLines(nil)
 	assert.Equal(t, lines, -1)
 	assert.NotNil(t, err)
-	blob, _ = createDummyBlob(plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"), true)
-	lines, err = CountLines(blob)
+	blob, _ = internal.CreateDummyBlob(plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"), true)
+	lines, err = items.CountLines(blob)
 	assert.Equal(t, lines, -1)
 	assert.NotNil(t, err)
 	// test_data/blob
-	blob, err = testRepository.BlobObject(
+	blob, err = test.Repository.BlobObject(
 		plumbing.NewHash("c86626638e0bc8cf47ca49bb1525b40e9737ee64"))
 	assert.Nil(t, err)
-	lines, err = CountLines(blob)
+	lines, err = items.CountLines(blob)
 	assert.Equal(t, lines, -1)
 	assert.NotNil(t, err)
 	assert.EqualError(t, err, "binary")
 }
 
 func TestBlobToString(t *testing.T) {
-	blob, _ := testRepository.BlobObject(
+	blob, _ := test.Repository.BlobObject(
 		plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"))
-	str, err := BlobToString(blob)
+	str, err := items.BlobToString(blob)
 	assert.Nil(t, err)
 	assert.Equal(t, str, `language: go
 
@@ -223,28 +221,28 @@ script:
 notifications:
   email: false
 `)
-	str, err = BlobToString(nil)
+	str, err = items.BlobToString(nil)
 	assert.Equal(t, str, "")
 	assert.NotNil(t, err)
-	blob, _ = createDummyBlob(plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"), true)
-	str, err = BlobToString(blob)
+	blob, _ = internal.CreateDummyBlob(plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"), true)
+	str, err = items.BlobToString(blob)
 	assert.Equal(t, str, "")
 	assert.NotNil(t, err)
 }
 
 func TestFileDiffDarkMagic(t *testing.T) {
-	fd := fixtureFileDiff()
+	fd := fixtures.FileDiff()
 	deps := map[string]interface{}{}
 	cache := map[plumbing.Hash]*object.Blob{}
 	hash := plumbing.NewHash("448eb3f312849b0ca766063d06b09481c987b309")
-	cache[hash], _ = testRepository.BlobObject(hash) // 1.java
+	cache[hash], _ = test.Repository.BlobObject(hash) // 1.java
 	hash = plumbing.NewHash("3312c92f3e8bdfbbdb30bccb6acd1b85bc338dfc")
-	cache[hash], _ = testRepository.BlobObject(hash) // 2.java
-	deps[DependencyBlobCache] = cache
+	cache[hash], _ = test.Repository.BlobObject(hash) // 2.java
+	deps[items.DependencyBlobCache] = cache
 	changes := make(object.Changes, 1)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"f02289bfe843388a1bb3c7dea210374082dd86b9"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"eca91acf1fd828f20dcb653a061d8c97d965bc6c"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "test.java",
@@ -263,14 +261,14 @@ func TestFileDiffDarkMagic(t *testing.T) {
 			Hash: plumbing.NewHash("3312c92f3e8bdfbbdb30bccb6acd1b85bc338dfc"),
 		},
 	}}
-	deps[DependencyTreeChanges] = changes
+	deps[items.DependencyTreeChanges] = changes
 	res, err := fd.Consume(deps)
 	assert.Nil(t, err)
-	magicDiffs := res[DependencyFileDiff].(map[string]FileDiffData)["test.java"]
+	magicDiffs := res[items.DependencyFileDiff].(map[string]items.FileDiffData)["test.java"]
 	fd.CleanupDisabled = true
 	res, err = fd.Consume(deps)
 	assert.Nil(t, err)
-	plainDiffs := res[DependencyFileDiff].(map[string]FileDiffData)["test.java"]
+	plainDiffs := res[items.DependencyFileDiff].(map[string]items.FileDiffData)["test.java"]
 	assert.NotEqual(t, magicDiffs.Diffs, plainDiffs.Diffs)
 	assert.Equal(t, magicDiffs.OldLinesOfCode, plainDiffs.OldLinesOfCode)
 	assert.Equal(t, magicDiffs.NewLinesOfCode, plainDiffs.NewLinesOfCode)

+ 26 - 25
identity.go

@@ -1,4 +1,4 @@
-package hercules
+package identity
 
 import (
 	"bufio"
@@ -8,12 +8,13 @@ import (
 
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
-// IdentityDetector determines the author of a commit. Same person can commit under different
+// Detector determines the author of a commit. The same person can commit under different
 // signatures, and we apply some heuristics to merge those together.
 // It is a PipelineItem.
-type IdentityDetector struct {
+type Detector struct {
 	// PeopleDict maps email || name  -> developer id.
 	PeopleDict map[string]int
 	// ReversedPeopleDict maps developer id -> description
@@ -22,40 +23,40 @@ type IdentityDetector struct {
 
 const (
 	// AuthorMissing is the internal author index which denotes any unmatched identities
-	// (IdentityDetector.Consume()).
+	// (Detector.Consume()).
 	AuthorMissing = (1 << 18) - 1
 	// AuthorMissingName is the string name which corresponds to AuthorMissing.
 	AuthorMissingName = "<unmatched>"
 
 	// FactIdentityDetectorPeopleDict is the name of the fact which is inserted in
-	// IdentityDetector.Configure(). It corresponds to IdentityDetector.PeopleDict - the mapping
+	// Detector.Configure(). It corresponds to Detector.PeopleDict - the mapping
 	// from the signatures to the author indices.
 	FactIdentityDetectorPeopleDict = "IdentityDetector.PeopleDict"
 	// FactIdentityDetectorReversedPeopleDict is the name of the fact which is inserted in
-	// IdentityDetector.Configure(). It corresponds to IdentityDetector.ReversedPeopleDict -
+	// Detector.Configure(). It corresponds to Detector.ReversedPeopleDict -
 	// the mapping from the author indices to the main signature.
 	FactIdentityDetectorReversedPeopleDict = "IdentityDetector.ReversedPeopleDict"
 	// ConfigIdentityDetectorPeopleDictPath is the name of the configuration option
-	// (IdentityDetector.Configure()) which allows to set the external PeopleDict mapping from a file.
+	// (Detector.Configure()) which allows setting the external PeopleDict mapping from a file.
 	ConfigIdentityDetectorPeopleDictPath = "IdentityDetector.PeopleDictPath"
 	// FactIdentityDetectorPeopleCount is the name of the fact which is inserted in
-	// IdentityDetector.Configure(). It is equal to the overall number of unique authors
+	// Detector.Configure(). It is equal to the overall number of unique authors
 	// (the length of ReversedPeopleDict).
 	FactIdentityDetectorPeopleCount = "IdentityDetector.PeopleCount"
 
-	// DependencyAuthor is the name of the dependency provided by IdentityDetector.
+	// DependencyAuthor is the name of the dependency provided by Detector.
 	DependencyAuthor = "author"
 )
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
-func (id *IdentityDetector) Name() string {
+func (id *Detector) Name() string {
 	return "IdentityDetector"
 }
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
-func (id *IdentityDetector) Provides() []string {
+// to this list. Also used by core.Registry to build the global map of providers.
+func (id *Detector) Provides() []string {
 	arr := [...]string{DependencyAuthor}
 	return arr[:]
 }
@@ -63,24 +64,24 @@ func (id *IdentityDetector) Provides() []string {
 // Requires returns the list of names of entities which are needed by this PipelineItem.
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are provided by the upstream Provides().
-func (id *IdentityDetector) Requires() []string {
+func (id *Detector) Requires() []string {
 	return []string{}
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (id *IdentityDetector) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (id *Detector) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigIdentityDetectorPeopleDictPath,
 		Description: "Path to the developers' email associations.",
 		Flag:        "people-dict",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     ""},
 	}
 	return options[:]
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
-func (id *IdentityDetector) Configure(facts map[string]interface{}) {
+func (id *Detector) Configure(facts map[string]interface{}) {
 	if val, exists := facts[FactIdentityDetectorPeopleDict].(map[string]int); exists {
 		id.PeopleDict = val
 	}
@@ -93,10 +94,10 @@ func (id *IdentityDetector) Configure(facts map[string]interface{}) {
 			id.LoadPeopleDict(peopleDictPath)
 			facts[FactIdentityDetectorPeopleCount] = len(id.ReversedPeopleDict) - 1
 		} else {
-			if _, exists := facts[ConfigPipelineCommits]; !exists {
+			if _, exists := facts[core.ConfigPipelineCommits]; !exists {
 				panic("IdentityDetector needs a list of commits to initialize.")
 			}
-			id.GeneratePeopleDict(facts[ConfigPipelineCommits].([]*object.Commit))
+			id.GeneratePeopleDict(facts[core.ConfigPipelineCommits].([]*object.Commit))
 			facts[FactIdentityDetectorPeopleCount] = len(id.ReversedPeopleDict)
 		}
 	} else {
@@ -108,7 +109,7 @@ func (id *IdentityDetector) Configure(facts map[string]interface{}) {
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
-func (id *IdentityDetector) Initialize(repository *git.Repository) {
+func (id *Detector) Initialize(repository *git.Repository) {
 }
 
 // Consume runs this PipelineItem on the next commit data.
@@ -116,7 +117,7 @@ func (id *IdentityDetector) Initialize(repository *git.Repository) {
 // Additionally, "commit" is always present there and represents the analysed *object.Commit.
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
-func (id *IdentityDetector) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+func (id *Detector) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
 	commit := deps["commit"].(*object.Commit)
 	signature := commit.Author
 	authorID, exists := id.PeopleDict[strings.ToLower(signature.Email)]
@@ -132,7 +133,7 @@ func (id *IdentityDetector) Consume(deps map[string]interface{}) (map[string]int
 // LoadPeopleDict loads author signatures from a text file.
 // The format is one signature per line, and the signature consists of several
 // keys separated by "|". The first key is the main one and is used to reference all the rest.
-func (id *IdentityDetector) LoadPeopleDict(path string) error {
+func (id *Detector) LoadPeopleDict(path string) error {
 	file, err := os.Open(path)
 	if err != nil {
 		return err
@@ -157,7 +158,7 @@ func (id *IdentityDetector) LoadPeopleDict(path string) error {
 }
 
 // GeneratePeopleDict loads author signatures from the specified list of Git commits.
-func (id *IdentityDetector) GeneratePeopleDict(commits []*object.Commit) {
+func (id *Detector) GeneratePeopleDict(commits []*object.Commit) {
 	dict := map[string]int{}
 	emails := map[int][]string{}
 	names := map[int][]string{}
@@ -253,7 +254,7 @@ func (id *IdentityDetector) GeneratePeopleDict(commits []*object.Commit) {
 }
 
 // MergeReversedDicts joins two identity lists together, excluding duplicates, in-order.
-func (id IdentityDetector) MergeReversedDicts(rd1, rd2 []string) (map[string][3]int, []string) {
+func (id Detector) MergeReversedDicts(rd1, rd2 []string) (map[string][3]int, []string) {
 	people := map[string][3]int{}
 	for i, pid := range rd1 {
 		ptrs := people[pid]
@@ -279,5 +280,5 @@ func (id IdentityDetector) MergeReversedDicts(rd1, rd2 []string) (map[string][3]
 }
 
 func init() {
-	Registry.Register(&IdentityDetector{})
+	core.Registry.Register(&Detector{})
 }
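
LoadPeopleDict's format, documented above, is one developer per line with "|"-separated keys, the first key being canonical. An illustrative identities file (contents made up) and the matching load call:

Vadim Markovtsev|gmarkhor@gmail.com|vadim@sourced.tech
Linus Torvalds|torvalds@linux-foundation.org

id := &identity.Detector{}
if err := id.LoadPeopleDict("identities.txt"); err != nil {
	log.Fatal(err)
}
// PeopleDict maps each lowercased key to a developer index;
// ReversedPeopleDict holds the canonical names in order.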

+ 22 - 21
identity_test.go

@@ -1,4 +1,4 @@
-package hercules
+package identity
 
 import (
 	"io"
@@ -14,19 +14,21 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/plumbing/storer"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
-func fixtureIdentityDetector() *IdentityDetector {
+func fixtureIdentityDetector() *Detector {
 	peopleDict := map[string]int{}
 	peopleDict["vadim@sourced.tech"] = 0
 	peopleDict["gmarkhor@gmail.com"] = 0
 	reversePeopleDict := make([]string, 1)
 	reversePeopleDict[0] = "Vadim"
-	id := IdentityDetector{
+	id := Detector{
 		PeopleDict:         peopleDict,
 		ReversedPeopleDict: reversePeopleDict,
 	}
-	id.Initialize(testRepository)
+	id.Initialize(test.Repository)
 	return &id
 }
 
@@ -94,7 +96,7 @@ Vadim|vadim@sourced.tech`)
 	delete(facts, FactIdentityDetectorReversedPeopleDict)
 	delete(facts, ConfigIdentityDetectorPeopleDictPath)
 	commits := make([]*object.Commit, 0)
-	iter, err := testRepository.CommitObjects()
+	iter, err := test.Repository.CommitObjects()
 	commit, err := iter.Next()
 	for ; err != io.EOF; commit, err = iter.Next() {
 		if err != nil {
@@ -114,29 +116,28 @@ Vadim|vadim@sourced.tech`)
 }
 
 func TestIdentityDetectorRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&IdentityDetector{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "IdentityDetector")
-	tps, exists := Registry.provided[(&IdentityDetector{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.Len(t, tps, 1)
-	assert.Equal(t, tps[0].Elem().Name(), "IdentityDetector")
+	summoned := core.Registry.Summon((&Detector{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "IdentityDetector")
+	summoned = core.Registry.Summon((&Detector{}).Provides()[0])
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "IdentityDetector")
 }
 
 func TestIdentityDetectorConfigureEmpty(t *testing.T) {
-	id := IdentityDetector{}
+	id := Detector{}
 	assert.Panics(t, func() { id.Configure(map[string]interface{}{}) })
 }
 
 func TestIdentityDetectorConsume(t *testing.T) {
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"5c0e755dd85ac74584d9988cc361eccf02ce1a48"))
 	deps := map[string]interface{}{}
 	deps["commit"] = commit
 	res, err := fixtureIdentityDetector().Consume(deps)
 	assert.Nil(t, err)
 	assert.Equal(t, res[DependencyAuthor].(int), 0)
-	commit, _ = testRepository.CommitObject(plumbing.NewHash(
+	commit, _ = test.Repository.CommitObject(plumbing.NewHash(
 		"8a03b5620b1caa72ec9cb847ea88332621e2950a"))
 	deps["commit"] = commit
 	res, err = fixtureIdentityDetector().Consume(deps)
@@ -146,7 +147,7 @@ func TestIdentityDetectorConsume(t *testing.T) {
 
 func TestIdentityDetectorLoadPeopleDict(t *testing.T) {
 	id := fixtureIdentityDetector()
-	err := id.LoadPeopleDict(path.Join("test_data", "identities"))
+	err := id.LoadPeopleDict(path.Join("..", "..", "test_data", "identities"))
 	assert.Nil(t, err)
 	assert.Equal(t, len(id.PeopleDict), 7)
 	assert.Contains(t, id.PeopleDict, "linus torvalds")
@@ -168,7 +169,7 @@ func TestIdentityDetectorLoadPeopleDict(t *testing.T) {
 func TestGeneratePeopleDict(t *testing.T) {
 	id := fixtureIdentityDetector()
 	commits := make([]*object.Commit, 0)
-	iter, err := testRepository.CommitObjects()
+	iter, err := test.Repository.CommitObjects()
 	commit, err := iter.Next()
 	for ; err != io.EOF; commit, err = iter.Next() {
 		if err != nil {
 			panic(err)
@@ -182,7 +183,7 @@ func TestGeneratePeopleDict(t *testing.T) {
 func TestIdentityDetectorGeneratePeopleDict(t *testing.T) {
 	id := fixtureIdentityDetector()
 	commits := make([]*object.Commit, 0)
-	iter, err := testRepository.CommitObjects()
+	iter, err := test.Repository.CommitObjects()
 	commit, err := iter.Next()
 	for ; err != io.EOF; commit, err = iter.Next() {
 		if err != nil {
@@ -353,7 +354,7 @@ func getFakeCommitWithFile(name string, contents string) *object.Commit {
 func TestIdentityDetectorGeneratePeopleDictMailmap(t *testing.T) {
 	id := fixtureIdentityDetector()
 	commits := make([]*object.Commit, 0)
-	iter, err := testRepository.CommitObjects()
+	iter, err := test.Repository.CommitObjects()
 	commit, err := iter.Next()
 	for ; err != io.EOF; commit, err = iter.Next() {
 		if err != nil {
@@ -373,7 +374,7 @@ func TestIdentityDetectorGeneratePeopleDictMailmap(t *testing.T) {
 func TestIdentityDetectorMergeReversedDicts(t *testing.T) {
 	pa1 := [...]string{"one", "two"}
 	pa2 := [...]string{"two", "three"}
-	people, merged := IdentityDetector{}.MergeReversedDicts(pa1[:], pa2[:])
+	people, merged := Detector{}.MergeReversedDicts(pa1[:], pa2[:])
 	assert.Len(t, people, 3)
 	assert.Len(t, merged, 3)
 	assert.Equal(t, people["one"], [3]int{0, 0, -1})
@@ -382,7 +383,7 @@ func TestIdentityDetectorMergeReversedDicts(t *testing.T) {
 	vm := [...]string{"one", "two", "three"}
 	assert.Equal(t, merged, vm[:])
 	pa1 = [...]string{"two", "one"}
-	people, merged = IdentityDetector{}.MergeReversedDicts(pa1[:], pa2[:])
+	people, merged = Detector{}.MergeReversedDicts(pa1[:], pa2[:])
 	assert.Len(t, people, 3)
 	assert.Len(t, merged, 3)
 	assert.Equal(t, people["one"], [3]int{1, 1, -1})
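
From the assertions above, MergeReversedDicts preserves rd1's order and appends names unseen so far; the [3]int triple appears to encode the merged index plus the positions in rd1 and rd2, with -1 marking an absent side (inferred from these tests, not stated in the diff):

people, merged := Detector{}.MergeReversedDicts(
	[]string{"one", "two"}, []string{"two", "three"})
// merged == []string{"one", "two", "three"}
// people["one"] == [3]int{0, 0, -1} ("one" is absent from the second list)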

+ 1 - 1
mailmap.go

@@ -1,4 +1,4 @@
-package hercules
+package identity
 
 import (
 	"strings"

+ 3 - 2
mailmap_test.go

@@ -1,8 +1,9 @@
-package hercules
+package identity
 
 import (
-	"github.com/stretchr/testify/assert"
 	"testing"
+
+	"github.com/stretchr/testify/assert"
 )
 
 func TestParseMailmap(t *testing.T) {

+ 11 - 8
renames.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"log"
@@ -10,6 +10,8 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
+	"gopkg.in/src-d/hercules.v4/internal"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // RenameAnalysis improves TreeDiff's results by searching for changed blobs under different
@@ -41,7 +43,7 @@ func (ra *RenameAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (ra *RenameAnalysis) Provides() []string {
 	arr := [...]string{DependencyTreeChanges}
 	return arr[:]
@@ -56,12 +58,12 @@ func (ra *RenameAnalysis) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (ra *RenameAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (ra *RenameAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigRenameAnalysisSimilarityThreshold,
 		Description: "The threshold on the similarity index used to detect renames.",
 		Flag:        "M",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     RenameAnalysisDefaultThreshold},
 	}
 	return options[:]
@@ -202,7 +204,7 @@ func (ra *RenameAnalysis) Consume(deps map[string]interface{}) (map[string]inter
 }
 
 func (ra *RenameAnalysis) sizesAreClose(size1 int64, size2 int64) bool {
-	return abs64(size1-size2)*100/max64(1, min64(size1, size2)) <=
+	return internal.Abs64(size1-size2)*100/internal.Max64(1, internal.Min64(size1, size2)) <=
 		int64(100-ra.SimilarityThreshold)
 }
 
@@ -225,7 +227,8 @@ func (ra *RenameAnalysis) blobsAreClose(
 			common += utf8.RuneCountInString(edit.Text)
 		}
 	}
-	return common*100/max(1, min(len(src), len(dst))) >= ra.SimilarityThreshold, nil
+	similarity := common * 100 / internal.Max(1, internal.Min(len(src), len(dst)))
+	return similarity >= ra.SimilarityThreshold, nil
 }
 
 type sortableChange struct {
@@ -280,5 +283,5 @@ func (slice sortableBlobs) Swap(i, j int) {
 }
 
 func init() {
-	Registry.Register(&RenameAnalysis{})
+	core.Registry.Register(&RenameAnalysis{})
 }
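
The rewritten size check above is plain integer arithmetic: with the default threshold of 80, blobs of 900 and 1000 bytes pass, because |900-1000|*100/900 = 11 <= 100-80 = 20. A standalone sketch mirroring it with the relocated helpers:

import "gopkg.in/src-d/hercules.v4/internal"

// sizeGate mirrors RenameAnalysis.sizesAreClose for an explicit threshold.
func sizeGate(size1, size2 int64, threshold int) bool {
	return internal.Abs64(size1-size2)*100/
		internal.Max64(1, internal.Min64(size1, size2)) <=
		int64(100-threshold)
}

// sizeGate(900, 1000, 80) == true  (11 <= 20)
// sizeGate(400, 1000, 80) == false (150 > 20)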

+ 23 - 21
renames_test.go

@@ -1,15 +1,18 @@
-package hercules
+package plumbing
 
 import (
+	"testing"
+
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"testing"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func fixtureRenameAnalysis() *RenameAnalysis {
 	ra := RenameAnalysis{SimilarityThreshold: 80}
-	ra.Initialize(testRepository)
+	ra.Initialize(test.Repository)
 	return &ra
 }
 
@@ -35,39 +38,38 @@ func TestRenameAnalysisMeta(t *testing.T) {
 }
 
 func TestRenameAnalysisRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&RenameAnalysis{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "RenameAnalysis")
-	tps, exists := Registry.provided[(&RenameAnalysis{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.True(t, len(tps) >= 1)
+	summoned := core.Registry.Summon((&RenameAnalysis{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "RenameAnalysis")
+	summoned = core.Registry.Summon((&RenameAnalysis{}).Provides()[0])
+	assert.True(t, len(summoned) >= 1)
 	matched := false
-	for _, tp := range tps {
-		matched = matched || tp.Elem().Name() == "RenameAnalysis"
+	for _, tp := range summoned {
+		matched = matched || tp.Name() == "RenameAnalysis"
 	}
 	assert.True(t, matched)
 }
 
 func TestRenameAnalysisInitializeInvalidThreshold(t *testing.T) {
 	ra := RenameAnalysis{SimilarityThreshold: -10}
-	ra.Initialize(testRepository)
+	ra.Initialize(test.Repository)
 	assert.Equal(t, ra.SimilarityThreshold, RenameAnalysisDefaultThreshold)
 	ra = RenameAnalysis{SimilarityThreshold: 110}
-	ra.Initialize(testRepository)
+	ra.Initialize(test.Repository)
 	assert.Equal(t, ra.SimilarityThreshold, RenameAnalysisDefaultThreshold)
 	ra = RenameAnalysis{SimilarityThreshold: 0}
-	ra.Initialize(testRepository)
+	ra.Initialize(test.Repository)
 	ra = RenameAnalysis{SimilarityThreshold: 100}
-	ra.Initialize(testRepository)
+	ra.Initialize(test.Repository)
 }
 
 func TestRenameAnalysisConsume(t *testing.T) {
 	ra := fixtureRenameAnalysis()
 	changes := make(object.Changes, 3)
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -109,13 +111,13 @@ func TestRenameAnalysisConsume(t *testing.T) {
 	}
 	cache := map[plumbing.Hash]*object.Blob{}
 	hash := plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("f7d918ec500e2f925ecde79b51cc007bac27de72")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	deps := map[string]interface{}{}
 	deps[DependencyBlobCache] = cache
 	deps[DependencyTreeChanges] = changes
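
The registration tests no longer poke at Registry's unexported registered/provided maps; core.Registry.Summon is the lookup they use instead. Note that the move under internal/ also makes these packages unimportable from outside the hercules.v4 module tree (Go's internal-package rule), so out-of-tree code must rely on whatever the root package re-exports. A hedged sketch of the new pattern with a hypothetical MyItem (the method set mirrors the items in this diff, not a verbatim hercules API reference):

    package myplugin

    import (
        "testing"

        "github.com/stretchr/testify/assert"
        git "gopkg.in/src-d/go-git.v4"
        "gopkg.in/src-d/hercules.v4/internal/core"
    )

    // MyItem is a hypothetical, do-nothing pipeline item used only to show the
    // Summon-based registration check.
    type MyItem struct{}

    func (m *MyItem) Name() string       { return "MyItem" }
    func (m *MyItem) Provides() []string { return []string{"my_entity"} }
    func (m *MyItem) Requires() []string { return []string{} }
    func (m *MyItem) ListConfigurationOptions() []core.ConfigurationOption { return nil }
    func (m *MyItem) Configure(facts map[string]interface{})               {}
    func (m *MyItem) Initialize(repository *git.Repository)                {}
    func (m *MyItem) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
        return map[string]interface{}{"my_entity": nil}, nil
    }

    func init() { core.Registry.Register(&MyItem{}) }

    func TestMyItemRegistration(t *testing.T) {
        summoned := core.Registry.Summon((&MyItem{}).Name())
        assert.Len(t, summoned, 1)
        assert.Equal(t, "MyItem", summoned[0].Name())
        // Summoning by a provided entity can return several providers.
        summoned = core.Registry.Summon("my_entity")
        assert.True(t, len(summoned) >= 1)
    }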

+ 8 - 7
tree_diff.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"io"
@@ -6,6 +6,7 @@ import (
 
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // TreeDiff generates the list of changes for a commit. A change can be either one or two blobs
@@ -37,7 +38,7 @@ func (treediff *TreeDiff) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (treediff *TreeDiff) Provides() []string {
 	arr := [...]string{DependencyTreeChanges}
 	return arr[:]
@@ -51,17 +52,17 @@ func (treediff *TreeDiff) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (treediff *TreeDiff) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (treediff *TreeDiff) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigTreeDiffEnableBlacklist,
 		Description: "Skip blacklisted directories.",
 		Flag:        "skip-blacklist",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false}, {
 		Name:        ConfigTreeDiffBlacklistedDirs,
 		Description: "List of blacklisted directories. Separated by comma \",\".",
 		Flag:        "blacklisted-dirs",
-		Type:        StringsConfigurationOption,
+		Type:        core.StringsConfigurationOption,
 		Default:     defaultBlacklistedDirs},
 	}
 	return options[:]
@@ -141,5 +142,5 @@ func (treediff *TreeDiff) Consume(deps map[string]interface{}) (map[string]inter
 }
 
 func init() {
-	Registry.Register(&TreeDiff{})
+	core.Registry.Register(&TreeDiff{})
 }

+ 17 - 16
tree_diff_test.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"testing"
@@ -7,12 +7,14 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func fixtureTreeDiff() *TreeDiff {
 	td := TreeDiff{}
 	td.Configure(nil)
-	td.Initialize(testRepository)
+	td.Initialize(test.Repository)
 	return &td
 }
 
@@ -27,26 +29,25 @@ func TestTreeDiffMeta(t *testing.T) {
 }
 
 func TestTreeDiffRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&TreeDiff{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "TreeDiff")
-	tps, exists := Registry.provided[(&TreeDiff{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.True(t, len(tps) >= 1)
+	summoned := core.Registry.Summon((&TreeDiff{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "TreeDiff")
+	summoned = core.Registry.Summon((&TreeDiff{}).Provides()[0])
+	assert.True(t, len(summoned) >= 1)
 	matched := false
-	for _, tp := range tps {
-		matched = matched || tp.Elem().Name() == "TreeDiff"
+	for _, tp := range summoned {
+		matched = matched || tp.Name() == "TreeDiff"
 	}
 	assert.True(t, matched)
 }
 
 func TestTreeDiffConsume(t *testing.T) {
 	td := fixtureTreeDiff()
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	deps := map[string]interface{}{}
 	deps["commit"] = commit
-	prevCommit, _ := testRepository.CommitObject(plumbing.NewHash(
+	prevCommit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"fbe766ffdc3f87f6affddc051c6f8b419beea6a2"))
 	td.previousTree, _ = prevCommit.Tree()
 	res, err := td.Consume(deps)
@@ -83,7 +84,7 @@ func TestTreeDiffConsume(t *testing.T) {
 
 func TestTreeDiffConsumeFirst(t *testing.T) {
 	td := fixtureTreeDiff()
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	deps := map[string]interface{}{}
 	deps["commit"] = commit
@@ -101,7 +102,7 @@ func TestTreeDiffConsumeFirst(t *testing.T) {
 
 func TestTreeDiffBadCommit(t *testing.T) {
 	td := fixtureTreeDiff()
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	commit.TreeHash = plumbing.NewHash("0000000000000000000000000000000000000000")
 	deps := map[string]interface{}{}
@@ -114,11 +115,11 @@ func TestTreeDiffBadCommit(t *testing.T) {
 func TestTreeDiffConsumeSkip(t *testing.T) {
 	// consume without skipping
 	td := fixtureTreeDiff()
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"aefdedf7cafa6ee110bae9a3910bf5088fdeb5a9"))
 	deps := map[string]interface{}{}
 	deps["commit"] = commit
-	prevCommit, _ := testRepository.CommitObject(plumbing.NewHash(
+	prevCommit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"1e076dc56989bc6aa1ef5f55901696e9e01423d4"))
 	td.previousTree, _ = prevCommit.Tree()
 	res, err := td.Consume(deps)

+ 3 - 3
changes_xpather.go

@@ -1,9 +1,9 @@
-package hercules
+package uast
 
 import (
 	"bytes"
-	"log"
 	"io"
+	"log"
 
 	"github.com/minio/highwayhash"
 	"gopkg.in/bblfsh/client-go.v2/tools"
@@ -22,7 +22,7 @@ var hashKey = []byte{
 }
 
 // Extract returns the list of new or changed UAST nodes filtered by XPath.
-func (xpather ChangesXPather) Extract(changes []UASTChange) []*uast.Node {
+func (xpather ChangesXPather) Extract(changes []Change) []*uast.Node {
 	result := []*uast.Node{}
 	for _, change := range changes {
 		if change.After == nil {

+ 33 - 0
internal/plumbing/uast/changes_xpather_test.go

@@ -0,0 +1,33 @@
+// +build !disable_babelfish
+
+package uast
+
+import (
+	"log"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"gopkg.in/bblfsh/client-go.v2"
+	uast_test "gopkg.in/src-d/hercules.v4/internal/plumbing/uast/test"
+	"gopkg.in/src-d/hercules.v4/internal/test"
+)
+
+func TestChangesXPatherExtractChanged(t *testing.T) {
+	client, err := bblfsh.NewClient("0.0.0.0:9432")
+	if err != nil {
+		log.Panicf("Failed to connect to the Babelfish server at 0.0.0.0:9432: %v", err)
+	}
+	hash1 := "a98a6940eb4cfb1eb635c3232485a75c4b63fff3"
+	hash2 := "42457dc695fa73ec9621b47832d5711f6325410d"
+	root1 := uast_test.ParseBlobFromTestRepo(hash1, "burndown.go", client)
+	root2 := uast_test.ParseBlobFromTestRepo(hash2, "burndown.go", client)
+	gitChange := test.FakeChangeForName("burndown.go", hash1, hash2)
+	uastChanges := []Change{
+		{Before: root1, After: root2, Change: gitChange},
+		{Before: nil, After: root2, Change: gitChange},
+		{Before: root1, After: nil, Change: gitChange},
+	}
+	xpather := ChangesXPather{XPath: "//*[@roleComment]"}
+	nodes := xpather.Extract(uastChanges)
+	assert.True(t, len(nodes) > 0)
+}

+ 15 - 13
diff_refiner.go

@@ -1,4 +1,4 @@
-package hercules
+package uast
 
 import (
 	"unicode/utf8"
@@ -6,6 +6,8 @@ import (
 	"github.com/sergi/go-diff/diffmatchpatch"
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing"
 )
 
 // FileDiffRefiner uses UASTs to improve the human interpretability of diffs.
@@ -22,9 +24,9 @@ func (ref *FileDiffRefiner) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (ref *FileDiffRefiner) Provides() []string {
-	arr := [...]string{DependencyFileDiff}
+	arr := [...]string{plumbing.DependencyFileDiff}
 	return arr[:]
 }
 
@@ -32,7 +34,7 @@ func (ref *FileDiffRefiner) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (ref *FileDiffRefiner) Requires() []string {
-	arr := [...]string{DependencyFileDiff, DependencyUastChanges}
+	arr := [...]string{plumbing.DependencyFileDiff, DependencyUastChanges}
 	return arr[:]
 }
 
@@ -43,8 +45,8 @@ func (ref *FileDiffRefiner) Features() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (ref *FileDiffRefiner) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (ref *FileDiffRefiner) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
@@ -61,15 +63,15 @@ func (ref *FileDiffRefiner) Initialize(repository *git.Repository) {
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
 func (ref *FileDiffRefiner) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	changesList := deps[DependencyUastChanges].([]UASTChange)
-	changes := map[string]UASTChange{}
+	changesList := deps[DependencyUastChanges].([]Change)
+	changes := map[string]Change{}
 	for _, change := range changesList {
 		if change.Before != nil && change.After != nil {
 			changes[change.Change.To.Name] = change
 		}
 	}
-	diffs := deps[DependencyFileDiff].(map[string]FileDiffData)
-	result := map[string]FileDiffData{}
+	diffs := deps[plumbing.DependencyFileDiff].(map[string]plumbing.FileDiffData)
+	result := map[string]plumbing.FileDiffData{}
 	for fileName, oldDiff := range diffs {
 		uastChange, exists := changes[fileName]
 		if !exists {
@@ -114,7 +116,7 @@ func (ref *FileDiffRefiner) Consume(deps map[string]interface{}) (map[string]int
 				}
 			}
 		})
-		newDiff := FileDiffData{
+		newDiff := plumbing.FileDiffData{
 			OldLinesOfCode: oldDiff.OldLinesOfCode,
 			NewLinesOfCode: oldDiff.NewLinesOfCode,
 			Diffs:          []diffmatchpatch.Diff{},
@@ -156,7 +158,7 @@ func (ref *FileDiffRefiner) Consume(deps map[string]interface{}) (map[string]int
 		}
 		result[fileName] = newDiff
 	}
-	return map[string]interface{}{DependencyFileDiff: result}, nil
+	return map[string]interface{}{plumbing.DependencyFileDiff: result}, nil
 }
 
 // VisitEachNode is a handy routine to execute a callback on every node in the subtree,
@@ -185,5 +187,5 @@ func countNodesInInterval(occupiedMap [][]*uast.Node, start, end int) int {
 }
 
 func init() {
-	Registry.Register(&FileDiffRefiner{})
+	core.Registry.Register(&FileDiffRefiner{})
 }

+ 35 - 33
diff_refiner_test.go

@@ -1,4 +1,4 @@
-package hercules
+package uast
 
 import (
 	"io/ioutil"
@@ -11,11 +11,14 @@ import (
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func fixtureFileDiffRefiner() *FileDiffRefiner {
 	fd := &FileDiffRefiner{}
-	fd.Initialize(testRepository)
+	fd.Initialize(test.Repository)
 	return fd
 }
 
@@ -23,9 +26,9 @@ func TestFileDiffRefinerMeta(t *testing.T) {
 	fd := fixtureFileDiffRefiner()
 	assert.Equal(t, fd.Name(), "FileDiffRefiner")
 	assert.Equal(t, len(fd.Provides()), 1)
-	assert.Equal(t, fd.Provides()[0], DependencyFileDiff)
+	assert.Equal(t, fd.Provides()[0], plumbing.DependencyFileDiff)
 	assert.Equal(t, len(fd.Requires()), 2)
-	assert.Equal(t, fd.Requires()[0], DependencyFileDiff)
+	assert.Equal(t, fd.Requires()[0], plumbing.DependencyFileDiff)
 	assert.Equal(t, fd.Requires()[1], DependencyUastChanges)
 	assert.Len(t, fd.ListConfigurationOptions(), 0)
 	fd.Configure(nil)
@@ -35,45 +38,44 @@ func TestFileDiffRefinerMeta(t *testing.T) {
 }
 
 func TestFileDiffRefinerRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&FileDiffRefiner{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "FileDiffRefiner")
-	tps, exists := Registry.provided[(&FileDiffRefiner{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.True(t, len(tps) >= 1)
+	summoned := core.Registry.Summon((&FileDiffRefiner{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "FileDiffRefiner")
+	summoned = core.Registry.Summon((&FileDiffRefiner{}).Provides()[0])
+	assert.True(t, len(summoned) >= 1)
 	matched := false
-	for _, tp := range tps {
-		matched = matched || tp.Elem().Name() == "FileDiffRefiner"
+	for _, tp := range summoned {
+		matched = matched || tp.Name() == "FileDiffRefiner"
 	}
 	assert.True(t, matched)
 }
 
 func TestFileDiffRefinerConsume(t *testing.T) {
-	bytes1, err := ioutil.ReadFile(path.Join("test_data", "1.java"))
+	bytes1, err := ioutil.ReadFile(path.Join("..", "..", "test_data", "1.java"))
 	assert.Nil(t, err)
-	bytes2, err := ioutil.ReadFile(path.Join("test_data", "2.java"))
+	bytes2, err := ioutil.ReadFile(path.Join("..", "..", "test_data", "2.java"))
 	assert.Nil(t, err)
 	dmp := diffmatchpatch.New()
 	src, dst, _ := dmp.DiffLinesToRunes(string(bytes1), string(bytes2))
 	state := map[string]interface{}{}
-	fileDiffs := map[string]FileDiffData{}
+	fileDiffs := map[string]plumbing.FileDiffData{}
 	const fileName = "test.java"
-	fileDiffs[fileName] = FileDiffData{
+	fileDiffs[fileName] = plumbing.FileDiffData{
 		OldLinesOfCode: len(src),
 		NewLinesOfCode: len(dst),
 		Diffs:          dmp.DiffMainRunes(src, dst, false),
 	}
-	state[DependencyFileDiff] = fileDiffs
-	uastChanges := make([]UASTChange, 1)
+	state[plumbing.DependencyFileDiff] = fileDiffs
+	uastChanges := make([]Change, 1)
 	loadUast := func(name string) *uast.Node {
-		bytes, err := ioutil.ReadFile(path.Join("test_data", name))
+		bytes, err := ioutil.ReadFile(path.Join("..", "..", "test_data", name))
 		assert.Nil(t, err)
 		node := uast.Node{}
 		proto.Unmarshal(bytes, &node)
 		return &node
 	}
 	state[DependencyUastChanges] = uastChanges
-	uastChanges[0] = UASTChange{
+	uastChanges[0] = Change{
 		Change: &object.Change{
 			From: object.ChangeEntry{Name: fileName},
 			To:   object.ChangeEntry{Name: fileName}},
@@ -82,7 +84,7 @@ func TestFileDiffRefinerConsume(t *testing.T) {
 	fd := fixtureFileDiffRefiner()
 	iresult, err := fd.Consume(state)
 	assert.Nil(t, err)
-	result := iresult[DependencyFileDiff].(map[string]FileDiffData)
+	result := iresult[plumbing.DependencyFileDiff].(map[string]plumbing.FileDiffData)
 	assert.Len(t, result, 1)
 
 	oldDiff := fileDiffs[fileName]
@@ -98,31 +100,31 @@ func TestFileDiffRefinerConsume(t *testing.T) {
 }
 
 func TestFileDiffRefinerConsumeNoUast(t *testing.T) {
-	bytes1, err := ioutil.ReadFile(path.Join("test_data", "1.java"))
+	bytes1, err := ioutil.ReadFile(path.Join("..", "..", "test_data", "1.java"))
 	assert.Nil(t, err)
-	bytes2, err := ioutil.ReadFile(path.Join("test_data", "2.java"))
+	bytes2, err := ioutil.ReadFile(path.Join("..", "..", "test_data", "2.java"))
 	assert.Nil(t, err)
 	dmp := diffmatchpatch.New()
 	src, dst, _ := dmp.DiffLinesToRunes(string(bytes1), string(bytes2))
 	state := map[string]interface{}{}
-	fileDiffs := map[string]FileDiffData{}
+	fileDiffs := map[string]plumbing.FileDiffData{}
 	const fileName = "test.java"
-	fileDiffs[fileName] = FileDiffData{
+	fileDiffs[fileName] = plumbing.FileDiffData{
 		OldLinesOfCode: len(src),
 		NewLinesOfCode: len(dst),
 		Diffs:          dmp.DiffMainRunes(src, dst, false),
 	}
-	state[DependencyFileDiff] = fileDiffs
-	uastChanges := make([]UASTChange, 1)
+	state[plumbing.DependencyFileDiff] = fileDiffs
+	uastChanges := make([]Change, 1)
 	loadUast := func(name string) *uast.Node {
-		bytes, err := ioutil.ReadFile(path.Join("test_data", name))
+		bytes, err := ioutil.ReadFile(path.Join("..", "..", "test_data", name))
 		assert.Nil(t, err)
 		node := uast.Node{}
 		proto.Unmarshal(bytes, &node)
 		return &node
 	}
 	state[DependencyUastChanges] = uastChanges
-	uastChanges[0] = UASTChange{
+	uastChanges[0] = Change{
 		Change: &object.Change{
 			From: object.ChangeEntry{Name: fileName},
 			To:   object.ChangeEntry{Name: fileName}},
@@ -131,15 +133,15 @@ func TestFileDiffRefinerConsumeNoUast(t *testing.T) {
 	fd := fixtureFileDiffRefiner()
 	iresult, err := fd.Consume(state)
 	assert.Nil(t, err)
-	result := iresult[DependencyFileDiff].(map[string]FileDiffData)
+	result := iresult[plumbing.DependencyFileDiff].(map[string]plumbing.FileDiffData)
 	assert.Len(t, result, 1)
 	assert.Equal(t, fileDiffs[fileName], result[fileName])
-	fileDiffs[fileName] = FileDiffData{
+	fileDiffs[fileName] = plumbing.FileDiffData{
 		OldLinesOfCode: 100,
 		NewLinesOfCode: 100,
 		Diffs:          []diffmatchpatch.Diff{{}},
 	}
-	uastChanges[0] = UASTChange{
+	uastChanges[0] = Change{
 		Change: &object.Change{
 			From: object.ChangeEntry{Name: fileName},
 			To:   object.ChangeEntry{Name: fileName}},
@@ -147,7 +149,7 @@ func TestFileDiffRefinerConsumeNoUast(t *testing.T) {
 	}
 	iresult, err = fd.Consume(state)
 	assert.Nil(t, err)
-	result = iresult[DependencyFileDiff].(map[string]FileDiffData)
+	result = iresult[plumbing.DependencyFileDiff].(map[string]plumbing.FileDiffData)
 	assert.Len(t, result, 1)
 	assert.Equal(t, fileDiffs[fileName], result[fileName])
 }

+ 39 - 0
internal/plumbing/uast/test/utils.go

@@ -0,0 +1,39 @@
+package test
+
+import (
+	"fmt"
+	"io/ioutil"
+
+	bblfsh "gopkg.in/bblfsh/client-go.v2"
+	"gopkg.in/bblfsh/sdk.v1/uast"
+	"gopkg.in/src-d/go-git.v4/plumbing"
+	core_test "gopkg.in/src-d/hercules.v4/internal/test"
+)
+
+// ParseBlobFromTestRepo extracts the UAST from the file by its hash and name.
+func ParseBlobFromTestRepo(hash, name string, client *bblfsh.Client) *uast.Node {
+	blob, err := core_test.Repository.BlobObject(plumbing.NewHash(hash))
+	if err != nil {
+		panic(err)
+	}
+	reader, err := blob.Reader()
+	if err != nil {
+		panic(err)
+	}
+	defer reader.Close()
+	data, err := ioutil.ReadAll(reader)
+	if err != nil {
+		panic(err)
+	}
+	request := client.NewParseRequest()
+	request.Content(string(data))
+	request.Filename(name)
+	response, err := request.Do()
+	if err != nil {
+		panic(err)
+	}
+	if response.UAST == nil {
+		panic(fmt.Sprintf("empty response for %s %s", name, hash))
+	}
+	return response.UAST
+}

+ 109 - 109
uast.go

@@ -1,4 +1,4 @@
-package hercules
+package uast
 
 import (
 	"bytes"
@@ -25,12 +25,14 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/ioutil"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
 )
 
-// UASTExtractor retrieves UASTs from Babelfish server which correspond to changed files in a commit.
+// Extractor retrieves UASTs from Babelfish server which correspond to changed files in a commit.
 // It is a PipelineItem.
-type UASTExtractor struct {
+type Extractor struct {
 	Endpoint       string
 	Context        func() (context.Context, context.CancelFunc)
 	PoolSize       int
@@ -39,68 +41,64 @@ type UASTExtractor struct {
 	ProcessedFiles map[string]int
 
 	clients []*bblfsh.Client
-	pool    *tunny.WorkPool
+	pool    *tunny.Pool
 }
 
 const (
 	uastExtractionSkipped = -(1 << 31)
 
-	// ConfigUASTEndpoint is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTEndpoint is the name of the configuration option (Extractor.Configure())
 	// which sets the Babelfish server address.
 	ConfigUASTEndpoint = "ConfigUASTEndpoint"
-	// ConfigUASTTimeout is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTTimeout is the name of the configuration option (Extractor.Configure())
 	// which sets the maximum amount of time to wait for a Babelfish server response.
 	ConfigUASTTimeout = "ConfigUASTTimeout"
-	// ConfigUASTPoolSize is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTPoolSize is the name of the configuration option (Extractor.Configure())
 	// which sets the number of goroutines to run for UAST parse queries.
 	ConfigUASTPoolSize = "ConfigUASTPoolSize"
-	// ConfigUASTFailOnErrors is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTFailOnErrors is the name of the configuration option (Extractor.Configure())
 	// which enables early exit in case of any Babelfish UAST parsing errors.
 	ConfigUASTFailOnErrors = "ConfigUASTFailOnErrors"
-	// ConfigUASTLanguages is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTLanguages is the name of the configuration option (Extractor.Configure())
 	// which sets the list of languages to parse. Language names are at
 	// https://doc.bblf.sh/languages.html. Names are joined with a comma ",".
 	ConfigUASTLanguages = "ConfigUASTLanguages"
 
 	// FeatureUast is the name of the Pipeline feature which activates all the items related to UAST.
 	FeatureUast = "uast"
-	// DependencyUasts is the name of the dependency provided by UASTExtractor.
+	// DependencyUasts is the name of the dependency provided by Extractor.
 	DependencyUasts = "uasts"
 )
 
 type uastTask struct {
-	Client *bblfsh.Client
 	Lock   *sync.RWMutex
 	Dest   map[plumbing.Hash]*uast.Node
 	File   *object.File
 	Errors *[]error
-	Status chan int
 }
 
 type worker struct {
-	Client   *bblfsh.Client
-	Callback func(interface{}) interface{}
+	Client    *bblfsh.Client
+	Extractor *Extractor
 }
 
-func (w worker) Ready() bool {
-	return true
-}
-
-func (w worker) Job(data interface{}) interface{} {
-	task := data.(uastTask)
-	task.Client = w.Client
-	return w.Callback(task)
+// Process will synchronously perform a job and return the result.
+func (w worker) Process(data interface{}) interface{} {
+	return w.Extractor.extractTask(w.Client, data)
 }
+func (w worker) BlockUntilReady() {}
+func (w worker) Interrupt()       {}
+func (w worker) Terminate()       {}
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
-func (exr *UASTExtractor) Name() string {
+func (exr *Extractor) Name() string {
 	return "UAST"
 }
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
-func (exr *UASTExtractor) Provides() []string {
+// to this list. Also used by core.Registry to build the global map of providers.
+func (exr *Extractor) Provides() []string {
 	arr := [...]string{DependencyUasts}
 	return arr[:]
 }
@@ -108,51 +106,51 @@ func (exr *UASTExtractor) Provides() []string {
 // Requires returns the list of names of entities which are needed by this PipelineItem.
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
-func (exr *UASTExtractor) Requires() []string {
-	arr := [...]string{DependencyTreeChanges, DependencyBlobCache}
+func (exr *Extractor) Requires() []string {
+	arr := [...]string{items.DependencyTreeChanges, items.DependencyBlobCache}
 	return arr[:]
 }
 
 // Features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
-func (exr *UASTExtractor) Features() []string {
+func (exr *Extractor) Features() []string {
 	arr := [...]string{FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (exr *UASTExtractor) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (exr *Extractor) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigUASTEndpoint,
 		Description: "How many days there are in a single band.",
 		Flag:        "bblfsh",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     "0.0.0.0:9432"}, {
 		Name:        ConfigUASTTimeout,
 		Description: "Babelfish's server timeout in seconds.",
 		Flag:        "bblfsh-timeout",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     20}, {
 		Name:        ConfigUASTPoolSize,
 		Description: "Number of goroutines to extract UASTs.",
 		Flag:        "bblfsh-pool-size",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     runtime.NumCPU() * 2}, {
 		Name:        ConfigUASTFailOnErrors,
 		Description: "Panic if there is a UAST extraction error.",
 		Flag:        "bblfsh-fail-on-error",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false}, {
 		Name:        ConfigUASTLanguages,
 		Description: "Programming languages from which to extract UASTs. Separated by comma \",\".",
 		Flag:        "languages",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     "Python,Java,Go,JavaScript,Ruby,PHP"},
 	}
 	return options[:]
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
-func (exr *UASTExtractor) Configure(facts map[string]interface{}) {
+func (exr *Extractor) Configure(facts map[string]interface{}) {
 	if val, exists := facts[ConfigUASTEndpoint].(string); exists {
 		exr.Endpoint = val
 	}
@@ -178,7 +176,7 @@ func (exr *UASTExtractor) Configure(facts map[string]interface{}) {
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
-func (exr *UASTExtractor) Initialize(repository *git.Repository) {
+func (exr *Extractor) Initialize(repository *git.Repository) {
 	if exr.Context == nil {
 		exr.Context = func() (context.Context, context.CancelFunc) {
 			return context.Background(), nil
@@ -188,7 +186,6 @@ func (exr *UASTExtractor) Initialize(repository *git.Repository) {
 	if poolSize == 0 {
 		poolSize = runtime.NumCPU()
 	}
-	var err error
 	exr.clients = make([]*bblfsh.Client, poolSize)
 	for i := 0; i < poolSize; i++ {
 		client, err := bblfsh.NewClient(exr.Endpoint)
@@ -200,13 +197,16 @@ func (exr *UASTExtractor) Initialize(repository *git.Repository) {
 	if exr.pool != nil {
 		exr.pool.Close()
 	}
-	workers := make([]tunny.Worker, poolSize)
-	for i := 0; i < poolSize; i++ {
-		workers[i] = worker{Client: exr.clients[i], Callback: exr.extractTask}
+	{
+		i := 0
+		exr.pool = tunny.New(poolSize, func() tunny.Worker {
+			w := worker{Client: exr.clients[i], Extractor: exr}
+			i++
+			return w
+		})
 	}
-	exr.pool, err = tunny.CreateCustomPool(workers).Open()
-	if err != nil {
-		panic(err)
+	if exr.pool == nil {
+		panic("UAST goroutine pool was not created")
 	}
 	exr.ProcessedFiles = map[string]int{}
 	if exr.Languages == nil {
@@ -219,14 +219,13 @@ func (exr *UASTExtractor) Initialize(repository *git.Repository) {
 // Additionally, "commit" is always present there and represents the analysed *object.Commit.
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
-func (exr *UASTExtractor) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	cache := deps[DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
-	treeDiffs := deps[DependencyTreeChanges].(object.Changes)
+func (exr *Extractor) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+	cache := deps[items.DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
+	treeDiffs := deps[items.DependencyTreeChanges].(object.Changes)
 	uasts := map[plumbing.Hash]*uast.Node{}
 	lock := sync.RWMutex{}
 	errs := make([]error, 0)
-	status := make(chan int)
-	pending := 0
+	wg := sync.WaitGroup{}
 	submit := func(change *object.Change) {
 		{
 			reader, err := cache[change.To.TreeEntry.Hash].Reader()
@@ -248,12 +247,16 @@ func (exr *UASTExtractor) Consume(deps map[string]interface{}) (map[string]inter
 			}
 			exr.ProcessedFiles[change.To.Name]++
 		}
-		pending++
-		exr.pool.SendWorkAsync(uastTask{
+		wg.Add(1)
+		go func(task interface{}) {
+			exr.pool.Process(task)
+			wg.Done()
+		}(uastTask{
 			Lock:   &lock,
 			Dest:   uasts,
 			File:   &object.File{Name: change.To.Name, Blob: *cache[change.To.TreeEntry.Hash]},
-			Errors: &errs, Status: status}, nil)
+			Errors: &errs,
+		})
 	}
 	for _, change := range treeDiffs {
 		action, err := change.Action()
@@ -269,9 +272,7 @@ func (exr *UASTExtractor) Consume(deps map[string]interface{}) (map[string]inter
 			submit(change)
 		}
 	}
-	for i := 0; i < pending; i++ {
-		_ = <-status
-	}
+	wg.Wait()
 	if len(errs) > 0 {
 		msgs := make([]string, len(errs))
 		for i, err := range errs {
@@ -286,7 +287,7 @@ func (exr *UASTExtractor) Consume(deps map[string]interface{}) (map[string]inter
 	return map[string]interface{}{DependencyUasts: uasts}, nil
 }
 
-func (exr *UASTExtractor) extractUAST(
+func (exr *Extractor) extractUAST(
 	client *bblfsh.Client, file *object.File) (*uast.Node, error) {
 	request := client.NewParseRequest()
 	contents, err := file.Contents()
@@ -315,10 +316,9 @@ func (exr *UASTExtractor) extractUAST(
 	return response.UAST, nil
 }
 
-func (exr *UASTExtractor) extractTask(data interface{}) interface{} {
+func (exr *Extractor) extractTask(client *bblfsh.Client, data interface{}) interface{} {
 	task := data.(uastTask)
-	defer func() { task.Status <- 0 }()
-	node, err := exr.extractUAST(task.Client, task.File)
+	node, err := exr.extractUAST(client, task.File)
 	task.Lock.Lock()
 	defer task.Lock.Unlock()
 	if err != nil {
@@ -332,33 +332,33 @@ func (exr *UASTExtractor) extractTask(data interface{}) interface{} {
 	return nil
 }
 
-// UASTChange is the type of the items in the list of changes which is provided by UASTChanges.
-type UASTChange struct {
+// Change is the type of the items in the list of changes which is provided by Changes.
+type Change struct {
 	Before *uast.Node
 	After  *uast.Node
 	Change *object.Change
 }
 
 const (
-	// DependencyUastChanges is the name of the dependency provided by UASTChanges.
+	// DependencyUastChanges is the name of the dependency provided by Changes.
 	DependencyUastChanges = "changed_uasts"
 )
 
-// UASTChanges is a structured analog of TreeDiff: it provides UASTs for every logical change
+// Changes is a structured analog of TreeDiff: it provides UASTs for every logical change
 // in a commit. It is a PipelineItem.
-type UASTChanges struct {
+type Changes struct {
 	cache map[plumbing.Hash]*uast.Node
 }
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
-func (uc *UASTChanges) Name() string {
+func (uc *Changes) Name() string {
 	return "UASTChanges"
 }
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
-func (uc *UASTChanges) Provides() []string {
+// to this list. Also used by core.Registry to build the global map of providers.
+func (uc *Changes) Provides() []string {
 	arr := [...]string{DependencyUastChanges}
 	return arr[:]
 }
@@ -366,28 +366,28 @@ func (uc *UASTChanges) Provides() []string {
 // Requires returns the list of names of entities which are needed by this PipelineItem.
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
-func (uc *UASTChanges) Requires() []string {
-	arr := [...]string{DependencyUasts, DependencyTreeChanges}
+func (uc *Changes) Requires() []string {
+	arr := [...]string{DependencyUasts, items.DependencyTreeChanges}
 	return arr[:]
 }
 
 // Features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
-func (uc *UASTChanges) Features() []string {
+func (uc *Changes) Features() []string {
 	arr := [...]string{FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (uc *UASTChanges) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (uc *Changes) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
-func (uc *UASTChanges) Configure(facts map[string]interface{}) {}
+func (uc *Changes) Configure(facts map[string]interface{}) {}
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
-func (uc *UASTChanges) Initialize(repository *git.Repository) {
+func (uc *Changes) Initialize(repository *git.Repository) {
 	uc.cache = map[plumbing.Hash]*uast.Node{}
 }
 
@@ -396,10 +396,10 @@ func (uc *UASTChanges) Initialize(repository *git.Repository) {
 // Additionally, "commit" is always present there and represents the analysed *object.Commit.
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
-func (uc *UASTChanges) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+func (uc *Changes) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
 	uasts := deps[DependencyUasts].(map[plumbing.Hash]*uast.Node)
-	treeDiffs := deps[DependencyTreeChanges].(object.Changes)
-	commit := make([]UASTChange, 0, len(treeDiffs))
+	treeDiffs := deps[items.DependencyTreeChanges].(object.Changes)
+	commit := make([]Change, 0, len(treeDiffs))
 	for _, change := range treeDiffs {
 		action, err := change.Action()
 		if err != nil {
@@ -409,17 +409,17 @@ func (uc *UASTChanges) Consume(deps map[string]interface{}) (map[string]interfac
 		case merkletrie.Insert:
 			hashTo := change.To.TreeEntry.Hash
 			uastTo := uasts[hashTo]
-			commit = append(commit, UASTChange{Before: nil, After: uastTo, Change: change})
+			commit = append(commit, Change{Before: nil, After: uastTo, Change: change})
 			uc.cache[hashTo] = uastTo
 		case merkletrie.Delete:
 			hashFrom := change.From.TreeEntry.Hash
-			commit = append(commit, UASTChange{Before: uc.cache[hashFrom], After: nil, Change: change})
+			commit = append(commit, Change{Before: uc.cache[hashFrom], After: nil, Change: change})
 			delete(uc.cache, hashFrom)
 		case merkletrie.Modify:
 			hashFrom := change.From.TreeEntry.Hash
 			hashTo := change.To.TreeEntry.Hash
 			uastTo := uasts[hashTo]
-			commit = append(commit, UASTChange{Before: uc.cache[hashFrom], After: uastTo, Change: change})
+			commit = append(commit, Change{Before: uc.cache[hashFrom], After: uastTo, Change: change})
 			delete(uc.cache, hashFrom)
 			uc.cache[hashTo] = uastTo
 		}
@@ -427,67 +427,67 @@ func (uc *UASTChanges) Consume(deps map[string]interface{}) (map[string]interfac
 	return map[string]interface{}{DependencyUastChanges: commit}, nil
 }
 
-// UASTChangesSaver dumps changed files and corresponding UASTs for every commit.
+// ChangesSaver dumps changed files and corresponding UASTs for every commit.
 // it is a LeafPipelineItem.
-type UASTChangesSaver struct {
+type ChangesSaver struct {
 	// OutputPath points to the target directory with UASTs
 	OutputPath string
 
 	repository *git.Repository
-	result     [][]UASTChange
+	result     [][]Change
 }
 
 const (
 	// ConfigUASTChangesSaverOutputPath is the name of the configuration option
-	// (UASTChangesSaver.Configure()) which sets the target directory where to save the files.
-	ConfigUASTChangesSaverOutputPath = "UASTChangesSaver.OutputPath"
+	// (ChangesSaver.Configure()) which sets the target directory where to save the files.
+	ConfigUASTChangesSaverOutputPath = "ChangesSaver.OutputPath"
 )
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
-func (saver *UASTChangesSaver) Name() string {
+func (saver *ChangesSaver) Name() string {
 	return "UASTChangesSaver"
 }
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
-func (saver *UASTChangesSaver) Provides() []string {
+// to this list. Also used by core.Registry to build the global map of providers.
+func (saver *ChangesSaver) Provides() []string {
 	return []string{}
 }
 
 // Requires returns the list of names of entities which are needed by this PipelineItem.
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
-func (saver *UASTChangesSaver) Requires() []string {
+func (saver *ChangesSaver) Requires() []string {
 	arr := [...]string{DependencyUastChanges}
 	return arr[:]
 }
 
 // Features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
-func (saver *UASTChangesSaver) Features() []string {
+func (saver *ChangesSaver) Features() []string {
 	arr := [...]string{FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (saver *UASTChangesSaver) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (saver *ChangesSaver) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigUASTChangesSaverOutputPath,
 		Description: "The target directory where to store the changed UAST files.",
 		Flag:        "changed-uast-dir",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     "."},
 	}
 	return options[:]
 }
 
 // Flag for the command line switch which enables this analysis.
-func (saver *UASTChangesSaver) Flag() string {
+func (saver *ChangesSaver) Flag() string {
 	return "dump-uast-changes"
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
-func (saver *UASTChangesSaver) Configure(facts map[string]interface{}) {
+func (saver *ChangesSaver) Configure(facts map[string]interface{}) {
 	if val, exists := facts[ConfigUASTChangesSaverOutputPath]; exists {
 		saver.OutputPath = val.(string)
 	}
@@ -495,9 +495,9 @@ func (saver *UASTChangesSaver) Configure(facts map[string]interface{}) {
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
-func (saver *UASTChangesSaver) Initialize(repository *git.Repository) {
+func (saver *ChangesSaver) Initialize(repository *git.Repository) {
 	saver.repository = repository
-	saver.result = [][]UASTChange{}
+	saver.result = [][]Change{}
 }
 
 // Consume runs this PipelineItem on the next commit data.
@@ -505,21 +505,21 @@ func (saver *UASTChangesSaver) Initialize(repository *git.Repository) {
 // Additionally, "commit" is always present there and represents the analysed *object.Commit.
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
-func (saver *UASTChangesSaver) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	changes := deps[DependencyUastChanges].([]UASTChange)
+func (saver *ChangesSaver) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+	changes := deps[DependencyUastChanges].([]Change)
 	saver.result = append(saver.result, changes)
 	return nil, nil
 }
 
 // Finalize returns the result of the analysis. Further Consume() calls are not expected.
-func (saver *UASTChangesSaver) Finalize() interface{} {
+func (saver *ChangesSaver) Finalize() interface{} {
 	return saver.result
 }
 
 // Serialize converts the analysis result as returned by Finalize() to text or bytes.
 // The text format is YAML and the bytes format is Protocol Buffers.
-func (saver *UASTChangesSaver) Serialize(result interface{}, binary bool, writer io.Writer) error {
-	saverResult := result.([][]UASTChange)
+func (saver *ChangesSaver) Serialize(result interface{}, binary bool, writer io.Writer) error {
+	saverResult := result.([][]Change)
 	fileNames := saver.dumpFiles(saverResult)
 	if binary {
 		return saver.serializeBinary(fileNames, writer)
@@ -528,7 +528,7 @@ func (saver *UASTChangesSaver) Serialize(result interface{}, binary bool, writer
 	return nil
 }
 
-func (saver *UASTChangesSaver) dumpFiles(result [][]UASTChange) []*pb.UASTChange {
+func (saver *ChangesSaver) dumpFiles(result [][]Change) []*pb.UASTChange {
 	fileNames := []*pb.UASTChange{}
 	for i, changes := range result {
 		for j, change := range changes {
@@ -560,7 +560,7 @@ func (saver *UASTChangesSaver) dumpFiles(result [][]UASTChange) []*pb.UASTChange
 	return fileNames
 }
 
-func (saver *UASTChangesSaver) serializeText(result []*pb.UASTChange, writer io.Writer) {
+func (saver *ChangesSaver) serializeText(result []*pb.UASTChange, writer io.Writer) {
 	for _, sc := range result {
 		kv := [...]string{
 			"file: " + sc.FileName,
@@ -571,7 +571,7 @@ func (saver *UASTChangesSaver) serializeText(result []*pb.UASTChange, writer io.
 	}
 }
 
-func (saver *UASTChangesSaver) serializeBinary(result []*pb.UASTChange, writer io.Writer) error {
+func (saver *ChangesSaver) serializeBinary(result []*pb.UASTChange, writer io.Writer) error {
 	message := pb.UASTChangesSaverResults{Changes: result}
 	serialized, err := proto.Marshal(&message)
 	if err != nil {
@@ -582,7 +582,7 @@ func (saver *UASTChangesSaver) serializeBinary(result []*pb.UASTChange, writer i
 }
 
 func init() {
-	Registry.Register(&UASTExtractor{})
-	Registry.Register(&UASTChanges{})
-	Registry.Register(&UASTChangesSaver{})
+	core.Registry.Register(&Extractor{})
+	core.Registry.Register(&Changes{})
+	core.Registry.Register(&ChangesSaver{})
 }

+ 58 - 51
uast_test.go

@@ -1,6 +1,6 @@
 // +build !disable_babelfish
 
-package hercules
+package uast
 
 import (
 	"bytes"
@@ -16,12 +16,15 @@ import (
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
-func fixtureUASTExtractor() *UASTExtractor {
-	exr := UASTExtractor{Endpoint: "0.0.0.0:9432"}
-	exr.Initialize(testRepository)
+func fixtureUASTExtractor() *Extractor {
+	exr := Extractor{Endpoint: "0.0.0.0:9432"}
+	exr.Initialize(test.Repository)
 	exr.Languages["Python"] = true
 	return &exr
 }
@@ -32,8 +35,8 @@ func TestUASTExtractorMeta(t *testing.T) {
 	assert.Equal(t, len(exr.Provides()), 1)
 	assert.Equal(t, exr.Provides()[0], DependencyUasts)
 	assert.Equal(t, len(exr.Requires()), 2)
-	assert.Equal(t, exr.Requires()[0], DependencyTreeChanges)
-	assert.Equal(t, exr.Requires()[1], DependencyBlobCache)
+	assert.Equal(t, exr.Requires()[0], items.DependencyTreeChanges)
+	assert.Equal(t, exr.Requires()[1], items.DependencyBlobCache)
 	opts := exr.ListConfigurationOptions()
 	assert.Len(t, opts, 5)
 	assert.Equal(t, opts[0].Name, ConfigUASTEndpoint)
@@ -66,22 +69,21 @@ func TestUASTExtractorConfiguration(t *testing.T) {
 }
 
 func TestUASTExtractorRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&UASTExtractor{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "UASTExtractor")
-	tps, exists := Registry.provided[(&UASTExtractor{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.Len(t, tps, 1)
-	assert.Equal(t, tps[0].Elem().Name(), "UASTExtractor")
+	summoned := core.Registry.Summon((&Extractor{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "UAST")
+	summoned = core.Registry.Summon((&Extractor{}).Provides()[0])
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "UAST")
 }
 
 func TestUASTExtractorConsume(t *testing.T) {
 	exr := fixtureUASTExtractor()
 	changes := make(object.Changes, 2)
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -113,16 +115,16 @@ func TestUASTExtractorConsume(t *testing.T) {
 	}
 	cache := map[plumbing.Hash]*object.Blob{}
 	hash := plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("5d78f57d732aed825764347ec6f3ab74d50d0619")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("f7d918ec500e2f925ecde79b51cc007bac27de72")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	deps := map[string]interface{}{}
-	deps[DependencyBlobCache] = cache
-	deps[DependencyTreeChanges] = changes
+	deps[items.DependencyBlobCache] = cache
+	deps[items.DependencyTreeChanges] = changes
 	res, err := exr.Consume(deps)
 	// Language not enabled
 	assert.Len(t, res[DependencyUasts], 0)
@@ -152,10 +154,10 @@ func TestUASTExtractorConsume(t *testing.T) {
 	assert.Equal(t, len(uasts[hash].Children), 24)
 }
 
-func fixtureUASTChanges() *UASTChanges {
-	ch := UASTChanges{}
+func fixtureUASTChanges() *Changes {
+	ch := Changes{}
 	ch.Configure(nil)
-	ch.Initialize(testRepository)
+	ch.Initialize(test.Repository)
 	return &ch
 }
 
@@ -166,7 +168,7 @@ func TestUASTChangesMeta(t *testing.T) {
 	assert.Equal(t, ch.Provides()[0], DependencyUastChanges)
 	assert.Equal(t, len(ch.Requires()), 2)
 	assert.Equal(t, ch.Requires()[0], DependencyUasts)
-	assert.Equal(t, ch.Requires()[1], DependencyTreeChanges)
+	assert.Equal(t, ch.Requires()[1], items.DependencyTreeChanges)
 	opts := ch.ListConfigurationOptions()
 	assert.Len(t, opts, 0)
 	feats := ch.Features()
@@ -175,15 +177,14 @@ func TestUASTChangesMeta(t *testing.T) {
 }
 
 func TestUASTChangesRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&UASTChanges{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "UASTChanges")
-	tps, exists := Registry.provided[(&UASTChanges{}).Provides()[0]]
-	assert.True(t, exists)
-	assert.True(t, len(tps) >= 1)
+	summoned := core.Registry.Summon((&Changes{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "UASTChanges")
+	summoned = core.Registry.Summon((&Changes{}).Provides()[0])
+	assert.True(t, len(summoned) >= 1)
 	matched := false
-	for _, tp := range tps {
-		matched = matched || tp.Elem().Name() == "UASTChanges"
+	for _, tp := range summoned {
+		matched = matched || tp.Name() == "UASTChanges"
 	}
 	assert.True(t, matched)
 }
@@ -208,9 +209,9 @@ func TestUASTChangesConsume(t *testing.T) {
 	uasts[hash].InternalType = "quatro"
 	uastsArray = append(uastsArray, uasts[hash])
 	changes := make(object.Changes, 3)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -251,13 +252,13 @@ func TestUASTChangesConsume(t *testing.T) {
 	}
 	deps := map[string]interface{}{}
 	deps[DependencyUasts] = uasts
-	deps[DependencyTreeChanges] = changes
+	deps[items.DependencyTreeChanges] = changes
 	ch := fixtureUASTChanges()
 	ch.cache[changes[0].From.TreeEntry.Hash] = uastsArray[3]
 	ch.cache[changes[2].From.TreeEntry.Hash] = uastsArray[0]
 	resultMap, err := ch.Consume(deps)
 	assert.Nil(t, err)
-	result := resultMap[DependencyUastChanges].([]UASTChange)
+	result := resultMap[DependencyUastChanges].([]Change)
 	assert.Len(t, result, 3)
 	assert.Equal(t, result[0].Change, changes[0])
 	assert.Equal(t, result[0].Before, uastsArray[3])
@@ -270,9 +271,9 @@ func TestUASTChangesConsume(t *testing.T) {
 	assert.Nil(t, result[2].After)
 }
 
-func fixtureUASTChangesSaver() *UASTChangesSaver {
-	ch := UASTChangesSaver{}
-	ch.Initialize(testRepository)
+func fixtureUASTChangesSaver() *ChangesSaver {
+	ch := ChangesSaver{}
+	ch.Initialize(test.Repository)
 	return &ch
 }
 
@@ -302,24 +303,30 @@ func TestUASTChangesSaverConfiguration(t *testing.T) {
 }
 
 func TestUASTChangesSaverRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&UASTChangesSaver{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "UASTChangesSaver")
-	tp, exists = Registry.flags[(&UASTChangesSaver{}).Flag()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "UASTChangesSaver")
+	summoned := core.Registry.Summon((&ChangesSaver{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "UASTChangesSaver")
+	leaves := core.Registry.GetLeaves()
+	matched := false
+	for _, tp := range leaves {
+		if tp.Flag() == (&ChangesSaver{}).Flag() {
+			matched = true
+			break
+		}
+	}
+	assert.True(t, matched)
 }
 
 func TestUASTChangesSaverPayload(t *testing.T) {
 	chs := fixtureUASTChangesSaver()
 	deps := map[string]interface{}{}
-	changes := make([]UASTChange, 1)
+	changes := make([]Change, 1)
 	deps[DependencyUastChanges] = changes
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
-	changes[0] = UASTChange{Before: &uast.Node{}, After: &uast.Node{},
+	changes[0] = Change{Before: &uast.Node{}, After: &uast.Node{},
 		Change: &object.Change{From: object.ChangeEntry{
 			Name: "analyser.go",
 			Tree: treeFrom,

rbtree/rbtree.go → internal/rbtree/rbtree.go


+ 13 - 0
internal/test/fixtures/fixtures.go

@@ -0,0 +1,13 @@
+package fixtures
+
+import (
+	"gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/test"
+)
+
+// FileDiff initializes a new plumbing.FileDiff item for testing.
+func FileDiff() *plumbing.FileDiff {
+	fd := &plumbing.FileDiff{}
+	fd.Initialize(test.Repository)
+	return fd
+}

+ 51 - 0
internal/test/repository.go

@@ -0,0 +1,51 @@
+package test
+
+import (
+	"io"
+	"os"
+
+	git "gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/go-git.v4/plumbing"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/go-git.v4/storage/memory"
+)
+
+// Repository is a boilerplate sample repository (Hercules itself).
+var Repository *git.Repository
+
+// FakeChangeForName creates an artificial Git Change from a file name and two arbitrary hashes.
+func FakeChangeForName(name string, hashFrom string, hashTo string) *object.Change {
+	return &object.Change{
+		From: object.ChangeEntry{Name: name, TreeEntry: object.TreeEntry{
+			Name: name, Hash: plumbing.NewHash(hashFrom),
+		}},
+		To: object.ChangeEntry{Name: name, TreeEntry: object.TreeEntry{
+			Name: name, Hash: plumbing.NewHash(hashTo),
+		}},
+	}
+}
+
+func init() {
+	cwd, err := os.Getwd()
+	if err == nil {
+		Repository, err = git.PlainOpen(cwd)
+		if err == nil {
+			iter, err := Repository.CommitObjects()
+			if err == nil {
+				commits := -1
+				for ; err != io.EOF; _, err = iter.Next() {
+					if err != nil {
+						panic(err)
+					}
+					commits++
+					if commits >= 100 {
+						return
+					}
+				}
+			}
+		}
+	}
+	Repository, _ = git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
+		URL: "https://github.com/src-d/hercules",
+	})
+}
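
The init() above keeps the repository found in the working directory when it has at least a hundred commits (the usual case when tests run from a hercules checkout) and otherwise clones src-d/hercules into in-memory storage. A short usage sketch consistent with the fixtures in this diff (the hashes are arbitrary placeholders):

    package plumbing

    import (
        "testing"

        "gopkg.in/src-d/hercules.v4/internal/test"
    )

    // TestFixtureUsesSharedRepo shows the intended pattern: items initialize
    // against the shared test.Repository instead of opening their own repo.
    func TestFixtureUsesSharedRepo(t *testing.T) {
        td := TreeDiff{}
        td.Configure(nil)
        td.Initialize(test.Repository)
        change := test.FakeChangeForName("burndown.go",
            "0000000000000000000000000000000000000001",
            "0000000000000000000000000000000000000002")
        if change.From.Name != change.To.Name {
            t.Fatal("FakeChangeForName must keep the file name on both sides")
        }
    }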

test_data/1.java → internal/test_data/1.java


test_data/2.java → internal/test_data/2.java


test_data/blob → internal/test_data/blob


test_data/burndown.pb → internal/test_data/burndown.pb


test_data/couples.pb → internal/test_data/couples.pb


test_data/gitmodules → internal/test_data/gitmodules


+ 0 - 0
internal/test_data/gitmodules_empty


test_data/identities → internal/test_data/identities


test_data/uast1.pb → internal/test_data/uast1.pb


test_data/uast2.pb → internal/test_data/uast2.pb


toposort/toposort.go → internal/toposort/toposort.go


toposort/toposort_test.go → internal/toposort/toposort_test.go


+ 6 - 5
labours.py

@@ -28,9 +28,9 @@ if sys.version_info[0] < 3:
 
 
 PB_MESSAGES = {
-    "Burndown": "pb.pb_pb2.BurndownAnalysisResults",
-    "Couples": "pb.pb_pb2.CouplesAnalysisResults",
-    "Shotness": "pb.pb_pb2.ShotnessAnalysisResults",
+    "Burndown": "internal.pb.pb_pb2.BurndownAnalysisResults",
+    "Couples": "internal.pb.pb_pb2.CouplesAnalysisResults",
+    "Shotness": "internal.pb.pb_pb2.ShotnessAnalysisResults",
 }
 
 
@@ -227,9 +227,10 @@ class YamlReader(Reader):
 class ProtobufReader(Reader):
     def read(self, file):
         try:
-            from pb.pb_pb2 import AnalysisResults
+            from internal.pb.pb_pb2 import AnalysisResults
         except ImportError as e:
-            print("\n\n>>> You need to generate pb/pb_pb2.py - run \"make\"\n", file=sys.stderr)
+            print("\n\n>>> You need to generate internal/pb/pb_pb2.py - run \"make\"\n",
+                  file=sys.stderr)
             raise e from None
         self.data = AnalysisResults()
         if file != "-":

+ 44 - 39
burndown.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"errors"
@@ -15,8 +15,12 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/pb"
-	"gopkg.in/src-d/hercules.v3/yaml"
+	"gopkg.in/src-d/hercules.v4/internal/burndown"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v4/yaml"
 )
 
 // BurndownAnalysis allows gathering the line burndown statistics for a Git repository.
@@ -55,7 +59,7 @@ type BurndownAnalysis struct {
 	// peopleHistories is the periodic snapshots of each person's status.
 	peopleHistories [][][]int64
 	// files is the mapping <file path> -> *File.
-	files map[string]*File
+	files map[string]*burndown.File
 	// matrix is the mutual deletions and self insertions.
 	matrix []map[int]int64
 	// people is the people's individual time stats.
@@ -126,7 +130,7 @@ func (analyser *BurndownAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (analyser *BurndownAnalysis) Provides() []string {
 	return []string{}
 }
@@ -136,37 +140,38 @@ func (analyser *BurndownAnalysis) Provides() []string {
 // entities are Provides() upstream.
 func (analyser *BurndownAnalysis) Requires() []string {
 	arr := [...]string{
-		DependencyFileDiff, DependencyTreeChanges, DependencyBlobCache, DependencyDay, DependencyAuthor}
+		items.DependencyFileDiff, items.DependencyTreeChanges, items.DependencyBlobCache,
+		items.DependencyDay, identity.DependencyAuthor}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (analyser *BurndownAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (analyser *BurndownAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigBurndownGranularity,
 		Description: "How many days there are in a single band.",
 		Flag:        "granularity",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     DefaultBurndownGranularity}, {
 		Name:        ConfigBurndownSampling,
 		Description: "How frequently to record the state in days.",
 		Flag:        "sampling",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     DefaultBurndownGranularity}, {
 		Name:        ConfigBurndownTrackFiles,
 		Description: "Record detailed statistics per each file.",
 		Flag:        "burndown-files",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false}, {
 		Name:        ConfigBurndownTrackPeople,
 		Description: "Record detailed statistics per each developer.",
 		Flag:        "burndown-people",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false}, {
 		Name:        ConfigBurndownDebug,
 		Description: "Validate the trees on each step.",
 		Flag:        "burndown-debug",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false},
 	}
 	return options[:]
@@ -184,9 +189,9 @@ func (analyser *BurndownAnalysis) Configure(facts map[string]interface{}) {
 		analyser.TrackFiles = val
 	}
 	if people, exists := facts[ConfigBurndownTrackPeople].(bool); people {
-		if val, exists := facts[FactIdentityDetectorPeopleCount].(int); exists {
+		if val, exists := facts[identity.FactIdentityDetectorPeopleCount].(int); exists {
 			analyser.PeopleNumber = val
-			analyser.reversedPeopleDict = facts[FactIdentityDetectorReversedPeopleDict].([]string)
+			analyser.reversedPeopleDict = facts[identity.FactIdentityDetectorReversedPeopleDict].([]string)
 		}
 	} else if exists {
 		analyser.PeopleNumber = 0
@@ -224,7 +229,7 @@ func (analyser *BurndownAnalysis) Initialize(repository *git.Repository) {
 	analyser.globalHistory = [][]int64{}
 	analyser.fileHistories = map[string][][]int64{}
 	analyser.peopleHistories = make([][][]int64, analyser.PeopleNumber)
-	analyser.files = map[string]*File{}
+	analyser.files = map[string]*burndown.File{}
 	analyser.matrix = make([]map[int]int64, analyser.PeopleNumber)
 	analyser.people = make([]map[int]int64, analyser.PeopleNumber)
 	analyser.day = 0
@@ -241,17 +246,17 @@ func (analyser *BurndownAnalysis) Consume(deps map[string]interface{}) (map[stri
 	if sampling == 0 {
 		sampling = 1
 	}
-	author := deps[DependencyAuthor].(int)
-	analyser.day = deps[DependencyDay].(int)
+	author := deps[identity.DependencyAuthor].(int)
+	analyser.day = deps[items.DependencyDay].(int)
 	delta := (analyser.day / sampling) - (analyser.previousDay / sampling)
 	if delta > 0 {
 		analyser.previousDay = analyser.day
 		gs, fss, pss := analyser.groupStatus()
 		analyser.updateHistories(gs, fss, pss, delta)
 	}
-	cache := deps[DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
-	treeDiffs := deps[DependencyTreeChanges].(object.Changes)
-	fileDiffs := deps[DependencyFileDiff].(map[string]FileDiffData)
+	cache := deps[items.DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
+	treeDiffs := deps[items.DependencyTreeChanges].(object.Changes)
+	fileDiffs := deps[items.DependencyFileDiff].(map[string]items.FileDiffData)
 	for _, change := range treeDiffs {
 		action, _ := change.Action()
 		var err error
@@ -289,7 +294,7 @@ func (analyser *BurndownAnalysis) Finalize() interface{} {
 		mrow := make([]int64, analyser.PeopleNumber+2)
 		peopleMatrix[i] = mrow
 		for key, val := range row {
-			if key == AuthorMissing {
+			if key == identity.AuthorMissing {
 				key = -1
 			} else if key == authorSelf {
 				key = -2
@@ -364,7 +369,7 @@ func (analyser *BurndownAnalysis) Deserialize(pbmessage []byte) (interface{}, er
 
 // MergeResults combines two BurndownResult-s together.
 func (analyser *BurndownAnalysis) MergeResults(
-	r1, r2 interface{}, c1, c2 *CommonAnalysisResult) interface{} {
+	r1, r2 interface{}, c1, c2 *core.CommonAnalysisResult) interface{} {
 	bar1 := r1.(BurndownResult)
 	bar2 := r2.(BurndownResult)
 	merged := BurndownResult{}
@@ -379,7 +384,7 @@ func (analyser *BurndownAnalysis) MergeResults(
 		merged.granularity = bar2.granularity
 	}
 	var people map[string][3]int
-	people, merged.reversedPeopleDict = IdentityDetector{}.MergeReversedDicts(
+	people, merged.reversedPeopleDict = identity.Detector{}.MergeReversedDicts(
 		bar1.reversedPeopleDict, bar2.reversedPeopleDict)
 	var wg sync.WaitGroup
 	if len(bar1.GlobalHistory) > 0 || len(bar2.GlobalHistory) > 0 {
@@ -498,7 +503,7 @@ func (analyser *BurndownAnalysis) MergeResults(
 // resamples them to days so that they become square, sums and resamples back to the
 // least of (sampling1, sampling2) and (granularity1, granularity2).
 func mergeMatrices(m1, m2 [][]int64, granularity1, sampling1, granularity2, sampling2 int,
-	c1, c2 *CommonAnalysisResult) [][]int64 {
+	c1, c2 *core.CommonAnalysisResult) [][]int64 {
 	commonMerged := *c1
 	commonMerged.Merge(c2)
 
@@ -802,7 +807,7 @@ func (analyser *BurndownAnalysis) packPersonWithDay(person int, day int) int {
 
 func (analyser *BurndownAnalysis) unpackPersonWithDay(value int) (int, int) {
 	if analyser.PeopleNumber == 0 {
-		return AuthorMissing, value
+		return identity.AuthorMissing, value
 	}
 	return value >> 14, value & 0x3FFF
 }
@@ -817,7 +822,7 @@ func (analyser *BurndownAnalysis) updateStatus(
 func (analyser *BurndownAnalysis) updatePeople(
 	peopleUncasted interface{}, _ int, previousValue int, delta int) {
 	previousAuthor, previousTime := analyser.unpackPersonWithDay(previousValue)
-	if previousAuthor == AuthorMissing {
+	if previousAuthor == identity.AuthorMissing {
 		return
 	}
 	people := peopleUncasted.([]map[int]int64)
@@ -835,7 +840,7 @@ func (analyser *BurndownAnalysis) updateMatrix(
 	matrix := matrixUncasted.([]map[int]int64)
 	newAuthor, _ := analyser.unpackPersonWithDay(currentTime)
 	oldAuthor, _ := analyser.unpackPersonWithDay(previousTime)
-	if oldAuthor == AuthorMissing {
+	if oldAuthor == identity.AuthorMissing {
 		return
 	}
 	if newAuthor == oldAuthor && delta > 0 {
@@ -856,24 +861,24 @@ func (analyser *BurndownAnalysis) updateMatrix(
 
 func (analyser *BurndownAnalysis) newFile(
 	author int, day int, size int, global map[int]int64, people []map[int]int64,
-	matrix []map[int]int64) *File {
-	statuses := make([]Status, 1)
-	statuses[0] = NewStatus(global, analyser.updateStatus)
+	matrix []map[int]int64) *burndown.File {
+	statuses := make([]burndown.Status, 1)
+	statuses[0] = burndown.NewStatus(global, analyser.updateStatus)
 	if analyser.TrackFiles {
-		statuses = append(statuses, NewStatus(map[int]int64{}, analyser.updateStatus))
+		statuses = append(statuses, burndown.NewStatus(map[int]int64{}, analyser.updateStatus))
 	}
 	if analyser.PeopleNumber > 0 {
-		statuses = append(statuses, NewStatus(people, analyser.updatePeople))
-		statuses = append(statuses, NewStatus(matrix, analyser.updateMatrix))
+		statuses = append(statuses, burndown.NewStatus(people, analyser.updatePeople))
+		statuses = append(statuses, burndown.NewStatus(matrix, analyser.updateMatrix))
 		day = analyser.packPersonWithDay(author, day)
 	}
-	return NewFile(day, size, statuses...)
+	return burndown.NewFile(day, size, statuses...)
 }
 
 func (analyser *BurndownAnalysis) handleInsertion(
 	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob) error {
 	blob := cache[change.To.TreeEntry.Hash]
-	lines, err := CountLines(blob)
+	lines, err := items.CountLines(blob)
 	if err != nil {
 		if err.Error() == "binary" {
 			return nil
@@ -895,7 +900,7 @@ func (analyser *BurndownAnalysis) handleDeletion(
 	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob) error {
 
 	blob := cache[change.From.TreeEntry.Hash]
-	lines, err := CountLines(blob)
+	lines, err := items.CountLines(blob)
 	if err != nil {
 		if err.Error() == "binary" {
 			return nil
@@ -911,7 +916,7 @@ func (analyser *BurndownAnalysis) handleDeletion(
 
 func (analyser *BurndownAnalysis) handleModification(
 	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob,
-	diffs map[string]FileDiffData) error {
+	diffs map[string]items.FileDiffData) error {
 
 	file, exists := analyser.files[change.From.Name]
 	if !exists {
@@ -1124,5 +1129,5 @@ func (analyser *BurndownAnalysis) updateHistories(
 }
 
 func init() {
-	Registry.Register(&BurndownAnalysis{})
+	core.Registry.Register(&BurndownAnalysis{})
 }
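
The init() side effect above is what the new registration tests check: the Registry internals (registered, flags) are private to internal/core now, so lookups go through Summon() and GetLeaves(). A sketch of the pattern, assuming the leaf packages live under the top-level leaves package (the "../internal" test-data paths below imply that layout).

package main

import (
	"fmt"

	"gopkg.in/src-d/hercules.v4/internal/core"
	"gopkg.in/src-d/hercules.v4/leaves"
)

func main() {
	// Summon() resolves registered items by their Name().
	for _, item := range core.Registry.Summon((&leaves.BurndownAnalysis{}).Name()) {
		fmt.Println("summoned:", item.Name())
	}
	// GetLeaves() enumerates the leaf analyses together with their CLI flags.
	for _, leaf := range core.Registry.GetLeaves() {
		fmt.Println("leaf:", leaf.Flag())
	}
}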

+ 87 - 73
burndown_test.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"bytes"
@@ -7,18 +7,26 @@ import (
 	"path"
 	"testing"
 
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/test/fixtures"
+
 	"github.com/gogo/protobuf/proto"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func TestBurndownMeta(t *testing.T) {
 	burndown := BurndownAnalysis{}
 	assert.Equal(t, burndown.Name(), "Burndown")
 	assert.Equal(t, len(burndown.Provides()), 0)
-	required := [...]string{DependencyFileDiff, DependencyTreeChanges, DependencyBlobCache, DependencyDay, DependencyAuthor}
+	required := [...]string{
+		items.DependencyFileDiff, items.DependencyTreeChanges, items.DependencyBlobCache,
+		items.DependencyDay, identity.DependencyAuthor}
 	for _, name := range required {
 		assert.Contains(t, burndown.Requires(), name)
 	}
@@ -43,8 +51,8 @@ func TestBurndownConfigure(t *testing.T) {
 	facts[ConfigBurndownTrackFiles] = true
 	facts[ConfigBurndownTrackPeople] = true
 	facts[ConfigBurndownDebug] = true
-	facts[FactIdentityDetectorPeopleCount] = 5
-	facts[FactIdentityDetectorReversedPeopleDict] = burndown.Requires()
+	facts[identity.FactIdentityDetectorPeopleCount] = 5
+	facts[identity.FactIdentityDetectorReversedPeopleDict] = burndown.Requires()
 	burndown.Configure(facts)
 	assert.Equal(t, burndown.Granularity, 100)
 	assert.Equal(t, burndown.Sampling, 200)
@@ -53,7 +61,7 @@ func TestBurndownConfigure(t *testing.T) {
 	assert.Equal(t, burndown.Debug, true)
 	assert.Equal(t, burndown.reversedPeopleDict, burndown.Requires())
 	facts[ConfigBurndownTrackPeople] = false
-	facts[FactIdentityDetectorPeopleCount] = 50
+	facts[identity.FactIdentityDetectorPeopleCount] = 50
 	burndown.Configure(facts)
 	assert.Equal(t, burndown.PeopleNumber, 0)
 	facts = map[string]interface{}{}
@@ -67,29 +75,35 @@ func TestBurndownConfigure(t *testing.T) {
 }
 
 func TestBurndownRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&BurndownAnalysis{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "BurndownAnalysis")
-	tp, exists = Registry.flags[(&BurndownAnalysis{}).Flag()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "BurndownAnalysis")
+	summoned := core.Registry.Summon((&BurndownAnalysis{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "Burndown")
+	leaves := core.Registry.GetLeaves()
+	matched := false
+	for _, tp := range leaves {
+		if tp.Flag() == (&BurndownAnalysis{}).Flag() {
+			matched = true
+			break
+		}
+	}
+	assert.True(t, matched)
 }
 
 func TestBurndownInitialize(t *testing.T) {
 	burndown := BurndownAnalysis{}
 	burndown.Sampling = -10
 	burndown.Granularity = DefaultBurndownGranularity
-	burndown.Initialize(testRepository)
+	burndown.Initialize(test.Repository)
 	assert.Equal(t, burndown.Sampling, DefaultBurndownGranularity)
 	assert.Equal(t, burndown.Granularity, DefaultBurndownGranularity)
 	burndown.Sampling = 0
 	burndown.Granularity = DefaultBurndownGranularity - 1
-	burndown.Initialize(testRepository)
+	burndown.Initialize(test.Repository)
 	assert.Equal(t, burndown.Sampling, DefaultBurndownGranularity-1)
 	assert.Equal(t, burndown.Granularity, DefaultBurndownGranularity-1)
 	burndown.Sampling = DefaultBurndownGranularity - 1
 	burndown.Granularity = -10
-	burndown.Initialize(testRepository)
+	burndown.Initialize(test.Repository)
 	assert.Equal(t, burndown.Sampling, DefaultBurndownGranularity-1)
 	assert.Equal(t, burndown.Granularity, DefaultBurndownGranularity)
 }
@@ -101,26 +115,26 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 		PeopleNumber: 2,
 		TrackFiles:   true,
 	}
-	burndown.Initialize(testRepository)
+	burndown.Initialize(test.Repository)
 	deps := map[string]interface{}{}
 
 	// stage 1
-	deps[DependencyAuthor] = 0
-	deps[DependencyDay] = 0
+	deps[identity.DependencyAuthor] = 0
+	deps[items.DependencyDay] = 0
 	cache := map[plumbing.Hash]*object.Blob{}
 	hash := plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9")
-	cache[hash], _ = testRepository.BlobObject(hash)
-	deps[DependencyBlobCache] = cache
+	cache[hash], _ = test.Repository.BlobObject(hash)
+	deps[items.DependencyBlobCache] = cache
 	changes := make(object.Changes, 3)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -159,11 +173,11 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 		},
 	},
 	}
-	deps[DependencyTreeChanges] = changes
-	fd := fixtureFileDiff()
+	deps[items.DependencyTreeChanges] = changes
+	fd := fixtures.FileDiff()
 	result, err := fd.Consume(deps)
 	assert.Nil(t, err)
-	deps[DependencyFileDiff] = result[DependencyFileDiff]
+	deps[items.DependencyFileDiff] = result[items.DependencyFileDiff]
 	result, err = burndown.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
@@ -182,7 +196,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 		Granularity: 30,
 		Sampling:    0,
 	}
-	burndown2.Initialize(testRepository)
+	burndown2.Initialize(test.Repository)
 	_, err = burndown2.Consume(deps)
 	assert.Nil(t, err)
 	assert.Equal(t, len(burndown2.people), 0)
@@ -191,24 +205,24 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 
 	// stage 2
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
-	deps[DependencyAuthor] = 1
-	deps[DependencyDay] = 30
+	deps[identity.DependencyAuthor] = 1
+	deps[items.DependencyDay] = 30
 	cache = map[plumbing.Hash]*object.Blob{}
 	hash = plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("f7d918ec500e2f925ecde79b51cc007bac27de72")
-	cache[hash], _ = testRepository.BlobObject(hash)
-	deps[DependencyBlobCache] = cache
+	cache[hash], _ = test.Repository.BlobObject(hash)
+	deps[items.DependencyBlobCache] = cache
 	changes = make(object.Changes, 3)
-	treeFrom, _ = testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ = test.Repository.TreeObject(plumbing.NewHash(
 		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
-	treeTo, _ = testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ = test.Repository.TreeObject(plumbing.NewHash(
 		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -256,11 +270,11 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 		},
 	}, To: object.ChangeEntry{},
 	}
-	deps[DependencyTreeChanges] = changes
-	fd = fixtureFileDiff()
+	deps[items.DependencyTreeChanges] = changes
+	fd = fixtures.FileDiff()
 	result, err = fd.Consume(deps)
 	assert.Nil(t, err)
-	deps[DependencyFileDiff] = result[DependencyFileDiff]
+	deps[items.DependencyFileDiff] = result[items.DependencyFileDiff]
 	result, err = burndown.Consume(deps)
 	assert.Nil(t, result)
 	assert.Nil(t, err)
@@ -322,25 +336,25 @@ func TestBurndownSerialize(t *testing.T) {
 		PeopleNumber: 2,
 		TrackFiles:   true,
 	}
-	burndown.Initialize(testRepository)
+	burndown.Initialize(test.Repository)
 	deps := map[string]interface{}{}
 	// stage 1
-	deps[DependencyAuthor] = 0
-	deps[DependencyDay] = 0
+	deps[identity.DependencyAuthor] = 0
+	deps[items.DependencyDay] = 0
 	cache := map[plumbing.Hash]*object.Blob{}
 	hash := plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9")
-	cache[hash], _ = testRepository.BlobObject(hash)
-	deps[DependencyBlobCache] = cache
+	cache[hash], _ = test.Repository.BlobObject(hash)
+	deps[items.DependencyBlobCache] = cache
 	changes := make(object.Changes, 3)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -379,32 +393,32 @@ func TestBurndownSerialize(t *testing.T) {
 		},
 	},
 	}
-	deps[DependencyTreeChanges] = changes
-	fd := fixtureFileDiff()
+	deps[items.DependencyTreeChanges] = changes
+	fd := fixtures.FileDiff()
 	result, _ := fd.Consume(deps)
-	deps[DependencyFileDiff] = result[DependencyFileDiff]
+	deps[items.DependencyFileDiff] = result[items.DependencyFileDiff]
 	burndown.Consume(deps)
 
 	// stage 2
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
-	deps[DependencyAuthor] = 1
-	deps[DependencyDay] = 30
+	deps[identity.DependencyAuthor] = 1
+	deps[items.DependencyDay] = 30
 	cache = map[plumbing.Hash]*object.Blob{}
 	hash = plumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("29c9fafd6a2fae8cd20298c3f60115bc31a4c0f2")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9")
-	cache[hash], _ = testRepository.BlobObject(hash)
+	cache[hash], _ = test.Repository.BlobObject(hash)
 	hash = plumbing.NewHash("f7d918ec500e2f925ecde79b51cc007bac27de72")
-	cache[hash], _ = testRepository.BlobObject(hash)
-	deps[DependencyBlobCache] = cache
+	cache[hash], _ = test.Repository.BlobObject(hash)
+	deps[items.DependencyBlobCache] = cache
 	changes = make(object.Changes, 3)
-	treeFrom, _ = testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ = test.Repository.TreeObject(plumbing.NewHash(
 		"96c6ece9b2f3c7c51b83516400d278dea5605100"))
-	treeTo, _ = testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ = test.Repository.TreeObject(plumbing.NewHash(
 		"251f2094d7b523d5bcc60e663b6cf38151bf8844"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -452,10 +466,10 @@ func TestBurndownSerialize(t *testing.T) {
 		},
 	}, To: object.ChangeEntry{},
 	}
-	deps[DependencyTreeChanges] = changes
-	fd = fixtureFileDiff()
+	deps[items.DependencyTreeChanges] = changes
+	fd = fixtures.FileDiff()
 	result, _ = fd.Consume(deps)
-	deps[DependencyFileDiff] = result[DependencyFileDiff]
+	deps[items.DependencyFileDiff] = result[items.DependencyFileDiff]
 	people := [...]string{"one@srcd", "two@srcd"}
 	burndown.reversedPeopleDict = people[:]
 	burndown.Consume(deps)
@@ -807,7 +821,7 @@ func TestBurndownMergeGlobalHistory(t *testing.T) {
 		sampling:           15,
 		granularity:        20,
 	}
-	c1 := CommonAnalysisResult{
+	c1 := core.CommonAnalysisResult{
 		BeginTime:     600566400, // 1989 Jan 12
 		EndTime:       604713600, // 1989 March 1
 		CommitsNumber: 10,
@@ -857,7 +871,7 @@ func TestBurndownMergeGlobalHistory(t *testing.T) {
 		sampling:           14,
 		granularity:        19,
 	}
-	c2 := CommonAnalysisResult{
+	c2 := core.CommonAnalysisResult{
 		BeginTime:     601084800, // 1989 Jan 18
 		EndTime:       605923200, // 1989 March 15
 		CommitsNumber: 10,
@@ -946,7 +960,7 @@ func TestBurndownMergeNils(t *testing.T) {
 		sampling:           15,
 		granularity:        20,
 	}
-	c1 := CommonAnalysisResult{
+	c1 := core.CommonAnalysisResult{
 		BeginTime:     600566400, // 1989 Jan 12
 		EndTime:       604713600, // 1989 March 1
 		CommitsNumber: 10,
@@ -961,7 +975,7 @@ func TestBurndownMergeNils(t *testing.T) {
 		sampling:           14,
 		granularity:        19,
 	}
-	c2 := CommonAnalysisResult{
+	c2 := core.CommonAnalysisResult{
 		BeginTime:     601084800, // 1989 Jan 18
 		EndTime:       605923200, // 1989 March 15
 		CommitsNumber: 10,
@@ -1048,7 +1062,7 @@ func TestBurndownMergeNils(t *testing.T) {
 }
 
 func TestBurndownDeserialize(t *testing.T) {
-	allBuffer, err := ioutil.ReadFile(path.Join("test_data", "burndown.pb"))
+	allBuffer, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", "burndown.pb"))
 	assert.Nil(t, err)
 	message := pb.AnalysisResults{}
 	err = proto.Unmarshal(allBuffer, &message)

+ 18 - 15
comment_sentiment.go

@@ -1,6 +1,6 @@
 // +build tensorflow
 
-package hercules
+package leaves
 
 import (
 	"fmt"
@@ -16,7 +16,10 @@ import (
 	progress "gopkg.in/cheggaaa/pb.v1"
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	uast_items "gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
 	"gopkg.in/vmarkovtsev/BiDiSentiment.v1"
 )
 
@@ -27,7 +30,7 @@ type CommentSentimentAnalysis struct {
 
 	commentsByDay map[int][]string
 	commitsByDay  map[int][]plumbing.Hash
-	xpather       *ChangesXPather
+	xpather       *uast_items.ChangesXPather
 }
 
 // CommentSentimentResult contains the sentiment values per day, where 1 means very negative
@@ -65,7 +68,7 @@ func (sent *CommentSentimentAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (sent *CommentSentimentAnalysis) Provides() []string {
 	return []string{}
 }
@@ -74,29 +77,29 @@ func (sent *CommentSentimentAnalysis) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (sent *CommentSentimentAnalysis) Requires() []string {
-	arr := [...]string{DependencyUastChanges, DependencyDay}
+	arr := [...]string{uast_items.DependencyUastChanges, items.DependencyDay}
 	return arr[:]
 }
 
 // Features returns the list of features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
 func (sent *CommentSentimentAnalysis) Features() []string {
-	arr := [...]string{FeatureUast}
+	arr := [...]string{uast_items.FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (sent *CommentSentimentAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (sent *CommentSentimentAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigCommentSentimentMinLength,
 		Description: "Minimum length of the comment to be analyzed.",
 		Flag:        "min-comment-len",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     DefaultCommentSentimentCommentMinLength}, {
 		Name: ConfigCommentSentimentGap,
 		Description: "Sentiment value threshold, values between 0.5 - X/2 and 0.5 + x/2 will not be " +
 			"considered. Must be >= 0 and < 1. The purpose is to exclude neutral comments.",
 		Flag:    "sentiment-gap",
-		Type:    FloatConfigurationOption,
+		Type:    core.FloatConfigurationOption,
 		Default: DefaultCommentSentimentGap},
 	}
 	return options[:]
@@ -116,7 +119,7 @@ func (sent *CommentSentimentAnalysis) Configure(facts map[string]interface{}) {
 		sent.MinCommentLength = val.(int)
 	}
 	sent.validate()
-	sent.commitsByDay = facts[FactCommitsByDay].(map[int][]plumbing.Hash)
+	sent.commitsByDay = facts[items.FactCommitsByDay].(map[int][]plumbing.Hash)
 }
 
 func (sent *CommentSentimentAnalysis) validate() {
@@ -136,7 +139,7 @@ func (sent *CommentSentimentAnalysis) validate() {
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (sent *CommentSentimentAnalysis) Initialize(repository *git.Repository) {
 	sent.commentsByDay = map[int][]string{}
-	sent.xpather = &ChangesXPather{XPath: "//*[@roleComment]"}
+	sent.xpather = &uast_items.ChangesXPather{XPath: "//*[@roleComment]"}
 	sent.validate()
 }
 
@@ -146,8 +149,8 @@ func (sent *CommentSentimentAnalysis) Initialize(repository *git.Repository) {
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
 func (sent *CommentSentimentAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	changes := deps[DependencyUastChanges].([]UASTChange)
-	day := deps[DependencyDay].(int)
+	changes := deps[uast_items.DependencyUastChanges].([]uast_items.Change)
+	day := deps[items.DependencyDay].(int)
 	commentNodes := sent.xpather.Extract(changes)
 	comments := sent.mergeComments(commentNodes)
 	dayComments := sent.commentsByDay[day]
@@ -348,5 +351,5 @@ func (sent *CommentSentimentAnalysis) mergeComments(nodes []*uast.Node) []string
 }
 
 func init() {
-	Registry.Register(&CommentSentimentAnalysis{})
+	core.Registry.Register(&CommentSentimentAnalysis{})
 }
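
For reference, the extraction step in Consume() reduces to one call on the XPath helper. A sketch assuming only what is visible above: ChangesXPather matches nodes in the UASTs carried by each Change, and Extract() is assumed to return the matching nodes as a slice.

package main

import (
	"fmt"

	uast_items "gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
)

func main() {
	xpather := &uast_items.ChangesXPather{XPath: "//*[@roleComment]"}
	// In the pipeline, the changes carry Before/After UAST roots per file.
	changes := []uast_items.Change{}
	nodes := xpather.Extract(changes)
	fmt.Println(len(nodes), "comment nodes")
}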

+ 30 - 19
comment_sentiment_test.go

@@ -1,6 +1,6 @@
 // +build tensorflow
 
-package hercules
+package leaves
 
 import (
 	"bytes"
@@ -11,9 +11,14 @@ import (
 	"github.com/gogo/protobuf/proto"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/bblfsh/client-go.v2"
-	"gopkg.in/src-d/go-git.v4/plumbing"
-	"gopkg.in/src-d/hercules.v3/pb"
 	"gopkg.in/bblfsh/client-go.v2/tools"
+	"gopkg.in/src-d/go-git.v4/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	uast_items "gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
+	uast_test "gopkg.in/src-d/hercules.v4/internal/plumbing/uast/test"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func fixtureCommentSentiment() *CommentSentimentAnalysis {
@@ -22,10 +27,10 @@ func fixtureCommentSentiment() *CommentSentimentAnalysis {
 		MinCommentLength: DefaultCommentSentimentCommentMinLength,
 	}
 	facts := map[string]interface{}{
-		FactCommitsByDay: map[int][]plumbing.Hash{},
+		items.FactCommitsByDay: map[int][]plumbing.Hash{},
 	}
 	sent.Configure(facts)
-	sent.Initialize(testRepository)
+	sent.Initialize(test.Repository)
 	return sent
 }
 
@@ -33,7 +38,7 @@ func TestCommentSentimentMeta(t *testing.T) {
 	sent := CommentSentimentAnalysis{}
 	assert.Equal(t, sent.Name(), "Sentiment")
 	assert.Equal(t, len(sent.Provides()), 0)
-	required := [...]string{DependencyUastChanges, DependencyDay}
+	required := [...]string{uast_items.DependencyUastChanges, items.DependencyDay}
 	for _, name := range required {
 		assert.Contains(t, sent.Requires(), name)
 	}
@@ -48,7 +53,7 @@ func TestCommentSentimentMeta(t *testing.T) {
 	assert.Len(t, opts, matches)
 	assert.Equal(t, sent.Flag(), "sentiment")
 	assert.Len(t, sent.Features(), 1)
-	assert.Equal(t, sent.Features()[0], FeatureUast)
+	assert.Equal(t, sent.Features()[0], uast_items.FeatureUast)
 }
 
 func TestCommentSentimentConfigure(t *testing.T) {
@@ -56,7 +61,7 @@ func TestCommentSentimentConfigure(t *testing.T) {
 	facts := map[string]interface{}{}
 	facts[ConfigCommentSentimentMinLength] = 77
 	facts[ConfigCommentSentimentGap] = float32(0.77)
-	facts[FactCommitsByDay] = map[int][]plumbing.Hash{}
+	facts[items.FactCommitsByDay] = map[int][]plumbing.Hash{}
 	sent.Configure(facts)
 	assert.Equal(t, sent.Gap, float32(0.77))
 	assert.Equal(t, sent.MinCommentLength, 77)
@@ -68,12 +73,18 @@ func TestCommentSentimentConfigure(t *testing.T) {
 }
 
 func TestCommentSentimentRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&CommentSentimentAnalysis{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "CommentSentimentAnalysis")
-	tp, exists = Registry.flags[(&CommentSentimentAnalysis{}).Flag()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "CommentSentimentAnalysis")
+	summoned := core.Registry.Summon((&CommentSentimentAnalysis{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "Sentiment")
+	leaves := core.Registry.GetLeaves()
+	matched := false
+	for _, tp := range leaves {
+		if tp.Flag() == (&CommentSentimentAnalysis{}).Flag() {
+			matched = true
+			break
+		}
+	}
+	assert.True(t, matched)
 }
 
 func TestCommentSentimentSerializeText(t *testing.T) {
@@ -131,8 +142,8 @@ func TestCommentSentimentConsume(t *testing.T) {
 	}
 	hash1 := "4f7c7a154638a0f2468276c56188d90c9cef0dfc"
 	hash2 := "2a7392320b332494a08d5113aabe6d056fef7e9d"
-	root1 := parseBlobFromTestRepo(hash1, "labours.py", client)
-	root2 := parseBlobFromTestRepo(hash2, "labours.py", client)
+	root1 := uast_test.ParseBlobFromTestRepo(hash1, "labours.py", client)
+	root2 := uast_test.ParseBlobFromTestRepo(hash2, "labours.py", client)
 	comments, _ := tools.Filter(root2, "//*[@roleComment]")
 	for _, c := range comments {
 		t := strings.TrimSpace(c.Token)
@@ -142,10 +153,10 @@ func TestCommentSentimentConsume(t *testing.T) {
 			c.StartPosition = nil
 		}
 	}
-	gitChange := fakeChangeForName("labours.py", hash1, hash2)
+	gitChange := test.FakeChangeForName("labours.py", hash1, hash2)
 	deps := map[string]interface{}{
-		DependencyDay: 0,
-		DependencyUastChanges: []UASTChange{
+		items.DependencyDay: 0,
+		uast_items.DependencyUastChanges: []uast_items.Change{
 			{Before: root1, After: root2, Change: gitChange},
 		},
 	}

+ 19 - 16
couples.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"fmt"
@@ -9,8 +9,11 @@ import (
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/pb"
-	"gopkg.in/src-d/hercules.v3/yaml"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v4/yaml"
 )
 
 // CouplesAnalysis calculates the number of common commits for files and authors.
@@ -49,7 +52,7 @@ func (couples *CouplesAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (couples *CouplesAnalysis) Provides() []string {
 	return []string{}
 }
@@ -58,20 +61,20 @@ func (couples *CouplesAnalysis) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (couples *CouplesAnalysis) Requires() []string {
-	arr := [...]string{DependencyAuthor, DependencyTreeChanges}
+	arr := [...]string{identity.DependencyAuthor, items.DependencyTreeChanges}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (couples *CouplesAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (couples *CouplesAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (couples *CouplesAnalysis) Configure(facts map[string]interface{}) {
-	if val, exists := facts[FactIdentityDetectorPeopleCount].(int); exists {
+	if val, exists := facts[identity.FactIdentityDetectorPeopleCount].(int); exists {
 		couples.PeopleNumber = val
-		couples.reversedPeopleDict = facts[FactIdentityDetectorReversedPeopleDict].([]string)
+		couples.reversedPeopleDict = facts[identity.FactIdentityDetectorReversedPeopleDict].([]string)
 	}
 }
 
@@ -97,12 +100,12 @@ func (couples *CouplesAnalysis) Initialize(repository *git.Repository) {
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
 func (couples *CouplesAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	author := deps[DependencyAuthor].(int)
-	if author == AuthorMissing {
+	author := deps[identity.DependencyAuthor].(int)
+	if author == identity.AuthorMissing {
 		author = couples.PeopleNumber
 	}
 	couples.peopleCommits[author]++
-	treeDiff := deps[DependencyTreeChanges].(object.Changes)
+	treeDiff := deps[items.DependencyTreeChanges].(object.Changes)
 	context := make([]string, 0)
 	deleteFile := func(name string) {
 		// we do not remove the file from people - the context does not expire
@@ -262,14 +265,14 @@ func (couples *CouplesAnalysis) Deserialize(pbmessage []byte) (interface{}, erro
 }
 
 // MergeResults combines two CouplesAnalysis-s together.
-func (couples *CouplesAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *CommonAnalysisResult) interface{} {
+func (couples *CouplesAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAnalysisResult) interface{} {
 	cr1 := r1.(CouplesResult)
 	cr2 := r2.(CouplesResult)
 	merged := CouplesResult{}
 	var people, files map[string][3]int
-	people, merged.reversedPeopleDict = IdentityDetector{}.MergeReversedDicts(
+	people, merged.reversedPeopleDict = identity.Detector{}.MergeReversedDicts(
 		cr1.reversedPeopleDict, cr2.reversedPeopleDict)
-	files, merged.Files = IdentityDetector{}.MergeReversedDicts(cr1.Files, cr2.Files)
+	files, merged.Files = identity.Detector{}.MergeReversedDicts(cr1.Files, cr2.Files)
 	merged.PeopleFiles = make([][]int, len(merged.reversedPeopleDict))
 	peopleFilesDicts := make([]map[int]bool, len(merged.reversedPeopleDict))
 	addPeopleFiles := func(peopleFiles [][]int, reversedPeopleDict []string,
@@ -461,5 +464,5 @@ func (couples *CouplesAnalysis) serializeBinary(result *CouplesResult, writer io
 }
 
 func init() {
-	Registry.Register(&CouplesAnalysis{})
+	core.Registry.Register(&CouplesAnalysis{})
 }
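
MergeResults() above leans twice on identity.Detector{}.MergeReversedDicts, which unions two name dictionaries. A minimal sketch of that contract, assuming the call shape shown in the hunk; the per-name [3]int triple is assumed to hold the indices in the merged and the two input dictionaries.

package main

import (
	"fmt"

	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
)

func main() {
	d1 := []string{"one@srcd", "two@srcd"}
	d2 := []string{"two@srcd", "three@srcd"}
	people, merged := identity.Detector{}.MergeReversedDicts(d1, d2)
	fmt.Println(merged) // the union dictionary
	for name, indices := range people {
		fmt.Println(name, indices) // index triple per name
	}
}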

+ 38 - 28
couples_test.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"bytes"
@@ -10,12 +10,16 @@ import (
 	"github.com/gogo/protobuf/proto"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func fixtureCouples() *CouplesAnalysis {
 	c := CouplesAnalysis{PeopleNumber: 3}
-	c.Initialize(testRepository)
+	c.Initialize(test.Repository)
 	return &c
 }
 
@@ -24,19 +28,25 @@ func TestCouplesMeta(t *testing.T) {
 	assert.Equal(t, c.Name(), "Couples")
 	assert.Equal(t, len(c.Provides()), 0)
 	assert.Equal(t, len(c.Requires()), 2)
-	assert.Equal(t, c.Requires()[0], DependencyAuthor)
-	assert.Equal(t, c.Requires()[1], DependencyTreeChanges)
+	assert.Equal(t, c.Requires()[0], identity.DependencyAuthor)
+	assert.Equal(t, c.Requires()[1], plumbing.DependencyTreeChanges)
 	assert.Equal(t, c.Flag(), "couples")
 	assert.Len(t, c.ListConfigurationOptions(), 0)
 }
 
 func TestCouplesRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&CouplesAnalysis{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "CouplesAnalysis")
-	tp, exists = Registry.flags[(&CouplesAnalysis{}).Flag()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "CouplesAnalysis")
+	summoned := core.Registry.Summon((&CouplesAnalysis{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "Couples")
+	leaves := core.Registry.GetLeaves()
+	matched := false
+	for _, tp := range leaves {
+		if tp.Flag() == (&CouplesAnalysis{}).Flag() {
+			matched = true
+			break
+		}
+	}
+	assert.True(t, matched)
 }
 
 func generateChanges(names ...string) object.Changes {
@@ -78,16 +88,16 @@ func generateChanges(names ...string) object.Changes {
 func TestCouplesConsumeFinalize(t *testing.T) {
 	c := fixtureCouples()
 	deps := map[string]interface{}{}
-	deps[DependencyAuthor] = 0
-	deps[DependencyTreeChanges] = generateChanges("+two", "+four", "+six")
+	deps[identity.DependencyAuthor] = 0
+	deps[plumbing.DependencyTreeChanges] = generateChanges("+two", "+four", "+six")
 	c.Consume(deps)
-	deps[DependencyTreeChanges] = generateChanges("+one", "-two", "=three", ">four>five")
+	deps[plumbing.DependencyTreeChanges] = generateChanges("+one", "-two", "=three", ">four>five")
 	c.Consume(deps)
-	deps[DependencyAuthor] = 1
-	deps[DependencyTreeChanges] = generateChanges("=one", "=three", "-six")
+	deps[identity.DependencyAuthor] = 1
+	deps[plumbing.DependencyTreeChanges] = generateChanges("=one", "=three", "-six")
 	c.Consume(deps)
-	deps[DependencyAuthor] = 2
-	deps[DependencyTreeChanges] = generateChanges("=five")
+	deps[identity.DependencyAuthor] = 2
+	deps[plumbing.DependencyTreeChanges] = generateChanges("=five")
 	c.Consume(deps)
 	assert.Equal(t, len(c.people[0]), 5)
 	assert.Equal(t, c.people[0]["one"], 1)
@@ -165,21 +175,21 @@ func TestCouplesSerialize(t *testing.T) {
 	facts := map[string]interface{}{}
 	c.Configure(facts)
 	assert.Equal(t, c.PeopleNumber, 1)
-	facts[FactIdentityDetectorPeopleCount] = 3
-	facts[FactIdentityDetectorReversedPeopleDict] = people[:]
+	facts[identity.FactIdentityDetectorPeopleCount] = 3
+	facts[identity.FactIdentityDetectorReversedPeopleDict] = people[:]
 	c.Configure(facts)
 	assert.Equal(t, c.PeopleNumber, 3)
 	deps := map[string]interface{}{}
-	deps[DependencyAuthor] = 0
-	deps[DependencyTreeChanges] = generateChanges("+two", "+four", "+six")
+	deps[identity.DependencyAuthor] = 0
+	deps[plumbing.DependencyTreeChanges] = generateChanges("+two", "+four", "+six")
 	c.Consume(deps)
-	deps[DependencyTreeChanges] = generateChanges("+one", "-two", "=three", ">four>five")
+	deps[plumbing.DependencyTreeChanges] = generateChanges("+one", "-two", "=three", ">four>five")
 	c.Consume(deps)
-	deps[DependencyAuthor] = 1
-	deps[DependencyTreeChanges] = generateChanges("=one", "=three", "-six")
+	deps[identity.DependencyAuthor] = 1
+	deps[plumbing.DependencyTreeChanges] = generateChanges("=one", "=three", "-six")
 	c.Consume(deps)
-	deps[DependencyAuthor] = 2
-	deps[DependencyTreeChanges] = generateChanges("=five")
+	deps[identity.DependencyAuthor] = 2
+	deps[plumbing.DependencyTreeChanges] = generateChanges("=five")
 	c.Consume(deps)
 	result := c.Finalize().(CouplesResult)
 	buffer := &bytes.Buffer{}
@@ -247,7 +257,7 @@ func TestCouplesSerialize(t *testing.T) {
 }
 
 func TestCouplesDeserialize(t *testing.T) {
-	allBuffer, err := ioutil.ReadFile(path.Join("test_data", "couples.pb"))
+	allBuffer, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", "couples.pb"))
 	assert.Nil(t, err)
 	message := pb.AnalysisResults{}
 	err = proto.Unmarshal(allBuffer, &message)

+ 10 - 8
file_history.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"fmt"
@@ -11,7 +11,9 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
 )
 
 // FileHistory contains the intermediate state which is mutated by Consume(). It should implement
@@ -32,7 +34,7 @@ func (history *FileHistory) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (history *FileHistory) Provides() []string {
 	return []string{}
 }
@@ -41,13 +43,13 @@ func (history *FileHistory) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (history *FileHistory) Requires() []string {
-	arr := [...]string{DependencyTreeChanges}
+	arr := [...]string{items.DependencyTreeChanges}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (history *FileHistory) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (history *FileHistory) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Flag for the command line switch which enables this analysis.
@@ -72,7 +74,7 @@ func (history *FileHistory) Initialize(repository *git.Repository) {
 // in Provides(). If there was an error, nil is returned.
 func (history *FileHistory) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
 	commit := deps["commit"].(*object.Commit).Hash
-	changes := deps[DependencyTreeChanges].(object.Changes)
+	changes := deps[items.DependencyTreeChanges].(object.Changes)
 	for _, change := range changes {
 		action, _ := change.Action()
 		switch action {
@@ -150,5 +152,5 @@ func (history *FileHistory) serializeBinary(result *FileHistoryResult, writer io
 }
 
 func init() {
-	Registry.Register(&FileHistory{})
+	core.Registry.Register(&FileHistory{})
 }
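
FileHistory needs only the raw commit and the tree diff, so it is easy to drive outside the pipeline. A sketch mirroring the tests below (same illustrative commit hash; an empty change list keeps it self-contained).

package main

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
	"gopkg.in/src-d/hercules.v4/internal/test"
	"gopkg.in/src-d/hercules.v4/leaves"
)

func main() {
	fh := leaves.FileHistory{}
	fh.Initialize(test.Repository)
	commit, err := test.Repository.CommitObject(plumbing.NewHash(
		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
	if err != nil {
		return // the hash only resolves against a Hercules checkout or clone
	}
	deps := map[string]interface{}{
		"commit":                    commit,
		items.DependencyTreeChanges: object.Changes{},
	}
	_, err = fh.Consume(deps)
	fmt.Println("consumed:", err)
}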

+ 29 - 20
file_history_test.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"bytes"
@@ -8,12 +8,15 @@ import (
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func fixtureFileHistory() *FileHistory {
 	fh := FileHistory{}
-	fh.Initialize(testRepository)
+	fh.Initialize(test.Repository)
 	return &fh
 }
 
@@ -22,27 +25,33 @@ func TestFileHistoryMeta(t *testing.T) {
 	assert.Equal(t, fh.Name(), "FileHistory")
 	assert.Equal(t, len(fh.Provides()), 0)
 	assert.Equal(t, len(fh.Requires()), 1)
-	assert.Equal(t, fh.Requires()[0], DependencyTreeChanges)
+	assert.Equal(t, fh.Requires()[0], items.DependencyTreeChanges)
 	assert.Len(t, fh.ListConfigurationOptions(), 0)
 	fh.Configure(nil)
 }
 
 func TestFileHistoryRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&FileHistory{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "FileHistory")
-	tp, exists = Registry.flags[(&FileHistory{}).Flag()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "FileHistory")
+	summoned := core.Registry.Summon((&FileHistory{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "FileHistory")
+	leaves := core.Registry.GetLeaves()
+	matched := false
+	for _, tp := range leaves {
+		if tp.Flag() == (&FileHistory{}).Flag() {
+			matched = true
+			break
+		}
+	}
+	assert.True(t, matched)
 }
 
 func TestFileHistoryConsume(t *testing.T) {
 	fh := fixtureFileHistory()
 	deps := map[string]interface{}{}
 	changes := make(object.Changes, 3)
-	treeFrom, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeFrom, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
 	changes[0] = &object.Change{From: object.ChangeEntry{
 		Name: "analyser.go",
@@ -81,8 +90,8 @@ func TestFileHistoryConsume(t *testing.T) {
 		},
 	},
 	}
-	deps[DependencyTreeChanges] = changes
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	deps[items.DependencyTreeChanges] = changes
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	deps["commit"] = commit
 	fh.files["cmd/hercules/main.go"] = []plumbing.Hash{plumbing.NewHash(
@@ -108,7 +117,7 @@ func TestFileHistorySerializeText(t *testing.T) {
 	fh := fixtureFileHistory()
 	deps := map[string]interface{}{}
 	changes := make(object.Changes, 1)
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
 	changes[0] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
 		Name: ".travis.yml",
@@ -120,8 +129,8 @@ func TestFileHistorySerializeText(t *testing.T) {
 		},
 	},
 	}
-	deps[DependencyTreeChanges] = changes
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	deps[items.DependencyTreeChanges] = changes
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	deps["commit"] = commit
 	fh.Consume(deps)
@@ -135,7 +144,7 @@ func TestFileHistorySerializeBinary(t *testing.T) {
 	fh := fixtureFileHistory()
 	deps := map[string]interface{}{}
 	changes := make(object.Changes, 1)
-	treeTo, _ := testRepository.TreeObject(plumbing.NewHash(
+	treeTo, _ := test.Repository.TreeObject(plumbing.NewHash(
 		"994eac1cd07235bb9815e547a75c84265dea00f5"))
 	changes[0] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
 		Name: ".travis.yml",
@@ -147,8 +156,8 @@ func TestFileHistorySerializeBinary(t *testing.T) {
 		},
 	},
 	}
-	deps[DependencyTreeChanges] = changes
-	commit, _ := testRepository.CommitObject(plumbing.NewHash(
+	deps[items.DependencyTreeChanges] = changes
+	commit, _ := test.Repository.CommitObject(plumbing.NewHash(
 		"2b1ed978194a94edeabbca6de7ff3b5771d4d665"))
 	deps["commit"] = commit
 	fh.Consume(deps)

+ 16 - 13
shotness.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"fmt"
@@ -13,7 +13,10 @@ import (
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	uast_items "gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
 )
 
 // ShotnessAnalysis contains the intermediate state which is mutated by Consume(). It should implement
@@ -75,7 +78,7 @@ func (shotness *ShotnessAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (shotness *ShotnessAnalysis) Provides() []string {
 	return []string{}
 }
@@ -84,28 +87,28 @@ func (shotness *ShotnessAnalysis) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (shotness *ShotnessAnalysis) Requires() []string {
-	arr := [...]string{DependencyFileDiff, DependencyUastChanges}
+	arr := [...]string{items.DependencyFileDiff, uast_items.DependencyUastChanges}
 	return arr[:]
 }
 
 // Features returns the list of features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
 func (shotness *ShotnessAnalysis) Features() []string {
-	arr := [...]string{FeatureUast}
+	arr := [...]string{uast_items.FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (shotness *ShotnessAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	opts := [...]ConfigurationOption{{
+func (shotness *ShotnessAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	opts := [...]core.ConfigurationOption{{
 		Name:        ConfigShotnessXpathStruct,
 		Description: "UAST XPath query to use for filtering the nodes.",
 		Flag:        "shotness-xpath-struct",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     DefaultShotnessXpathStruct}, {
 		Name:        ConfigShotnessXpathName,
 		Description: "UAST XPath query to determine the names of the filtered nodes.",
 		Flag:        "shotness-xpath-name",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     DefaultShotnessXpathName},
 	}
 	return opts[:]
@@ -144,8 +147,8 @@ func (shotness *ShotnessAnalysis) Initialize(repository *git.Repository) {
 // in Provides(). If there was an error, nil is returned.
 func (shotness *ShotnessAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
 	commit := deps["commit"].(*object.Commit)
-	changesList := deps[DependencyUastChanges].([]UASTChange)
-	diffs := deps[DependencyFileDiff].(map[string]FileDiffData)
+	changesList := deps[uast_items.DependencyUastChanges].([]uast_items.Change)
+	diffs := deps[items.DependencyFileDiff].(map[string]items.FileDiffData)
 	allNodes := map[string]bool{}
 
 	addNode := func(name string, node *uast.Node, fileName string) {
@@ -253,7 +256,7 @@ func (shotness *ShotnessAnalysis) Consume(deps map[string]interface{}) (map[stri
 					endLine = node.EndPosition.Line
 				} else {
 					// we need to determine node.EndPosition.Line
-					VisitEachNode(node, func(child *uast.Node) {
+					uast_items.VisitEachNode(node, func(child *uast.Node) {
 						if child.StartPosition != nil {
 							candidate := child.StartPosition.Line
 							if child.EndPosition != nil {
@@ -467,5 +470,5 @@ func reverseNodeMap(nodes map[string]*uast.Node) map[*uast.Node]string {
 }
 
 func init() {
-	Registry.Register(&ShotnessAnalysis{})
+	core.Registry.Register(&ShotnessAnalysis{})
 }
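
ListConfigurationOptions() is how front ends discover the switches without hard-coding them. A sketch that just prints the fields visible in the options above.

package main

import (
	"fmt"

	"gopkg.in/src-d/hercules.v4/leaves"
)

func main() {
	sh := &leaves.ShotnessAnalysis{}
	for _, opt := range sh.ListConfigurationOptions() {
		fmt.Printf("--%s  %s (default %v)\n", opt.Flag, opt.Description, opt.Default)
	}
}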

+ 46 - 35
shotness_test.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"io/ioutil"
@@ -6,31 +6,36 @@ import (
 	"testing"
 
 	"bytes"
+
 	"github.com/gogo/protobuf/proto"
 	"github.com/sergi/go-diff/diffmatchpatch"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	uast_items "gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
+	"gopkg.in/src-d/hercules.v4/internal/test"
 )
 
 func fixtureShotness() *ShotnessAnalysis {
 	sh := &ShotnessAnalysis{}
-	sh.Initialize(testRepository)
+	sh.Initialize(test.Repository)
 	sh.Configure(nil)
 	return sh
 }
 
 func TestShotnessMeta(t *testing.T) {
 	sh := &ShotnessAnalysis{}
-	sh.Initialize(testRepository)
+	sh.Initialize(test.Repository)
 	assert.NotNil(t, sh.nodes)
 	assert.NotNil(t, sh.files)
 	assert.Equal(t, sh.Name(), "Shotness")
 	assert.Len(t, sh.Provides(), 0)
 	assert.Equal(t, len(sh.Requires()), 2)
-	assert.Equal(t, sh.Requires()[0], DependencyFileDiff)
-	assert.Equal(t, sh.Requires()[1], DependencyUastChanges)
+	assert.Equal(t, sh.Requires()[0], items.DependencyFileDiff)
+	assert.Equal(t, sh.Requires()[1], uast_items.DependencyUastChanges)
 	assert.Len(t, sh.ListConfigurationOptions(), 2)
 	assert.Equal(t, sh.ListConfigurationOptions()[0].Name, ConfigShotnessXpathStruct)
 	assert.Equal(t, sh.ListConfigurationOptions()[1].Name, ConfigShotnessXpathName)
@@ -45,51 +50,57 @@ func TestShotnessMeta(t *testing.T) {
 	assert.Equal(t, sh.XpathName, "another!")
 	features := sh.Features()
 	assert.Len(t, features, 1)
-	assert.Equal(t, features[0], FeatureUast)
+	assert.Equal(t, features[0], uast_items.FeatureUast)
 }
 
 func TestShotnessRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&ShotnessAnalysis{}).Name()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "ShotnessAnalysis")
-	tp, exists = Registry.flags[(&ShotnessAnalysis{}).Flag()]
-	assert.True(t, exists)
-	assert.Equal(t, tp.Elem().Name(), "ShotnessAnalysis")
+	summoned := core.Registry.Summon((&ShotnessAnalysis{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "Shotness")
+	leaves := core.Registry.GetLeaves()
+	matched := false
+	for _, tp := range leaves {
+		if tp.Flag() == (&ShotnessAnalysis{}).Flag() {
+			matched = true
+			break
+		}
+	}
+	assert.True(t, matched)
 }
 
 func bakeShotness(t *testing.T, eraseEndPosition bool) (*ShotnessAnalysis, ShotnessResult) {
 	sh := fixtureShotness()
-	bytes1, err := ioutil.ReadFile(path.Join("test_data", "1.java"))
+	bytes1, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", "1.java"))
 	assert.Nil(t, err)
-	bytes2, err := ioutil.ReadFile(path.Join("test_data", "2.java"))
+	bytes2, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", "2.java"))
 	assert.Nil(t, err)
 	dmp := diffmatchpatch.New()
 	src, dst, _ := dmp.DiffLinesToRunes(string(bytes1), string(bytes2))
 	state := map[string]interface{}{}
 	state["commit"] = &object.Commit{}
-	fileDiffs := map[string]FileDiffData{}
+	fileDiffs := map[string]items.FileDiffData{}
 	const fileName = "test.java"
-	fileDiffs[fileName] = FileDiffData{
+	fileDiffs[fileName] = items.FileDiffData{
 		OldLinesOfCode: len(src),
 		NewLinesOfCode: len(dst),
 		Diffs:          dmp.DiffMainRunes(src, dst, false),
 	}
-	state[DependencyFileDiff] = fileDiffs
-	uastChanges := make([]UASTChange, 1)
+	state[items.DependencyFileDiff] = fileDiffs
+	uastChanges := make([]uast_items.Change, 1)
 	loadUast := func(name string) *uast.Node {
-		bytes, err := ioutil.ReadFile(path.Join("test_data", name))
+		bytes, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", name))
 		assert.Nil(t, err)
 		node := uast.Node{}
 		proto.Unmarshal(bytes, &node)
 		if eraseEndPosition {
-			VisitEachNode(&node, func(child *uast.Node) {
+			uast_items.VisitEachNode(&node, func(child *uast.Node) {
 				child.EndPosition = nil
 			})
 		}
 		return &node
 	}
-	state[DependencyUastChanges] = uastChanges
-	uastChanges[0] = UASTChange{
+	state[uast_items.DependencyUastChanges] = uastChanges
+	uastChanges[0] = uast_items.Change{
 		Change: &object.Change{
 			From: object.ChangeEntry{},
 			To:   object.ChangeEntry{Name: fileName}},
@@ -98,7 +109,7 @@ func bakeShotness(t *testing.T, eraseEndPosition bool) (*ShotnessAnalysis, Shotn
 	iresult, err := sh.Consume(state)
 	assert.Nil(t, err)
 	assert.Nil(t, iresult)
-	uastChanges[0] = UASTChange{
+	uastChanges[0] = uast_items.Change{
 		Change: &object.Change{
 			From: object.ChangeEntry{Name: fileName},
 			To:   object.ChangeEntry{Name: fileName}},
@@ -112,33 +123,33 @@ func bakeShotness(t *testing.T, eraseEndPosition bool) (*ShotnessAnalysis, Shotn
 
 func TestShotnessConsume(t *testing.T) {
 	sh := fixtureShotness()
-	bytes1, err := ioutil.ReadFile(path.Join("test_data", "1.java"))
+	bytes1, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", "1.java"))
 	assert.Nil(t, err)
-	bytes2, err := ioutil.ReadFile(path.Join("test_data", "2.java"))
+	bytes2, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", "2.java"))
 	assert.Nil(t, err)
 	dmp := diffmatchpatch.New()
 	src, dst, _ := dmp.DiffLinesToRunes(string(bytes1), string(bytes2))
 	state := map[string]interface{}{}
 	state["commit"] = &object.Commit{}
-	fileDiffs := map[string]FileDiffData{}
+	fileDiffs := map[string]items.FileDiffData{}
 	const fileName = "test.java"
 	const newfileName = "new.java"
-	fileDiffs[fileName] = FileDiffData{
+	fileDiffs[fileName] = items.FileDiffData{
 		OldLinesOfCode: len(src),
 		NewLinesOfCode: len(dst),
 		Diffs:          dmp.DiffMainRunes(src, dst, false),
 	}
-	state[DependencyFileDiff] = fileDiffs
-	uastChanges := make([]UASTChange, 1)
+	state[items.DependencyFileDiff] = fileDiffs
+	uastChanges := make([]uast_items.Change, 1)
 	loadUast := func(name string) *uast.Node {
-		bytes, err := ioutil.ReadFile(path.Join("test_data", name))
+		bytes, err := ioutil.ReadFile(path.Join("..", "internal", "test_data", name))
 		assert.Nil(t, err)
 		node := uast.Node{}
 		proto.Unmarshal(bytes, &node)
 		return &node
 	}
-	state[DependencyUastChanges] = uastChanges
-	uastChanges[0] = UASTChange{
+	state[uast_items.DependencyUastChanges] = uastChanges
+	uastChanges[0] = uast_items.Change{
 		Change: &object.Change{
 			From: object.ChangeEntry{},
 			To:   object.ChangeEntry{Name: fileName}},
@@ -147,7 +158,7 @@ func TestShotnessConsume(t *testing.T) {
 	iresult, err := sh.Consume(state)
 	assert.Nil(t, err)
 	assert.Nil(t, iresult)
-	uastChanges[0] = UASTChange{
+	uastChanges[0] = uast_items.Change{
 		Change: &object.Change{
 			From: object.ChangeEntry{Name: fileName},
 			To:   object.ChangeEntry{Name: newfileName}},
@@ -185,7 +196,7 @@ func TestShotnessConsume(t *testing.T) {
 		"MethodDeclaration_testUnpackEntryFromStream_"+newfileName)
 	assert.Equal(t, result.Counters[15], map[int]int{
 		8: 1, 0: 1, 5: 1, 6: 1, 11: 1, 1: 1, 13: 1, 17: 1, 3: 1, 15: 1, 9: 1, 4: 1, 7: 1, 16: 1, 2: 1, 12: 1, 10: 1})
-	uastChanges[0] = UASTChange{
+	uastChanges[0] = uast_items.Change{
 		Change: &object.Change{
 			From: object.ChangeEntry{Name: newfileName},
 			To:   object.ChangeEntry{}},
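
TestShotnessRegistration above moved from poking the registry's unexported `registered` and `flags` maps to the public `Summon` and `GetLeaves` calls. As a rough mental model only (not the actual hercules implementation), a registry with that shape can be sketched like this:

```go
package main

import "fmt"

// PipelineItem models just the two methods the registry needs here;
// the real hercules interface is richer.
type PipelineItem interface {
	Name() string
	Flag() string
}

// Registry keeps prototypes by name, supporting the Summon and
// GetLeaves lookups the updated test relies on.
type Registry struct {
	registered map[string]PipelineItem
}

func (r *Registry) Register(item PipelineItem) {
	if r.registered == nil {
		r.registered = map[string]PipelineItem{}
	}
	r.registered[item.Name()] = item
}

// Summon returns every registered item carrying the given name.
func (r *Registry) Summon(name string) []PipelineItem {
	if item, ok := r.registered[name]; ok {
		return []PipelineItem{item}
	}
	return nil
}

// GetLeaves returns all registered items; callers match on Flag(),
// as TestShotnessRegistration does.
func (r *Registry) GetLeaves() []PipelineItem {
	leaves := make([]PipelineItem, 0, len(r.registered))
	for _, item := range r.registered {
		leaves = append(leaves, item)
	}
	return leaves
}

type shotness struct{}

func (shotness) Name() string { return "Shotness" }
func (shotness) Flag() string { return "shotness" }

func main() {
	r := &Registry{}
	r.Register(shotness{})
	fmt.Println(r.Summon("Shotness")[0].Name()) // Shotness
}
```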

+ 0 - 19
vendor/github.com/jeffail/tunny/LICENSE

@@ -1,19 +0,0 @@
-Copyright (c) 2014 Ashley Jeffs
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.

+ 0 - 229
vendor/github.com/jeffail/tunny/README.md

@@ -1,229 +0,0 @@
-![Tunny](tunny_logo.png "Tunny")
-
-Tunny is a Golang library for spawning and managing a goroutine pool.
-
-The API is synchronous and simple to use. Jobs are allocated to a worker when one becomes available.
-
-https://godoc.org/github.com/Jeffail/tunny
-
-## How to install:
-
-```bash
-go get github.com/jeffail/tunny
-```
-
-## How to use:
-
-The most obvious use for a goroutine pool is limiting heavy jobs to the number of CPUs available. In the example below we funnel work from an arbitrary number of HTTP request goroutines through our pool.
-
-```go
-package main
-
-import (
-	"io/ioutil"
-	"net/http"
-	"runtime"
-
-	"github.com/jeffail/tunny"
-)
-
-func main() {
-	numCPUs := runtime.NumCPU()
-	runtime.GOMAXPROCS(numCPUs+1) // numCPUs hot threads + one for async tasks.
-
-	pool, _ := tunny.CreatePool(numCPUs, func(object interface{}) interface{} {
-		input, _ := object.([]byte)
-
-		// Do something that takes a lot of work
-		output := input
-
-		return output
-	}).Open()
-
-	defer pool.Close()
-
-	http.HandleFunc("/work", func(w http.ResponseWriter, r *http.Request) {
-		input, err := ioutil.ReadAll(r.Body)
-		if err != nil {
-			http.Error(w, "Internal error", http.StatusInternalServerError)
-			return // do not fall through with a failed body read
-		}
-
-		// Send work to our pool
-		result, _ := pool.SendWork(input)
-
-		w.Write(result.([]byte))
-	})
-
-	http.ListenAndServe(":8080", nil)
-}
-```
-
-Tunny supports timeouts. You can replace the `SendWork` call above with the following:
-
-```go
-		// Or, alternatively, send it with a timeout (in this case 5 seconds).
-		result, err := pool.SendWorkTimed(5000, input)
-		if err != nil {
-			http.Error(w, "Request timed out", http.StatusRequestTimeout)
-		}
-```
-
-## Can I send a closure instead of data?
-
-Yes. The arguments passed to the worker are boxed as interface{}, so they can actually be a func. You can implement this yourself, or, if you're not bothered about return values, you can use:
-
-```go
-exampleChannel := make(chan int)
-
-pool, _ := tunny.CreatePoolGeneric(numCPUs).Open()
-
-err := pool.SendWork(func() {
-	/* Do your hard work here, usual rules of closures apply here,
-	 * so you can return values like so:
-	 */
-	exampleChannel <- 10
-})
-
-if err != nil {
-	// You done goofed
-}
-```
-
-## How do I give my workers state?
-
-Tunny workers implement the `TunnyWorker` interface; implement it to have your own objects (and state) act as your workers.
-
-```go
-/*
-TunnyWorker - The basic interface of a tunny worker.
-*/
-type TunnyWorker interface {
-
-	// Called for each job, expects the result to be returned synchronously
-	TunnyJob(interface{}) interface{}
-
-	// Called after each job, this indicates whether the worker is ready for the next job.
-	// The default implementation is to return true always. If false is returned then the
-	// method is called every five milliseconds until either true is returned or the pool
-	// is closed.
-	TunnyReady() bool
-}
-```
-
-Here is a short example:
-
-```go
-type customWorker struct {
-	// TODO: Put some state here
-}
-
-// Use this call to block further jobs if necessary
-func (worker *customWorker) TunnyReady() bool {
-	return true
-}
-
-// This is where the work actually happens
-func (worker *customWorker) TunnyJob(data interface{}) interface{} {
-	/* TODO: Use and modify state
-	 * there's no need for thread safety paradigms here unless the
-	 * data is being accessed from another goroutine outside of
-	 * the pool.
-	 */
-	if outputStr, ok := data.(string); ok {
-		return ("custom job done: " + outputStr)
-	}
-	return nil
-}
-
-func TestCustomWorkers(t *testing.T) {
-	outChan := make(chan int, 10)
-
-	wg := new(sync.WaitGroup)
-	wg.Add(10)
-
-	workers := make([]tunny.TunnyWorker, 4)
-	for i := range workers {
-		workers[i] = &(customWorker{})
-	}
-
-	pool, _ := tunny.CreateCustomPool(workers).Open()
-
-	defer pool.Close()
-
-	for i := 0; i < 10; i++ {
-		go func() {
-			value, _ := pool.SendWork("hello world")
-			fmt.Println(value.(string))
-
-			wg.Done()
-		}()
-	}
-
-	wg.Wait()
-}
-```
-
-The TunnyReady method allows you to use your state to determine whether or not a worker should take on another job. For example, your worker could hold a counter of how many jobs it has done and, after a certain amount, perform some other act before taking on more work. It's important to use TunnyReady for these occasions, since blocking the TunnyJob call would hold up the waiting client.
-
-It is recommended that you do not block TunnyReady() whilst you wait for some condition to change, since this can prevent the pool from closing the worker goroutines. Currently, TunnyReady is called at 5 millisecond intervals until you answer true or the pool is closed.
-
-## I need more control
-
-You crazy fool, let's take this up to the next level. You can optionally implement `TunnyExtendedWorker` for more control.
-
-```go
-/*
-TunnyExtendedWorker - An optional interface that can be implemented if the worker needs
-more control over its state.
-*/
-type TunnyExtendedWorker interface {
-
-	// Called when the pool is opened, this will be called before any jobs are sent.
-	TunnyInitialize()
-
-	// Called when the pool is closed, this will be called after all jobs are completed.
-	TunnyTerminate()
-}
-```
-
-## Can a worker detect when a timeout occurs?
-
-Yes, you can also implement the `TunnyInterruptable` interface.
-
-```go
-/*
-TunnyInterruptable - An optional interface that can be implemented in order to allow the
-worker to drop jobs when they are abandoned.
-*/
-type TunnyInterruptable interface {
-
-	// Called when the current job has been abandoned by the client.
-	TunnyInterrupt()
-}
-```
-
-This method will be called in the event that a timeout occurs whilst waiting for the result. `TunnyInterrupt` is called from a newly spawned goroutine, so you'll need to create your own mechanism for stopping your worker mid-way through a job.
-
-## Can SendWork be called asynchronously?
-
-There are two helper functions, SendWorkAsync and SendWorkTimedAsync, which are the same as their respective sync calls but take an optional second argument of type func(interface{}, error): the closure called when a result is returned. It can be nil if there is no need for it.
-
-However, if you find yourself in a situation where the sync return is not necessary, then chances are you don't actually need Tunny at all. Go already makes concurrent programming simple by nature, and using Tunny for simple async worker calls defeats the point of the language and adds overhead that isn't necessary.
-
-## Behaviours and caveats:
-
-### - Workers request jobs on an ad-hoc basis
-
-When there is a backlog of jobs waiting to be serviced, and all workers are occupied, a job will not be assigned to a worker until that worker is ready for its next job. This means workers do not develop their own individual queues; instead, the backlog is shared by the entire pool.
-
-This means an individual worker is able to halt, or spend exceptional lengths of time on a single request, without hindering the flow of any other requests, provided there are other active workers in the pool.
-
-### - A job can be dropped before work is begun
-
-Tunny supports timeouts at the work request level. If this timeout is triggered whilst waiting for a worker to become available, the request is dropped entirely and no effort is wasted on the abandoned request.
-
-### - Backlogged jobs are FIFO, for now
-
-When a job arrives and all workers are occupied, the waiting thread will block at a select statement whilst waiting to be assigned a worker. In practice this seems to create a FIFO queue, implying that this is how Go's implementation deals with select blocks, channels and multiple reading goroutines.
-
-However, I haven't found a guarantee of this behaviour in the Golang documentation, so I cannot guarantee that this will always be the case.
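
Two sections above describe behaviour without an end-to-end example: implementing the interrupt hook, and the async send helpers. Below is a speculative sketch combining both. It follows the exported names in the vendored `tunny.go` deleted further down (`Worker` with `Job`/`Ready`, plus `TunnyInterrupt`), which differ from upstream tunny's `TunnyWorker` naming; the cancel-channel plumbing is invented for the example.

```go
package main

import (
	"fmt"
	"time"

	"github.com/jeffail/tunny"
)

// interruptibleWorker implements Worker (Job/Ready) plus the optional
// Interruptable hook, so abandoned jobs can be dropped mid-flight.
type interruptibleWorker struct {
	cancel chan struct{} // invented plumbing for this example
}

func (w *interruptibleWorker) Ready() bool { return true }

func (w *interruptibleWorker) Job(data interface{}) interface{} {
	select {
	case <-time.After(100 * time.Millisecond): // pretend work
		return data
	case <-w.cancel: // the client abandoned the job (timeout fired)
		return nil
	}
}

// TunnyInterrupt runs on a freshly spawned goroutine, so it only
// signals; the job goroutine does the actual unwinding.
func (w *interruptibleWorker) TunnyInterrupt() {
	w.cancel <- struct{}{}
}

func main() {
	worker := &interruptibleWorker{cancel: make(chan struct{}, 1)}
	pool, _ := tunny.CreateCustomPool([]tunny.Worker{worker}).Open()
	defer pool.Close()

	// Async send with a completion closure; the closure may be nil
	// when the result is not needed. A 50ms timeout against 100ms of
	// work, so this is expected to report ErrJobTimedOut.
	pool.SendWorkTimedAsync(50, "job", func(result interface{}, err error) {
		fmt.Println(result, err)
	})
	time.Sleep(300 * time.Millisecond) // let the callback fire
}
```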

+ 0 - 379
vendor/github.com/jeffail/tunny/tunny.go

@@ -1,379 +0,0 @@
-/*
-Copyright (c) 2014 Ashley Jeffs
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-
-// Package tunny implements a simple pool for maintaining independent worker goroutines.
-package tunny
-
-import (
-	"errors"
-	"expvar"
-	"reflect"
-	"strconv"
-	"sync"
-	"sync/atomic"
-	"time"
-)
-
-// Errors that are used throughout the Tunny API.
-var (
-	ErrPoolAlreadyRunning = errors.New("the pool is already running")
-	ErrPoolNotRunning     = errors.New("the pool is not running")
-	ErrJobNotFunc         = errors.New("generic worker not given a func()")
-	ErrWorkerClosed       = errors.New("worker was closed")
-	ErrJobTimedOut        = errors.New("job request timed out")
-)
-
-/*
-Worker - The basic interface of a tunny worker.
-*/
-type Worker interface {
-
-	// Called for each job, expects the result to be returned synchronously
-	Job(interface{}) interface{}
-
-	// Called after each job, this indicates whether the worker is ready for the next job.
-	// The default implementation is to return true always. If false is returned then the
-	// method is called every five milliseconds until either true is returned or the pool
-	// is closed. For efficiency you should have this call block until your worker is ready,
-	// otherwise you introduce a 5ms latency between jobs.
-	Ready() bool
-}
-
-/*
-ExtendedWorker - An optional interface that can be implemented if the worker needs
-more control over its state.
-*/
-type ExtendedWorker interface {
-
-	// Called when the pool is opened, this will be called before any jobs are sent.
-	Initialize()
-
-	// Called when the pool is closed, this will be called after all jobs are completed.
-	Terminate()
-}
-
-/*
-Interruptable - An optional interface that can be implemented in order to allow the
-worker to drop jobs when they are abandoned.
-*/
-type Interruptable interface {
-
-	// Called when the current job has been abandoned by the client.
-	TunnyInterrupt()
-}
-
-/*
-Default and very basic implementation of a tunny worker. This worker holds a closure which
-is assigned at construction, and this closure is called on each job.
-*/
-type defaultWorker struct {
-	job *func(interface{}) interface{}
-}
-
-func (worker *defaultWorker) Job(data interface{}) interface{} {
-	return (*worker.job)(data)
-}
-
-func (worker *defaultWorker) Ready() bool {
-	return true
-}
-
-/*
-WorkPool contains the structures and methods required to communicate with your pool; it must
-be opened before sending work and closed when all jobs are completed.
-
-You may open and close a pool as many times as you wish; calling Close is a blocking call that
-guarantees all goroutines are stopped.
-*/
-type WorkPool struct {
-	workers          []*workerWrapper
-	selects          []reflect.SelectCase
-	statusMutex      sync.RWMutex
-	running          uint32
-	pendingAsyncJobs int32
-}
-
-func (pool *WorkPool) isRunning() bool {
-	return (atomic.LoadUint32(&pool.running) == 1)
-}
-
-func (pool *WorkPool) setRunning(running bool) {
-	if running {
-		atomic.SwapUint32(&pool.running, 1)
-	} else {
-		atomic.SwapUint32(&pool.running, 0)
-	}
-}
-
-/*
-Open all channels and launch the background goroutines managed by the pool.
-*/
-func (pool *WorkPool) Open() (*WorkPool, error) {
-	pool.statusMutex.Lock()
-	defer pool.statusMutex.Unlock()
-
-	if !pool.isRunning() {
-
-		pool.selects = make([]reflect.SelectCase, len(pool.workers))
-
-		for i, workerWrapper := range pool.workers {
-			workerWrapper.Open()
-
-			pool.selects[i] = reflect.SelectCase{
-				Dir:  reflect.SelectRecv,
-				Chan: reflect.ValueOf(workerWrapper.readyChan),
-			}
-		}
-
-		pool.setRunning(true)
-		return pool, nil
-
-	}
-	return nil, ErrPoolAlreadyRunning
-}
-
-/*
-Close all channels and goroutines managed by the pool.
-*/
-func (pool *WorkPool) Close() error {
-	pool.statusMutex.Lock()
-	defer pool.statusMutex.Unlock()
-
-	if pool.isRunning() {
-		for _, workerWrapper := range pool.workers {
-			workerWrapper.Close()
-		}
-		for _, workerWrapper := range pool.workers {
-			workerWrapper.Join()
-		}
-		pool.setRunning(false)
-		return nil
-	}
-	return ErrPoolNotRunning
-}
-
-/*
-CreatePool - Creates a pool of workers, and takes a closure argument which is the action
-to perform for each job.
-*/
-func CreatePool(numWorkers int, job func(interface{}) interface{}) *WorkPool {
-	pool := WorkPool{running: 0}
-
-	pool.workers = make([]*workerWrapper, numWorkers)
-	for i := range pool.workers {
-		newWorker := workerWrapper{
-			worker: &(defaultWorker{&job}),
-		}
-		pool.workers[i] = &newWorker
-	}
-
-	return &pool
-}
-
-/*
-CreatePoolGeneric - Creates a pool of generic workers. When sending work to a pool of
-generic workers you send a closure (func()) which is the job to perform.
-*/
-func CreatePoolGeneric(numWorkers int) *WorkPool {
-
-	return CreatePool(numWorkers, func(jobCall interface{}) interface{} {
-		if method, ok := jobCall.(func()); ok {
-			method()
-			return nil
-		}
-		return ErrJobNotFunc
-	})
-
-}
-
-/*
-CreateCustomPool - Creates a pool for an array of custom workers. The custom workers
-must implement Worker, and may also optionally implement ExtendedWorker and
-Interruptable.
-*/
-func CreateCustomPool(customWorkers []Worker) *WorkPool {
-	pool := WorkPool{running: 0}
-
-	pool.workers = make([]*workerWrapper, len(customWorkers))
-	for i := range pool.workers {
-		newWorker := workerWrapper{
-			worker: customWorkers[i],
-		}
-		pool.workers[i] = &newWorker
-	}
-
-	return &pool
-}
-
-/*
-SendWorkTimed - Send a job to a worker and return the result; this is a synchronous
-call with a timeout.
-*/
-func (pool *WorkPool) SendWorkTimed(milliTimeout time.Duration, jobData interface{}) (interface{}, error) {
-	pool.statusMutex.RLock()
-	defer pool.statusMutex.RUnlock()
-
-	if pool.isRunning() {
-		before := time.Now()
-
-		// Create a new timeout timer
-		timeout := time.NewTimer(milliTimeout * time.Millisecond)
-		defer timeout.Stop()
-
-		// Build a new []reflect.SelectCase and add the timeout case
-		selectCases := append(pool.selects[:], reflect.SelectCase{
-			Dir:  reflect.SelectRecv,
-			Chan: reflect.ValueOf(timeout.C),
-		})
-
-		// Wait for workers, or time out
-		if chosen, _, ok := reflect.Select(selectCases); ok {
-
-			// Check if the selected index is a worker, otherwise we timed out
-			if chosen < (len(selectCases) - 1) {
-				pool.workers[chosen].jobChan <- jobData
-
-				timeoutRemain := time.NewTimer((milliTimeout * time.Millisecond) - time.Since(before))
-				defer timeoutRemain.Stop()
-
-				// Wait for response, or time out
-				select {
-				case data, open := <-pool.workers[chosen].outputChan:
-					if !open {
-						return nil, ErrWorkerClosed
-					}
-					return data, nil
-				case <-timeoutRemain.C:
-					/* If we time out here we also need to ensure that the output is still
-					 * collected and that the worker can move on. Therefore, we fork the
-					 * waiting process into a new goroutine.
-					 */
-					go func() {
-						pool.workers[chosen].Interrupt()
-						<-pool.workers[chosen].outputChan
-					}()
-					return nil, ErrJobTimedOut
-				}
-			} else {
-				return nil, ErrJobTimedOut
-			}
-		} else {
-			// This means the chosen channel was closed
-			return nil, ErrWorkerClosed
-		}
-	} else {
-		return nil, ErrPoolNotRunning
-	}
-}
-
-/*
-SendWorkTimedAsync - Send a timed job to a worker without blocking, and optionally
-send the result to a receiving closure. You may set the closure to nil if no
-further actions are required.
-*/
-func (pool *WorkPool) SendWorkTimedAsync(
-	milliTimeout time.Duration,
-	jobData interface{},
-	after func(interface{}, error),
-) {
-	atomic.AddInt32(&pool.pendingAsyncJobs, 1)
-	go func() {
-		defer atomic.AddInt32(&pool.pendingAsyncJobs, -1)
-		result, err := pool.SendWorkTimed(milliTimeout, jobData)
-		if after != nil {
-			after(result, err)
-		}
-	}()
-}
-
-/*
-SendWork - Send a job to a worker and return the result; this is a synchronous call.
-*/
-func (pool *WorkPool) SendWork(jobData interface{}) (interface{}, error) {
-	pool.statusMutex.RLock()
-	defer pool.statusMutex.RUnlock()
-
-	if pool.isRunning() {
-		if chosen, _, ok := reflect.Select(pool.selects); ok && chosen >= 0 {
-			pool.workers[chosen].jobChan <- jobData
-			result, open := <-pool.workers[chosen].outputChan
-
-			if !open {
-				return nil, ErrWorkerClosed
-			}
-			return result, nil
-		}
-		return nil, ErrWorkerClosed
-	}
-	return nil, ErrPoolNotRunning
-}
-
-/*
-SendWorkAsync - Send a job to a worker without blocking, and optionally send the
-result to a receiving closure. You may set the closure to nil if no further actions
-are required.
-*/
-func (pool *WorkPool) SendWorkAsync(jobData interface{}, after func(interface{}, error)) {
-	atomic.AddInt32(&pool.pendingAsyncJobs, 1)
-	go func() {
-		defer atomic.AddInt32(&pool.pendingAsyncJobs, -1)
-		result, err := pool.SendWork(jobData)
-		if after != nil {
-			after(result, err)
-		}
-	}()
-}
-
-/*
-NumPendingAsyncJobs - Get the current count of async jobs, either in flight or waiting for a worker.
-*/
-func (pool *WorkPool) NumPendingAsyncJobs() int32 {
-	return atomic.LoadInt32(&pool.pendingAsyncJobs)
-}
-
-/*
-NumWorkers - Number of workers in the pool
-*/
-func (pool *WorkPool) NumWorkers() int {
-	return len(pool.workers)
-}
-
-type liveVarAccessor func() string
-
-func (a liveVarAccessor) String() string {
-	return a()
-}
-
-/*
-PublishExpvarMetrics - Publishes the NumWorkers and NumPendingAsyncJobs to expvars
-*/
-func (pool *WorkPool) PublishExpvarMetrics(poolName string) {
-	ret := expvar.NewMap(poolName)
-	asyncJobsFn := func() string {
-		return strconv.FormatInt(int64(pool.NumPendingAsyncJobs()), 10)
-	}
-	numWorkersFn := func() string {
-		return strconv.FormatInt(int64(pool.NumWorkers()), 10)
-	}
-	ret.Set("pendingAsyncJobs", liveVarAccessor(asyncJobsFn))
-	ret.Set("numWorkers", liveVarAccessor(numWorkersFn))
-}
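
The `SendWork` family above picks whichever worker becomes ready first by building a dynamic select over every worker's `readyChan` with `reflect.Select`, since the number of workers is only known at runtime. A self-contained sketch of that pattern (the channel layout is invented for the example):

```go
package main

import (
	"fmt"
	"reflect"
	"time"
)

func main() {
	// One "ready" channel per worker, as in WorkPool.selects.
	readyChans := make([]chan int, 3)
	for i := range readyChans {
		readyChans[i] = make(chan int)
	}

	// Each worker announces readiness after a different delay.
	for i, ch := range readyChans {
		go func(i int, ch chan int) {
			time.Sleep(time.Duration(i+1) * 10 * time.Millisecond)
			ch <- 1
		}(i, ch)
	}

	// Build one receive case per channel; reflect.Select blocks until
	// any case fires and reports which index was chosen, which is how
	// SendWork finds the first free worker.
	cases := make([]reflect.SelectCase, len(readyChans))
	for i, ch := range readyChans {
		cases[i] = reflect.SelectCase{
			Dir:  reflect.SelectRecv,
			Chan: reflect.ValueOf(ch),
		}
	}

	chosen, _, ok := reflect.Select(cases)
	fmt.Println("first ready worker:", chosen, "channel open:", ok)
}
```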

BIN
vendor/github.com/jeffail/tunny/tunny_logo.png


+ 0 - 286
vendor/github.com/jeffail/tunny/tunny_test.go

@@ -1,286 +0,0 @@
-/*
-Copyright (c) 2014 Ashley Jeffs
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-
-package tunny
-
-import (
-	"sync"
-	"testing"
-	"time"
-)
-
-/*--------------------------------------------------------------------------------------------------
- */
-
-func TestBasicJob(t *testing.T) {
-	pool, err := CreatePool(1, func(in interface{}) interface{} {
-		intVal := in.(int)
-		return intVal * 2
-	}).Open()
-	if err != nil {
-		t.Errorf("Failed to create pool: %v", err)
-		return
-	}
-	defer pool.Close()
-
-	for i := 0; i < 1; i++ {
-		ret, err := pool.SendWork(10)
-		if err != nil {
-			t.Errorf("Failed to send work: %v", err)
-			return
-		}
-		retInt := ret.(int)
-		if ret != 20 {
-			t.Errorf("Wrong return value: %v != %v", 20, retInt)
-		}
-	}
-}
-
-func TestParallelJobs(t *testing.T) {
-	nWorkers := 10
-
-	jobGroup := sync.WaitGroup{}
-	testGroup := sync.WaitGroup{}
-
-	pool, err := CreatePool(nWorkers, func(in interface{}) interface{} {
-		jobGroup.Done()
-		jobGroup.Wait()
-
-		intVal := in.(int)
-		return intVal * 2
-	}).Open()
-	if err != nil {
-		t.Errorf("Failed to create pool: %v", err)
-		return
-	}
-	defer pool.Close()
-
-	for j := 0; j < 1; j++ {
-		jobGroup.Add(nWorkers)
-		testGroup.Add(nWorkers)
-
-		for i := 0; i < nWorkers; i++ {
-			go func() {
-				ret, err := pool.SendWork(10)
-				if err != nil {
-					t.Errorf("Failed to send work: %v", err)
-					return
-				}
-				retInt := ret.(int)
-				if ret != 20 {
-					t.Errorf("Wrong return value: %v != %v", 20, retInt)
-				}
-
-				testGroup.Done()
-			}()
-		}
-
-		testGroup.Wait()
-	}
-}
-
-/*--------------------------------------------------------------------------------------------------
- */
-
-// Basic worker implementation
-type dummyWorker struct {
-	ready bool
-	t     *testing.T
-}
-
-func (d *dummyWorker) TunnyJob(in interface{}) interface{} {
-	if !d.ready {
-		d.t.Errorf("Job called without polling Ready")
-	}
-	d.ready = false
-	return in
-}
-
-func (d *dummyWorker) TunnyReady() bool {
-	d.ready = true
-	return d.ready
-}
-
-// Test the pool with a basic worker implementation
-func TestDummyWorker(t *testing.T) {
-	pool, err := CreateCustomPool([]Worker{&dummyWorker{t: t}}).Open()
-	if err != nil {
-		t.Errorf("Failed to create pool: %v", err)
-		return
-	}
-	defer pool.Close()
-
-	for i := 0; i < 100; i++ {
-		if result, err := pool.SendWork(12); err != nil {
-			t.Errorf("Failed to send work: %v", err)
-		} else if resInt, ok := result.(int); !ok || resInt != 12 {
-			t.Errorf("Unexpected result from job: %v != %v", 12, result)
-		}
-	}
-}
-
-// Extended worker implementation
-type dummyExtWorker struct {
-	dummyWorker
-
-	initialized bool
-}
-
-func (d *dummyExtWorker) TunnyJob(in interface{}) interface{} {
-	if !d.initialized {
-		d.t.Errorf("Job called without calling Initialize")
-	}
-	return d.dummyWorker.TunnyJob(in)
-}
-
-func (d *dummyExtWorker) TunnyInitialize() {
-	d.initialized = true
-}
-
-func (d *dummyExtWorker) TunnyTerminate() {
-	if !d.initialized {
-		d.t.Errorf("Terminate called without calling Initialize")
-	}
-	d.initialized = false
-}
-
-// Test the pool with an extended worker implementation
-func TestDummyExtWorker(t *testing.T) {
-	pool, err := CreateCustomPool(
-		[]Worker{
-			&dummyExtWorker{
-				dummyWorker: dummyWorker{t: t},
-			},
-		}).Open()
-	if err != nil {
-		t.Errorf("Failed to create pool: %v", err)
-		return
-	}
-	defer pool.Close()
-
-	for i := 0; i < 100; i++ {
-		if result, err := pool.SendWork(12); err != nil {
-			t.Errorf("Failed to send work: %v", err)
-		} else if resInt, ok := result.(int); !ok || resInt != 12 {
-			t.Errorf("Unexpected result from job: %v != %v", 12, result)
-		}
-	}
-}
-
-// Extended and interruptible worker implementation
-type dummyExtIntWorker struct {
-	dummyExtWorker
-
-	jobLock *sync.Mutex
-}
-
-func (d *dummyExtIntWorker) TunnyJob(in interface{}) interface{} {
-	d.jobLock.Lock()
-	d.jobLock.Unlock()
-
-	return d.dummyExtWorker.TunnyJob(in)
-}
-
-func (d *dummyExtIntWorker) TunnyReady() bool {
-	d.jobLock.Lock()
-
-	return d.dummyExtWorker.TunnyReady()
-}
-
-func (d *dummyExtIntWorker) TunnyInterrupt() {
-	d.jobLock.Unlock()
-}
-
-// Test the pool with an extended and interruptible worker implementation
-func TestDummyExtIntWorker(t *testing.T) {
-	pool, err := CreateCustomPool(
-		[]Worker{
-			&dummyExtIntWorker{
-				dummyExtWorker: dummyExtWorker{
-					dummyWorker: dummyWorker{t: t},
-				},
-				jobLock: &sync.Mutex{},
-			},
-		}).Open()
-	if err != nil {
-		t.Errorf("Failed to create pool: %v", err)
-		return
-	}
-	defer pool.Close()
-
-	for i := 0; i < 100; i++ {
-		if _, err := pool.SendWorkTimed(1, nil); err == nil {
-			t.Errorf("Expected timeout from dummyExtIntWorker.")
-		}
-	}
-}
-
-func TestNumWorkers(t *testing.T) {
-	numWorkers := 10
-	pool, err := CreatePoolGeneric(numWorkers).Open()
-	if err != nil {
-		t.Errorf("Failed to create pool: %v", err)
-		return
-	}
-	defer pool.Close()
-	actual := pool.NumWorkers()
-	if actual != numWorkers {
-		t.Errorf("Expected to get %d workers, but got %d", numWorkers, actual)
-	}
-}
-
-var waitHalfSecond = func() {
-	time.Sleep(500 * time.Millisecond)
-}
-
-func TestNumPendingReportsAllWorkersWithNoWork(t *testing.T) {
-	numWorkers := 10
-	pool, err := CreatePoolGeneric(numWorkers).Open()
-	if err != nil {
-		t.Errorf("Failed to create pool: %v", err)
-		return
-	}
-	defer pool.Close()
-	actual := pool.NumPendingAsyncJobs()
-	if actual != 0 {
-		t.Errorf("Expected to get 0 pending jobs when pool is quiet, but got %d", actual)
-	}
-}
-
-func TestNumPendingReportsNotAllWorkersWhenSomeBusy(t *testing.T) {
-	numWorkers := 10
-	pool, err := CreatePoolGeneric(numWorkers).Open()
-	if err != nil {
-		t.Errorf("Failed to create pool: %v", err)
-		return
-	}
-	defer pool.Close()
-	pool.SendWorkAsync(waitHalfSecond, nil)
-	actual := pool.NumPendingAsyncJobs()
-	expected := int32(1)
-	if actual != expected {
-		t.Errorf("Expected to get %d pending jobs when pool has work, but got %d", expected, actual)
-	}
-}
-
-/*--------------------------------------------------------------------------------------------------
- */

+ 0 - 110
vendor/github.com/jeffail/tunny/worker.go

@@ -1,110 +0,0 @@
-/*
-Copyright (c) 2014 Ashley Jeffs
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-
-package tunny
-
-import (
-	"sync/atomic"
-	"time"
-)
-
-type workerWrapper struct {
-	readyChan  chan int
-	jobChan    chan interface{}
-	outputChan chan interface{}
-	poolOpen   uint32
-	worker     Worker
-}
-
-func (wrapper *workerWrapper) Loop() {
-
-	// TODO: Configure?
-	tout := time.Duration(5)
-
-	for !wrapper.worker.Ready() {
-		// It's sad that we can't simply check if jobChan is closed here.
-		if atomic.LoadUint32(&wrapper.poolOpen) == 0 {
-			break
-		}
-		time.Sleep(tout * time.Millisecond)
-	}
-
-	wrapper.readyChan <- 1
-
-	for data := range wrapper.jobChan {
-		wrapper.outputChan <- wrapper.worker.Job(data)
-		for !wrapper.worker.Ready() {
-			if atomic.LoadUint32(&wrapper.poolOpen) == 0 {
-				break
-			}
-			time.Sleep(tout * time.Millisecond)
-		}
-		wrapper.readyChan <- 1
-	}
-
-	close(wrapper.readyChan)
-	close(wrapper.outputChan)
-
-}
-
-func (wrapper *workerWrapper) Open() {
-	if extWorker, ok := wrapper.worker.(ExtendedWorker); ok {
-		extWorker.Initialize()
-	}
-
-	wrapper.readyChan = make(chan int)
-	wrapper.jobChan = make(chan interface{})
-	wrapper.outputChan = make(chan interface{})
-
-	atomic.SwapUint32(&wrapper.poolOpen, uint32(1))
-
-	go wrapper.Loop()
-}
-
-// Follow this with Join(); otherwise Terminate isn't called on the worker.
-func (wrapper *workerWrapper) Close() {
-	close(wrapper.jobChan)
-
-	// Breaks the worker out of a Ready() -> false loop
-	atomic.SwapUint32(&wrapper.poolOpen, uint32(0))
-}
-
-func (wrapper *workerWrapper) Join() {
-	// Ensure that both the ready and output channels are closed
-	for {
-		_, readyOpen := <-wrapper.readyChan
-		_, outputOpen := <-wrapper.outputChan
-		if !readyOpen && !outputOpen {
-			break
-		}
-	}
-
-	if extWorker, ok := wrapper.worker.(ExtendedWorker); ok {
-		extWorker.Terminate()
-	}
-}
-
-func (wrapper *workerWrapper) Interrupt() {
-	if extWorker, ok := wrapper.worker.(Interruptable); ok {
-		extWorker.TunnyInterrupt()
-	}
-}
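
The lifecycle in this file boils down to: poll `Ready` until it returns true or the pool closes, announce on `readyChan`, serve `jobChan` until it is closed, then close the outgoing channels so `Join` can drain them. A distilled, self-contained sketch of that choreography (names shortened, and the per-job readiness re-poll elided):

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

type wrapper struct {
	ready chan struct{}
	jobs  chan int
	out   chan int
	open  uint32 // 1 while the pool is open, 0 once closed
}

// loop mirrors workerWrapper.Loop: wait until the worker is ready
// (or the pool closes), signal readiness, then serve jobs until the
// jobs channel is closed.
func (w *wrapper) loop(workerReady func() bool, job func(int) int) {
	for !workerReady() {
		if atomic.LoadUint32(&w.open) == 0 {
			break
		}
		time.Sleep(5 * time.Millisecond)
	}
	w.ready <- struct{}{}
	for data := range w.jobs {
		w.out <- job(data)
		w.ready <- struct{}{}
	}
	close(w.ready)
	close(w.out)
}

func main() {
	w := &wrapper{
		ready: make(chan struct{}),
		jobs:  make(chan int),
		out:   make(chan int),
		open:  1,
	}
	go w.loop(func() bool { return true }, func(x int) int { return x * 2 })

	<-w.ready            // the worker announced readiness
	w.jobs <- 21         // send one job, as SendWork would
	fmt.Println(<-w.out) // 42
	close(w.jobs)        // Close(): stop serving jobs
	atomic.StoreUint32(&w.open, 0)
	for range w.ready { // Join(): drain until the loop winds down
	}
}
```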