Quellcode durchsuchen

Refactor the project structure

Signed-off-by: Vadim Markovtsev <vadim@sourced.tech>
Vadim Markovtsev vor 7 Jahren
Ursprung
Commit
98c2224828
70 geänderte Dateien mit 2298 neuen und 351 gelöschten Zeilen
  1. 3 2
      .travis.yml
  2. 4 4
      Dockerfile
  3. 9 9
      Makefile
  4. 3 3
      README.md
  5. 5 5
      appveyor.yml
  6. 2 2
      cmd/hercules/combine.go
  7. 1 1
      cmd/hercules/plugin.template
  8. 2 2
      cmd/hercules/root.go
  9. 2 2
      contrib/_plugin_example/churn_analysis.go
  10. 87 0
      core.go
  11. 0 0
      internal/__init__.py
  12. 5 46
      file.go
  13. 2 2
      file_test.go
  14. 7 4
      pipeline.go
  15. 2 2
      pipeline_test.go
  16. 1 1
      registry.go
  17. 1 1
      registry_test.go
  18. 5 3
      dummies.go
  19. 1 1
      dummies_test.go
  20. 44 0
      internal/math.go
  21. 0 0
      internal/pb/__init__.py
  22. 685 0
      internal/pb/pb.pb.go
  23. 0 0
      internal/pb/pb.proto
  24. 1127 0
      internal/pb/pb_pb2.py
  25. 0 0
      internal/pb/utils.go
  26. 11 9
      blob_cache.go
  27. 4 3
      blob_cache_test.go
  28. 6 5
      day.go
  29. 1 1
      day_test.go
  30. 14 6
      diff.go
  31. 1 1
      diff_test.go
  32. 9 8
      identity.go
  33. 1 1
      identity_test.go
  34. 1 1
      mailmap.go
  35. 3 2
      mailmap_test.go
  36. 11 8
      renames.go
  37. 1 1
      renames_test.go
  38. 8 7
      tree_diff.go
  39. 1 1
      tree_diff_test.go
  40. 3 3
      changes_xpather.go
  41. 1 1
      changes_xpather_test.go
  42. 15 13
      diff_refiner.go
  43. 1 1
      diff_refiner_test.go
  44. 84 82
      uast.go
  45. 2 2
      uast_test.go
  46. 0 0
      internal/rbtree/rbtree.go
  47. 0 0
      internal/test_data/1.java
  48. 0 0
      internal/test_data/2.java
  49. 0 0
      internal/test_data/blob
  50. 0 0
      internal/test_data/burndown.pb
  51. 0 0
      internal/test_data/couples.pb
  52. 0 0
      internal/test_data/gitmodules
  53. 0 0
      internal/test_data/gitmodules_empty
  54. 0 0
      internal/test_data/identities
  55. 0 0
      internal/test_data/uast1.pb
  56. 0 0
      internal/test_data/uast2.pb
  57. 0 0
      internal/toposort/toposort.go
  58. 0 0
      internal/toposort/toposort_test.go
  59. 0 0
      internal/yaml/utils.go
  60. 3 3
      labours.py
  61. 44 39
      burndown.go
  62. 2 2
      burndown_test.go
  63. 18 15
      comment_sentiment.go
  64. 3 3
      comment_sentiment_test.go
  65. 19 16
      couples.go
  66. 2 2
      couples_test.go
  67. 10 8
      file_history.go
  68. 2 2
      file_history_test.go
  69. 16 13
      shotness.go
  70. 3 2
      shotness_test.go

+ 3 - 2
.travis.yml

@@ -17,7 +17,7 @@ addons:
     - gcc-6
     - g++-6
 
-go_import_path: gopkg.in/src-d/hercules.v3
+go_import_path: gopkg.in/src-d/hercules.v4
 go:
   - 1.9.x
   - 1.10.x
@@ -41,6 +41,7 @@ before_install:
   - wget -O protoc.zip https://github.com/google/protobuf/releases/download/v$PROTOC_VERSION/protoc-$PROTOC_VERSION-linux-x86_64.zip
   - unzip -d ~/.local protoc.zip && rm protoc.zip
   - go get -v github.com/golang/lint/golint
+  - go get -v github.com/haya14busa/goverage
   - git clone --depth 1 https://github.com/src-d/go-git $GOPATH/src/gopkg.in/src-d/go-git.v4
   - wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py --user && rm get-pip.py
   - export PATH=~/usr/bin:$PATH
@@ -57,7 +58,7 @@ script:
   - set -e
   - go vet -tags tensorflow ./...
   - golint -set_exit_status ./...
-  - go test -tags tensorflow -v -cpu=1,2 -coverprofile=coverage.txt -covermode=count gopkg.in/src-d/hercules.v3
+  - goverage -tags tensorflow -v -cpu=1,2 -coverprofile=coverage.txt -covermode=count gopkg.in/src-d/hercules.v4/...
   - $GOPATH/bin/hercules version
   - $GOPATH/bin/hercules --burndown --couples --quiet --pb https://github.com/src-d/hercules > 1.pb
   - cp 1.pb 2.pb

+ 4 - 4
Dockerfile

@@ -12,14 +12,14 @@ RUN apt-get update && \
     unzip -d /usr/local protoc.zip && rm protoc.zip && \
     locale-gen en_US.UTF-8 && \
     export PATH=/usr/lib/go-1.10/bin:$PATH && \
-    go get -v -d gopkg.in/src-d/hercules.v3/... && \
-    cd /root/src/gopkg.in/src-d/hercules.v3 && \
+    go get -v -d gopkg.in/src-d/hercules.v4/... && \
+    cd /root/src/gopkg.in/src-d/hercules.v4 && \
     export CGO_CXXFLAGS=-std=c++14 && \
     curl -L "https://storage.googleapis.com/tensorflow/libtensorflow/libtensorflow-cpu-$(go env GOOS)-x86_64-1.7.0.tar.gz" | tar -C /usr/local -xz && \
     make && \
     rm /usr/local/bin/protoc && rm /usr/local/readme.txt && rm -rf /usr/local/include/google && \
     cp /root/bin/hercules /usr/local/bin && \
-    cp -r /root/src/gopkg.in/src-d/hercules.v3/*.py /root/src/gopkg.in/src-d/hercules.v3/pb /usr/local/bin && \
+    cp -r /root/src/gopkg.in/src-d/hercules.v4/*.py /root/src/gopkg.in/src-d/hercules.v4/internal/pb /usr/local/bin && \
     sed -i 's/parser.add_argument("--backend",/parser.add_argument("--backend", default="Agg",/' /usr/local/bin/labours.py && \
     echo '#!/bin/bash\n\
 \n\
@@ -28,7 +28,7 @@ echo "	$@"\n\
 echo\n\' > /browser && \
     chmod +x /browser && \
     curl https://bootstrap.pypa.io/get-pip.py | python3 && \
-    pip3 install --no-cache-dir --no-build-isolation -r /root/src/gopkg.in/src-d/hercules.v3/requirements.txt https://github.com/mind/wheels/releases/download/tf1.7-cpu/tensorflow-1.7.0-cp36-cp36m-linux_x86_64.whl && \
+    pip3 install --no-cache-dir --no-build-isolation -r /root/src/gopkg.in/src-d/hercules.v4/requirements.txt https://github.com/mind/wheels/releases/download/tf1.7-cpu/tensorflow-1.7.0-cp36-cp36m-linux_x86_64.whl && \
     rm -rf /root/* && \
     apt-get remove -y software-properties-common golang-1.10-go python3-dev libyaml-dev libxml2-dev curl git make unzip g++ && \
     apt-get remove -y *-doc *-man && \

+ 9 - 9
Makefile

@@ -12,22 +12,22 @@ endif
 all: ${GOPATH}/bin/hercules${EXE}
 
 test: all
-	go test gopkg.in/src-d/hercules.v3
+	go test gopkg.in/src-d/hercules.v4
 
 ${GOPATH}/bin/protoc-gen-gogo${EXE}:
 	go get -v github.com/gogo/protobuf/protoc-gen-gogo
 
 ifneq ($(OS),Windows_NT)
-pb/pb.pb.go: pb/pb.proto ${GOPATH}/bin/protoc-gen-gogo
-	PATH=${PATH}:${GOPATH}/bin protoc --gogo_out=pb --proto_path=pb pb/pb.proto
+internal/pb/pb.pb.go: internal/pb/pb.proto ${GOPATH}/bin/protoc-gen-gogo
+	PATH=${PATH}:${GOPATH}/bin protoc --gogo_out=internal/pb --proto_path=internal/pb internal/pb/pb.proto
 else
-pb/pb.pb.go: pb/pb.proto ${GOPATH}/bin/protoc-gen-gogo.exe
+internal/pb/pb.pb.go: internal/pb/pb.proto ${GOPATH}/bin/protoc-gen-gogo.exe
 	set "PATH=${PATH};${GOPATH}\bin" && \
-	call protoc --gogo_out=pb --proto_path=pb pb/pb.proto
+	call protoc --gogo_out=internal/pb --proto_path=internal/pb internal/pb/pb.proto
 endif
 
-pb/pb_pb2.py: pb/pb.proto
-	protoc --python_out pb --proto_path=pb pb/pb.proto
+internal/pb/pb_pb2.py: internal/pb/pb.proto
+	protoc --python_out internal/pb --proto_path=internal/pb internal/pb/pb.proto
 
 cmd/hercules/plugin_template_source.go: cmd/hercules/plugin.template
 	cd cmd/hercules && go generate
@@ -39,5 +39,5 @@ ${GOPATH}/pkg/$(PKG)/gopkg.in/bblfsh/client-go.v2: ${GOPATH}/src/gopkg.in/bblfsh
 	cd ${GOPATH}/src/gopkg.in/bblfsh/client-go.v2 && \
 	make dependencies
 
-${GOPATH}/bin/hercules${EXE}: *.go cmd/hercules/*.go rbtree/*.go yaml/*.go toposort/*.go pb/*.go ${GOPATH}/pkg/$(PKG)/gopkg.in/bblfsh/client-go.v2 pb/pb.pb.go pb/pb_pb2.py cmd/hercules/plugin_template_source.go
-	go get -tags "$(TAGS)" -ldflags "-X gopkg.in/src-d/hercules.v3.BinaryGitHash=$(shell git rev-parse HEAD)" gopkg.in/src-d/hercules.v3/cmd/hercules
+${GOPATH}/bin/hercules${EXE}: *.go */*.go */*/*.go ${GOPATH}/pkg/$(PKG)/gopkg.in/bblfsh/client-go.v2 internal/pb/pb.pb.go internal/pb/pb_pb2.py cmd/hercules/plugin_template_source.go
+	go get -tags "$(TAGS)" -ldflags "-X gopkg.in/src-d/hercules.v4.BinaryGitHash=$(shell git rev-parse HEAD)" gopkg.in/src-d/hercules.v4/cmd/hercules

Datei-Diff unterdrückt, da er zu groß ist
+ 3 - 3
README.md


+ 5 - 5
appveyor.yml

@@ -2,7 +2,7 @@ version: "{build}"
 platform: x64
 image: Visual Studio 2017
 
-clone_folder: c:\gopath\src\gopkg.in\src-d\hercules.v3
+clone_folder: c:\gopath\src\gopkg.in\src-d\hercules.v4
 
 environment:
   GOPATH: c:\gopath
@@ -17,14 +17,14 @@ install:
 build_script:
   - set PATH=%PATH:C:\Program Files\Git\usr\bin;=%
   - set PATH=C:\msys64\mingw64\bin;%PATH%
-  - cd %GOPATH%\src\gopkg.in\src-d\hercules.v3
+  - cd %GOPATH%\src\gopkg.in\src-d\hercules.v4
   - set DISABLE_TENSORFLOW=1
   - make
-  - 7z a c:\gopath\src\gopkg.in\src-d\hercules.v3\hercules.win64.zip %GOPATH%\bin\hercules.exe
+  - 7z a c:\gopath\src\gopkg.in\src-d\hercules.v4\hercules.win64.zip %GOPATH%\bin\hercules.exe
 
 test_script:
-  - go get -v -t -d gopkg.in/src-d/hercules.v3/...
-  - go test -v -tags disable_babelfish gopkg.in/src-d/hercules.v3
+  - go get -v -t -d gopkg.in/src-d/hercules.v4/...
+  - go test -v -tags disable_babelfish gopkg.in/src-d/hercules.v4/...
 
 artifacts:
   - name: hercules.win64.zip

+ 2 - 2
cmd/hercules/combine.go

@@ -12,8 +12,8 @@ import (
 
 	"github.com/gogo/protobuf/proto"
 	"github.com/spf13/cobra"
-	"gopkg.in/src-d/hercules.v3"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 // combineCmd represents the combine command

+ 1 - 1
cmd/hercules/plugin.template

@@ -24,7 +24,7 @@ import (
 
   "github.com/gogo/protobuf/proto"
   "gopkg.in/src-d/go-git.v4"
-  "gopkg.in/src-d/hercules.v3"
+  "gopkg.in/src-d/hercules.v4"
 )
 
 // {{.name}} contains the intermediate state which is mutated by Consume(). It should implement

+ 2 - 2
cmd/hercules/root.go

@@ -25,8 +25,8 @@ import (
 	"gopkg.in/src-d/go-git.v4/storage"
 	"gopkg.in/src-d/go-git.v4/storage/filesystem"
 	"gopkg.in/src-d/go-git.v4/storage/memory"
-	"gopkg.in/src-d/hercules.v3"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 // oneLineWriter splits the output data by lines and outputs one on top of another using '\r'.

+ 2 - 2
contrib/_plugin_example/churn_analysis.go

@@ -13,8 +13,8 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3"
-	"gopkg.in/src-d/hercules.v3/yaml"
+	"gopkg.in/src-d/hercules.v4"
+	"gopkg.in/src-d/hercules.v4/internal/yaml"
 )
 
 // ChurnAnalysis contains the intermediate state which is mutated by Consume(). It should implement

+ 87 - 0
core.go

@@ -0,0 +1,87 @@
+package hercules
+
+import (
+	git "gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/leaves"
+)
+
+// ConfigurationOptionType represents the possible types of a ConfigurationOption's value.
+type ConfigurationOptionType = core.ConfigurationOptionType
+
+const (
+	// BoolConfigurationOption reflects the boolean value type.
+	BoolConfigurationOption = core.BoolConfigurationOption
+	// IntConfigurationOption reflects the integer value type.
+	IntConfigurationOption = core.IntConfigurationOption
+	// StringConfigurationOption reflects the string value type.
+	StringConfigurationOption = core.StringConfigurationOption
+	// FloatConfigurationOption reflects a floating point value type.
+	FloatConfigurationOption = core.FloatConfigurationOption
+	// StringsConfigurationOption reflects the array of strings value type.
+	StringsConfigurationOption = core.StringsConfigurationOption
+)
+
+// ConfigurationOption allows for the unified, retrospective way to setup PipelineItem-s.
+type ConfigurationOption = core.ConfigurationOption
+
+// PipelineItem is the interface for all the units in the Git commits analysis pipeline.
+type PipelineItem = core.PipelineItem
+
+// FeaturedPipelineItem enables switching the automatic insertion of pipeline items on or off.
+type FeaturedPipelineItem = core.FeaturedPipelineItem
+
+// LeafPipelineItem corresponds to the top level pipeline items which produce the end results.
+type LeafPipelineItem = core.LeafPipelineItem
+
+// MergeablePipelineItem specifies the methods to combine several analysis results together.
+type MergeablePipelineItem = core.MergeablePipelineItem
+
+// CommonAnalysisResult holds the information which is always extracted at Pipeline.Run().
+type CommonAnalysisResult = core.CommonAnalysisResult
+
+// MetadataToCommonAnalysisResult copies the data from a Protobuf message.
+func MetadataToCommonAnalysisResult(meta *core.Metadata) *CommonAnalysisResult {
+	return core.MetadataToCommonAnalysisResult(meta)
+}
+
+// Pipeline is the core Hercules entity which carries several PipelineItems and executes them.
+// See the extended example of how a Pipeline works in doc.go
+type Pipeline = core.Pipeline
+
+const (
+	// ConfigPipelineDumpPath is the name of the Pipeline configuration option (Pipeline.Initialize())
+	// which enables saving the items DAG to the specified file.
+	ConfigPipelineDumpPath = core.ConfigPipelineDumpPath
+	// ConfigPipelineDryRun is the name of the Pipeline configuration option (Pipeline.Initialize())
+	// which disables Configure() and Initialize() invocation on each PipelineItem during the
+	// Pipeline initialization.
+	// Subsequent Run() calls are going to fail. Useful with ConfigPipelineDumpPath=true.
+	ConfigPipelineDryRun = core.ConfigPipelineDryRun
+	// ConfigPipelineCommits is the name of the Pipeline configuration option (Pipeline.Initialize())
+	// which allows to specify the custom commit sequence. By default, Pipeline.Commits() is used.
+	ConfigPipelineCommits = core.ConfigPipelineCommits
+)
+
+// NewPipeline initializes a new instance of Pipeline struct.
+func NewPipeline(repository *git.Repository) *Pipeline {
+	return core.NewPipeline(repository)
+}
+
+// LoadCommitsFromFile reads the file by the specified FS path and generates the sequence of commits
+// by interpreting each line as a Git commit hash.
+func LoadCommitsFromFile(path string, repository *git.Repository) ([]*object.Commit, error) {
+	return core.LoadCommitsFromFile(path, repository)
+}
+
+// PipelineItemRegistry contains all the known PipelineItem-s.
+type PipelineItemRegistry = core.PipelineItemRegistry
+
+// Registry contains all known pipeline item types.
+var Registry = core.Registry
+
+func init() {
+	// hack to link with .leaves
+	_ = leaves.BurndownAnalysis{}
+}

pb/__init__.py → internal/__init__.py


+ 5 - 46
file.go

@@ -1,8 +1,10 @@
-package hercules
+package burndown
 
 import (
 	"fmt"
-	"gopkg.in/src-d/hercules.v3/rbtree"
+
+	"gopkg.in/src-d/hercules.v4/internal"
+	"gopkg.in/src-d/hercules.v4/internal/rbtree"
 )
 
 // Status is the something we would like to keep track of in File.Update().
@@ -36,49 +38,6 @@ func NewStatus(data interface{}, update func(interface{}, int, int, int)) Status
 // TreeEnd denotes the value of the last leaf in the tree.
 const TreeEnd int = -1
 
-// The ugly side of Go.
-// template <typename T> please!
-
-// min calculates the minimum of two 32-bit integers.
-func min(a int, b int) int {
-	if a < b {
-		return a
-	}
-	return b
-}
-
-// min64 calculates the minimum of two 64-bit integers.
-func min64(a int64, b int64) int64 {
-	if a < b {
-		return a
-	}
-	return b
-}
-
-// max calculates the maximum of two 32-bit integers.
-func max(a int, b int) int {
-	if a < b {
-		return b
-	}
-	return a
-}
-
-// max64 calculates the maximum of two 64-bit integers.
-func max64(a int64, b int64) int64 {
-	if a < b {
-		return b
-	}
-	return a
-}
-
-// abs64 calculates the absolute value of a 64-bit integer.
-func abs64(v int64) int64 {
-	if v <= 0 {
-		return -v
-	}
-	return v
-}
-
 func (file *File) updateTime(currentTime int, previousTime int, delta int) {
 	for _, status := range file.statuses {
 		status.update(status.data, currentTime, previousTime, delta)
@@ -199,7 +158,7 @@ func (file *File) Update(time int, pos int, insLength int, delLength int) {
 			}
 			break
 		}
-		delta := min(nextIter.Item().Key, pos+delLength) - max(node.Key, pos)
+		delta := internal.Min(nextIter.Item().Key, pos+delLength) - internal.Max(node.Key, pos)
 		if delta <= 0 {
 			break
 		}

+ 2 - 2
file_test.go

@@ -1,10 +1,10 @@
-package hercules
+package burndown
 
 import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
-	"gopkg.in/src-d/hercules.v3/rbtree"
+	"gopkg.in/src-d/hercules.v4/internal/rbtree"
 )
 
 func updateStatusFile(

+ 7 - 4
pipeline.go

@@ -1,4 +1,4 @@
-package hercules
+package core
 
 import (
 	"bufio"
@@ -16,8 +16,8 @@ import (
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
-	"gopkg.in/src-d/hercules.v3/toposort"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	"gopkg.in/src-d/hercules.v4/internal/toposort"
 )
 
 // ConfigurationOptionType represents the possible types of a ConfigurationOption's value.
@@ -180,8 +180,11 @@ func (car *CommonAnalysisResult) FillMetadata(meta *pb.Metadata) *pb.Metadata {
 	return meta
 }
 
+// Metadata is defined in internal/pb/pb.pb.go - header of the binary file.
+type Metadata = pb.Metadata
+
 // MetadataToCommonAnalysisResult copies the data from a Protobuf message.
-func MetadataToCommonAnalysisResult(meta *pb.Metadata) *CommonAnalysisResult {
+func MetadataToCommonAnalysisResult(meta *Metadata) *CommonAnalysisResult {
 	return &CommonAnalysisResult{
 		BeginTime:     meta.BeginUnixTime,
 		EndTime:       meta.EndUnixTime,

+ 2 - 2
pipeline_test.go

@@ -1,4 +1,4 @@
-package hercules
+package core
 
 import (
 	"errors"
@@ -13,7 +13,7 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/storage/memory"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 type testPipelineItem struct {

+ 1 - 1
registry.go

@@ -1,4 +1,4 @@
-package hercules
+package core
 
 import (
 	"fmt"

+ 1 - 1
registry_test.go

@@ -1,4 +1,4 @@
-package hercules
+package core
 
 import (
 	"reflect"

+ 5 - 3
dummies.go

@@ -1,4 +1,4 @@
-package hercules
+package internal
 
 import (
 	"io"
@@ -60,9 +60,11 @@ func (obj dummyEncodedObject) Writer() (io.WriteCloser, error) {
 	return nil, errors.New("dummy failure")
 }
 
-func createDummyBlob(hash plumbing.Hash, fails ...bool) (*object.Blob, error) {
+// CreateDummyBlob constructs a fake object.Blob with empty contents.
+// Optionally returns an error if read or written.
+func CreateDummyBlob(hash plumbing.Hash, fails ...bool) (*object.Blob, error) {
 	if len(fails) > 1 {
-		panic("invalid usage of createDummyBlob() - this is a bug")
+		panic("invalid usage of CreateDummyBlob() - this is a bug")
 	}
 	var realFails bool
 	if len(fails) == 1 {

+ 1 - 1
dummies_test.go

@@ -1,4 +1,4 @@
-package hercules
+package internal
 
 import (
 	"io"

+ 44 - 0
internal/math.go

@@ -0,0 +1,44 @@
+package internal
+
+// The ugly side of Go.
+// template <typename T> please!
+
+// Min calculates the minimum of two 32-bit integers.
+func Min(a int, b int) int {
+	if a < b {
+		return a
+	}
+	return b
+}
+
+// Min64 calculates the minimum of two 64-bit integers.
+func Min64(a int64, b int64) int64 {
+	if a < b {
+		return a
+	}
+	return b
+}
+
+// Max calculates the maximum of two 32-bit integers.
+func Max(a int, b int) int {
+	if a < b {
+		return b
+	}
+	return a
+}
+
+// Max64 calculates the maximum of two 64-bit integers.
+func Max64(a int64, b int64) int64 {
+	if a < b {
+		return b
+	}
+	return a
+}
+
+// Abs64 calculates the absolute value of a 64-bit integer.
+func Abs64(v int64) int64 {
+	if v <= 0 {
+		return -v
+	}
+	return v
+}

test_data/gitmodules_empty → internal/pb/__init__.py


+ 685 - 0
internal/pb/pb.pb.go

@@ -0,0 +1,685 @@
+// Code generated by protoc-gen-gogo. DO NOT EDIT.
+// source: pb.proto
+
+/*
+Package pb is a generated protocol buffer package.
+
+It is generated from these files:
+	pb.proto
+
+It has these top-level messages:
+	Metadata
+	BurndownSparseMatrixRow
+	BurndownSparseMatrix
+	BurndownAnalysisResults
+	CompressedSparseRowMatrix
+	Couples
+	TouchedFiles
+	CouplesAnalysisResults
+	UASTChange
+	UASTChangesSaverResults
+	ShotnessRecord
+	ShotnessAnalysisResults
+	FileHistory
+	FileHistoryResultMessage
+	Sentiment
+	CommentSentimentResults
+	AnalysisResults
+*/
+package pb
+
+import proto "github.com/gogo/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
+
+type Metadata struct {
+	// this format is versioned
+	Version int32 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"`
+	// git hash of the revision from which Hercules is built
+	Hash string `protobuf:"bytes,2,opt,name=hash,proto3" json:"hash,omitempty"`
+	// repository's name
+	Repository string `protobuf:"bytes,3,opt,name=repository,proto3" json:"repository,omitempty"`
+	// UNIX timestamp of the first analysed commit
+	BeginUnixTime int64 `protobuf:"varint,4,opt,name=begin_unix_time,json=beginUnixTime,proto3" json:"begin_unix_time,omitempty"`
+	// UNIX timestamp of the last analysed commit
+	EndUnixTime int64 `protobuf:"varint,5,opt,name=end_unix_time,json=endUnixTime,proto3" json:"end_unix_time,omitempty"`
+	// number of processed commits
+	Commits int32 `protobuf:"varint,6,opt,name=commits,proto3" json:"commits,omitempty"`
+	// duration of the analysis in milliseconds
+	RunTime int64 `protobuf:"varint,7,opt,name=run_time,json=runTime,proto3" json:"run_time,omitempty"`
+}
+
+func (m *Metadata) Reset()                    { *m = Metadata{} }
+func (m *Metadata) String() string            { return proto.CompactTextString(m) }
+func (*Metadata) ProtoMessage()               {}
+func (*Metadata) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{0} }
+
+func (m *Metadata) GetVersion() int32 {
+	if m != nil {
+		return m.Version
+	}
+	return 0
+}
+
+func (m *Metadata) GetHash() string {
+	if m != nil {
+		return m.Hash
+	}
+	return ""
+}
+
+func (m *Metadata) GetRepository() string {
+	if m != nil {
+		return m.Repository
+	}
+	return ""
+}
+
+func (m *Metadata) GetBeginUnixTime() int64 {
+	if m != nil {
+		return m.BeginUnixTime
+	}
+	return 0
+}
+
+func (m *Metadata) GetEndUnixTime() int64 {
+	if m != nil {
+		return m.EndUnixTime
+	}
+	return 0
+}
+
+func (m *Metadata) GetCommits() int32 {
+	if m != nil {
+		return m.Commits
+	}
+	return 0
+}
+
+func (m *Metadata) GetRunTime() int64 {
+	if m != nil {
+		return m.RunTime
+	}
+	return 0
+}
+
+type BurndownSparseMatrixRow struct {
+	// the first `len(column)` elements are stored,
+	// the rest `number_of_columns - len(column)` values are zeros
+	Columns []uint32 `protobuf:"varint,1,rep,packed,name=columns" json:"columns,omitempty"`
+}
+
+func (m *BurndownSparseMatrixRow) Reset()                    { *m = BurndownSparseMatrixRow{} }
+func (m *BurndownSparseMatrixRow) String() string            { return proto.CompactTextString(m) }
+func (*BurndownSparseMatrixRow) ProtoMessage()               {}
+func (*BurndownSparseMatrixRow) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{1} }
+
+func (m *BurndownSparseMatrixRow) GetColumns() []uint32 {
+	if m != nil {
+		return m.Columns
+	}
+	return nil
+}
+
+type BurndownSparseMatrix struct {
+	Name            string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+	NumberOfRows    int32  `protobuf:"varint,2,opt,name=number_of_rows,json=numberOfRows,proto3" json:"number_of_rows,omitempty"`
+	NumberOfColumns int32  `protobuf:"varint,3,opt,name=number_of_columns,json=numberOfColumns,proto3" json:"number_of_columns,omitempty"`
+	// `len(row)` matches `number_of_rows`
+	Rows []*BurndownSparseMatrixRow `protobuf:"bytes,4,rep,name=rows" json:"rows,omitempty"`
+}
+
+func (m *BurndownSparseMatrix) Reset()                    { *m = BurndownSparseMatrix{} }
+func (m *BurndownSparseMatrix) String() string            { return proto.CompactTextString(m) }
+func (*BurndownSparseMatrix) ProtoMessage()               {}
+func (*BurndownSparseMatrix) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{2} }
+
+func (m *BurndownSparseMatrix) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *BurndownSparseMatrix) GetNumberOfRows() int32 {
+	if m != nil {
+		return m.NumberOfRows
+	}
+	return 0
+}
+
+func (m *BurndownSparseMatrix) GetNumberOfColumns() int32 {
+	if m != nil {
+		return m.NumberOfColumns
+	}
+	return 0
+}
+
+func (m *BurndownSparseMatrix) GetRows() []*BurndownSparseMatrixRow {
+	if m != nil {
+		return m.Rows
+	}
+	return nil
+}
+
+type BurndownAnalysisResults struct {
+	// how many days are in each band [burndown_project, burndown_file, burndown_developer]
+	Granularity int32 `protobuf:"varint,1,opt,name=granularity,proto3" json:"granularity,omitempty"`
+	// how frequently we measure the state of each band [burndown_project, burndown_file, burndown_developer]
+	Sampling int32 `protobuf:"varint,2,opt,name=sampling,proto3" json:"sampling,omitempty"`
+	// always exists
+	Project *BurndownSparseMatrix `protobuf:"bytes,3,opt,name=project" json:"project,omitempty"`
+	// this is included if `-burndown-files` was specified
+	Files []*BurndownSparseMatrix `protobuf:"bytes,4,rep,name=files" json:"files,omitempty"`
+	// these two are included if `-burndown-people` was specified
+	People []*BurndownSparseMatrix `protobuf:"bytes,5,rep,name=people" json:"people,omitempty"`
+	// rows and cols order correspond to `burndown_developer`
+	PeopleInteraction *CompressedSparseRowMatrix `protobuf:"bytes,6,opt,name=people_interaction,json=peopleInteraction" json:"people_interaction,omitempty"`
+}
+
+func (m *BurndownAnalysisResults) Reset()                    { *m = BurndownAnalysisResults{} }
+func (m *BurndownAnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*BurndownAnalysisResults) ProtoMessage()               {}
+func (*BurndownAnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{3} }
+
+func (m *BurndownAnalysisResults) GetGranularity() int32 {
+	if m != nil {
+		return m.Granularity
+	}
+	return 0
+}
+
+func (m *BurndownAnalysisResults) GetSampling() int32 {
+	if m != nil {
+		return m.Sampling
+	}
+	return 0
+}
+
+func (m *BurndownAnalysisResults) GetProject() *BurndownSparseMatrix {
+	if m != nil {
+		return m.Project
+	}
+	return nil
+}
+
+func (m *BurndownAnalysisResults) GetFiles() []*BurndownSparseMatrix {
+	if m != nil {
+		return m.Files
+	}
+	return nil
+}
+
+func (m *BurndownAnalysisResults) GetPeople() []*BurndownSparseMatrix {
+	if m != nil {
+		return m.People
+	}
+	return nil
+}
+
+func (m *BurndownAnalysisResults) GetPeopleInteraction() *CompressedSparseRowMatrix {
+	if m != nil {
+		return m.PeopleInteraction
+	}
+	return nil
+}
+
+type CompressedSparseRowMatrix struct {
+	NumberOfRows    int32 `protobuf:"varint,1,opt,name=number_of_rows,json=numberOfRows,proto3" json:"number_of_rows,omitempty"`
+	NumberOfColumns int32 `protobuf:"varint,2,opt,name=number_of_columns,json=numberOfColumns,proto3" json:"number_of_columns,omitempty"`
+	// https://en.wikipedia.org/wiki/Sparse_matrix#Compressed_sparse_row_.28CSR.2C_CRS_or_Yale_format.29
+	Data    []int64 `protobuf:"varint,3,rep,packed,name=data" json:"data,omitempty"`
+	Indices []int32 `protobuf:"varint,4,rep,packed,name=indices" json:"indices,omitempty"`
+	Indptr  []int64 `protobuf:"varint,5,rep,packed,name=indptr" json:"indptr,omitempty"`
+}
+
+func (m *CompressedSparseRowMatrix) Reset()                    { *m = CompressedSparseRowMatrix{} }
+func (m *CompressedSparseRowMatrix) String() string            { return proto.CompactTextString(m) }
+func (*CompressedSparseRowMatrix) ProtoMessage()               {}
+func (*CompressedSparseRowMatrix) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{4} }
+
+func (m *CompressedSparseRowMatrix) GetNumberOfRows() int32 {
+	if m != nil {
+		return m.NumberOfRows
+	}
+	return 0
+}
+
+func (m *CompressedSparseRowMatrix) GetNumberOfColumns() int32 {
+	if m != nil {
+		return m.NumberOfColumns
+	}
+	return 0
+}
+
+func (m *CompressedSparseRowMatrix) GetData() []int64 {
+	if m != nil {
+		return m.Data
+	}
+	return nil
+}
+
+func (m *CompressedSparseRowMatrix) GetIndices() []int32 {
+	if m != nil {
+		return m.Indices
+	}
+	return nil
+}
+
+func (m *CompressedSparseRowMatrix) GetIndptr() []int64 {
+	if m != nil {
+		return m.Indptr
+	}
+	return nil
+}
+
+type Couples struct {
+	// name of each `matrix`'s row and column
+	Index []string `protobuf:"bytes,1,rep,name=index" json:"index,omitempty"`
+	// is always square
+	Matrix *CompressedSparseRowMatrix `protobuf:"bytes,2,opt,name=matrix" json:"matrix,omitempty"`
+}
+
+func (m *Couples) Reset()                    { *m = Couples{} }
+func (m *Couples) String() string            { return proto.CompactTextString(m) }
+func (*Couples) ProtoMessage()               {}
+func (*Couples) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{5} }
+
+func (m *Couples) GetIndex() []string {
+	if m != nil {
+		return m.Index
+	}
+	return nil
+}
+
+func (m *Couples) GetMatrix() *CompressedSparseRowMatrix {
+	if m != nil {
+		return m.Matrix
+	}
+	return nil
+}
+
+type TouchedFiles struct {
+	Files []int32 `protobuf:"varint,1,rep,packed,name=files" json:"files,omitempty"`
+}
+
+func (m *TouchedFiles) Reset()                    { *m = TouchedFiles{} }
+func (m *TouchedFiles) String() string            { return proto.CompactTextString(m) }
+func (*TouchedFiles) ProtoMessage()               {}
+func (*TouchedFiles) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{6} }
+
+func (m *TouchedFiles) GetFiles() []int32 {
+	if m != nil {
+		return m.Files
+	}
+	return nil
+}
+
+type CouplesAnalysisResults struct {
+	FileCouples   *Couples `protobuf:"bytes,6,opt,name=file_couples,json=fileCouples" json:"file_couples,omitempty"`
+	PeopleCouples *Couples `protobuf:"bytes,7,opt,name=people_couples,json=peopleCouples" json:"people_couples,omitempty"`
+	// order corresponds to `people_couples::index`
+	PeopleFiles []*TouchedFiles `protobuf:"bytes,8,rep,name=people_files,json=peopleFiles" json:"people_files,omitempty"`
+}
+
+func (m *CouplesAnalysisResults) Reset()                    { *m = CouplesAnalysisResults{} }
+func (m *CouplesAnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*CouplesAnalysisResults) ProtoMessage()               {}
+func (*CouplesAnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{7} }
+
+func (m *CouplesAnalysisResults) GetFileCouples() *Couples {
+	if m != nil {
+		return m.FileCouples
+	}
+	return nil
+}
+
+func (m *CouplesAnalysisResults) GetPeopleCouples() *Couples {
+	if m != nil {
+		return m.PeopleCouples
+	}
+	return nil
+}
+
+func (m *CouplesAnalysisResults) GetPeopleFiles() []*TouchedFiles {
+	if m != nil {
+		return m.PeopleFiles
+	}
+	return nil
+}
+
+type UASTChange struct {
+	FileName   string `protobuf:"bytes,1,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"`
+	SrcBefore  string `protobuf:"bytes,2,opt,name=src_before,json=srcBefore,proto3" json:"src_before,omitempty"`
+	SrcAfter   string `protobuf:"bytes,3,opt,name=src_after,json=srcAfter,proto3" json:"src_after,omitempty"`
+	UastBefore string `protobuf:"bytes,4,opt,name=uast_before,json=uastBefore,proto3" json:"uast_before,omitempty"`
+	UastAfter  string `protobuf:"bytes,5,opt,name=uast_after,json=uastAfter,proto3" json:"uast_after,omitempty"`
+}
+
+func (m *UASTChange) Reset()                    { *m = UASTChange{} }
+func (m *UASTChange) String() string            { return proto.CompactTextString(m) }
+func (*UASTChange) ProtoMessage()               {}
+func (*UASTChange) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{8} }
+
+func (m *UASTChange) GetFileName() string {
+	if m != nil {
+		return m.FileName
+	}
+	return ""
+}
+
+func (m *UASTChange) GetSrcBefore() string {
+	if m != nil {
+		return m.SrcBefore
+	}
+	return ""
+}
+
+func (m *UASTChange) GetSrcAfter() string {
+	if m != nil {
+		return m.SrcAfter
+	}
+	return ""
+}
+
+func (m *UASTChange) GetUastBefore() string {
+	if m != nil {
+		return m.UastBefore
+	}
+	return ""
+}
+
+func (m *UASTChange) GetUastAfter() string {
+	if m != nil {
+		return m.UastAfter
+	}
+	return ""
+}
+
+type UASTChangesSaverResults struct {
+	Changes []*UASTChange `protobuf:"bytes,1,rep,name=changes" json:"changes,omitempty"`
+}
+
+func (m *UASTChangesSaverResults) Reset()                    { *m = UASTChangesSaverResults{} }
+func (m *UASTChangesSaverResults) String() string            { return proto.CompactTextString(m) }
+func (*UASTChangesSaverResults) ProtoMessage()               {}
+func (*UASTChangesSaverResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{9} }
+
+func (m *UASTChangesSaverResults) GetChanges() []*UASTChange {
+	if m != nil {
+		return m.Changes
+	}
+	return nil
+}
+
+type ShotnessRecord struct {
+	InternalRole string          `protobuf:"bytes,1,opt,name=internal_role,json=internalRole,proto3" json:"internal_role,omitempty"`
+	Roles        []int32         `protobuf:"varint,2,rep,packed,name=roles" json:"roles,omitempty"`
+	Name         string          `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
+	File         string          `protobuf:"bytes,4,opt,name=file,proto3" json:"file,omitempty"`
+	Counters     map[int32]int32 `protobuf:"bytes,5,rep,name=counters" json:"counters,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
+}
+
+func (m *ShotnessRecord) Reset()                    { *m = ShotnessRecord{} }
+func (m *ShotnessRecord) String() string            { return proto.CompactTextString(m) }
+func (*ShotnessRecord) ProtoMessage()               {}
+func (*ShotnessRecord) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{10} }
+
+func (m *ShotnessRecord) GetInternalRole() string {
+	if m != nil {
+		return m.InternalRole
+	}
+	return ""
+}
+
+func (m *ShotnessRecord) GetRoles() []int32 {
+	if m != nil {
+		return m.Roles
+	}
+	return nil
+}
+
+func (m *ShotnessRecord) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *ShotnessRecord) GetFile() string {
+	if m != nil {
+		return m.File
+	}
+	return ""
+}
+
+func (m *ShotnessRecord) GetCounters() map[int32]int32 {
+	if m != nil {
+		return m.Counters
+	}
+	return nil
+}
+
+type ShotnessAnalysisResults struct {
+	Records []*ShotnessRecord `protobuf:"bytes,1,rep,name=records" json:"records,omitempty"`
+}
+
+func (m *ShotnessAnalysisResults) Reset()                    { *m = ShotnessAnalysisResults{} }
+func (m *ShotnessAnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*ShotnessAnalysisResults) ProtoMessage()               {}
+func (*ShotnessAnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{11} }
+
+func (m *ShotnessAnalysisResults) GetRecords() []*ShotnessRecord {
+	if m != nil {
+		return m.Records
+	}
+	return nil
+}
+
+type FileHistory struct {
+	Commits []string `protobuf:"bytes,1,rep,name=commits" json:"commits,omitempty"`
+}
+
+func (m *FileHistory) Reset()                    { *m = FileHistory{} }
+func (m *FileHistory) String() string            { return proto.CompactTextString(m) }
+func (*FileHistory) ProtoMessage()               {}
+func (*FileHistory) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{12} }
+
+func (m *FileHistory) GetCommits() []string {
+	if m != nil {
+		return m.Commits
+	}
+	return nil
+}
+
+type FileHistoryResultMessage struct {
+	Files map[string]*FileHistory `protobuf:"bytes,1,rep,name=files" json:"files,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
+}
+
+func (m *FileHistoryResultMessage) Reset()                    { *m = FileHistoryResultMessage{} }
+func (m *FileHistoryResultMessage) String() string            { return proto.CompactTextString(m) }
+func (*FileHistoryResultMessage) ProtoMessage()               {}
+func (*FileHistoryResultMessage) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{13} }
+
+func (m *FileHistoryResultMessage) GetFiles() map[string]*FileHistory {
+	if m != nil {
+		return m.Files
+	}
+	return nil
+}
+
+type Sentiment struct {
+	Value    float32  `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"`
+	Comments []string `protobuf:"bytes,2,rep,name=comments" json:"comments,omitempty"`
+	Commits  []string `protobuf:"bytes,3,rep,name=commits" json:"commits,omitempty"`
+}
+
+func (m *Sentiment) Reset()                    { *m = Sentiment{} }
+func (m *Sentiment) String() string            { return proto.CompactTextString(m) }
+func (*Sentiment) ProtoMessage()               {}
+func (*Sentiment) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{14} }
+
+func (m *Sentiment) GetValue() float32 {
+	if m != nil {
+		return m.Value
+	}
+	return 0
+}
+
+func (m *Sentiment) GetComments() []string {
+	if m != nil {
+		return m.Comments
+	}
+	return nil
+}
+
+func (m *Sentiment) GetCommits() []string {
+	if m != nil {
+		return m.Commits
+	}
+	return nil
+}
+
+type CommentSentimentResults struct {
+	SentimentByDay map[int32]*Sentiment `protobuf:"bytes,1,rep,name=sentiment_by_day,json=sentimentByDay" json:"sentiment_by_day,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
+}
+
+func (m *CommentSentimentResults) Reset()                    { *m = CommentSentimentResults{} }
+func (m *CommentSentimentResults) String() string            { return proto.CompactTextString(m) }
+func (*CommentSentimentResults) ProtoMessage()               {}
+func (*CommentSentimentResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{15} }
+
+func (m *CommentSentimentResults) GetSentimentByDay() map[int32]*Sentiment {
+	if m != nil {
+		return m.SentimentByDay
+	}
+	return nil
+}
+
+type AnalysisResults struct {
+	Header *Metadata `protobuf:"bytes,1,opt,name=header" json:"header,omitempty"`
+	// the mapped values are dynamic messages which require the second parsing pass.
+	Contents map[string][]byte `protobuf:"bytes,2,rep,name=contents" json:"contents,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+}
+
+func (m *AnalysisResults) Reset()                    { *m = AnalysisResults{} }
+func (m *AnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*AnalysisResults) ProtoMessage()               {}
+func (*AnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{16} }
+
+func (m *AnalysisResults) GetHeader() *Metadata {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *AnalysisResults) GetContents() map[string][]byte {
+	if m != nil {
+		return m.Contents
+	}
+	return nil
+}
+
+func init() {
+	proto.RegisterType((*Metadata)(nil), "Metadata")
+	proto.RegisterType((*BurndownSparseMatrixRow)(nil), "BurndownSparseMatrixRow")
+	proto.RegisterType((*BurndownSparseMatrix)(nil), "BurndownSparseMatrix")
+	proto.RegisterType((*BurndownAnalysisResults)(nil), "BurndownAnalysisResults")
+	proto.RegisterType((*CompressedSparseRowMatrix)(nil), "CompressedSparseRowMatrix")
+	proto.RegisterType((*Couples)(nil), "Couples")
+	proto.RegisterType((*TouchedFiles)(nil), "TouchedFiles")
+	proto.RegisterType((*CouplesAnalysisResults)(nil), "CouplesAnalysisResults")
+	proto.RegisterType((*UASTChange)(nil), "UASTChange")
+	proto.RegisterType((*UASTChangesSaverResults)(nil), "UASTChangesSaverResults")
+	proto.RegisterType((*ShotnessRecord)(nil), "ShotnessRecord")
+	proto.RegisterType((*ShotnessAnalysisResults)(nil), "ShotnessAnalysisResults")
+	proto.RegisterType((*FileHistory)(nil), "FileHistory")
+	proto.RegisterType((*FileHistoryResultMessage)(nil), "FileHistoryResultMessage")
+	proto.RegisterType((*Sentiment)(nil), "Sentiment")
+	proto.RegisterType((*CommentSentimentResults)(nil), "CommentSentimentResults")
+	proto.RegisterType((*AnalysisResults)(nil), "AnalysisResults")
+}
+
+func init() { proto.RegisterFile("pb.proto", fileDescriptorPb) }
+
+var fileDescriptorPb = []byte{
+	// 1053 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0xdf, 0x6e, 0x1b, 0xc5,
+	0x17, 0xd6, 0x66, 0xfd, 0xf7, 0xac, 0x9d, 0xb4, 0xf3, 0xeb, 0xaf, 0xd9, 0x06, 0xb5, 0x98, 0x25,
+	0x80, 0xa1, 0x65, 0x8b, 0xdc, 0x1b, 0x08, 0x37, 0x24, 0x2e, 0x15, 0xbd, 0x08, 0x48, 0xe3, 0x14,
+	0x2e, 0xad, 0xf1, 0xee, 0x24, 0x5e, 0x58, 0xcf, 0xac, 0x66, 0x76, 0x93, 0xf8, 0x65, 0xb8, 0x43,
+	0x42, 0x48, 0x88, 0x0b, 0x5e, 0x80, 0xd7, 0xe0, 0x19, 0x78, 0x09, 0x34, 0xff, 0xec, 0xb5, 0xe5,
+	0x54, 0xdc, 0xcd, 0x39, 0xe7, 0xfb, 0xce, 0x9c, 0xf9, 0xce, 0x99, 0xd9, 0x85, 0x4e, 0x31, 0x8b,
+	0x0b, 0xc1, 0x4b, 0x1e, 0xfd, 0xed, 0x41, 0xe7, 0x9c, 0x96, 0x24, 0x25, 0x25, 0x41, 0x21, 0xb4,
+	0xaf, 0xa9, 0x90, 0x19, 0x67, 0xa1, 0x37, 0xf0, 0x86, 0x4d, 0xec, 0x4c, 0x84, 0xa0, 0x31, 0x27,
+	0x72, 0x1e, 0xee, 0x0d, 0xbc, 0x61, 0x17, 0xeb, 0x35, 0x7a, 0x02, 0x20, 0x68, 0xc1, 0x65, 0x56,
+	0x72, 0xb1, 0x0c, 0x7d, 0x1d, 0xa9, 0x79, 0xd0, 0x87, 0x70, 0x30, 0xa3, 0x57, 0x19, 0x9b, 0x56,
+	0x2c, 0xbb, 0x9d, 0x96, 0xd9, 0x82, 0x86, 0x8d, 0x81, 0x37, 0xf4, 0x71, 0x5f, 0xbb, 0xdf, 0xb0,
+	0xec, 0xf6, 0x22, 0x5b, 0x50, 0x14, 0x41, 0x9f, 0xb2, 0xb4, 0x86, 0x6a, 0x6a, 0x54, 0x40, 0x59,
+	0xba, 0xc2, 0x84, 0xd0, 0x4e, 0xf8, 0x62, 0x91, 0x95, 0x32, 0x6c, 0x99, 0xca, 0xac, 0x89, 0x1e,
+	0x41, 0x47, 0x54, 0xcc, 0x10, 0xdb, 0x9a, 0xd8, 0x16, 0x15, 0x53, 0xa4, 0xe8, 0x05, 0x1c, 0x9e,
+	0x55, 0x82, 0xa5, 0xfc, 0x86, 0x4d, 0x0a, 0x22, 0x24, 0x3d, 0x27, 0xa5, 0xc8, 0x6e, 0x31, 0xbf,
+	0x31, 0xf9, 0xf2, 0x6a, 0xc1, 0x64, 0xe8, 0x0d, 0xfc, 0x61, 0x1f, 0x3b, 0x33, 0xfa, 0xcd, 0x83,
+	0x07, 0xbb, 0x58, 0x4a, 0x02, 0x46, 0x16, 0x54, 0x2b, 0xd3, 0xc5, 0x7a, 0x8d, 0x8e, 0x61, 0x9f,
+	0x55, 0x8b, 0x19, 0x15, 0x53, 0x7e, 0x39, 0x15, 0xfc, 0x46, 0x6a, 0x81, 0x9a, 0xb8, 0x67, 0xbc,
+	0xdf, 0x5d, 0x62, 0x7e, 0x23, 0xd1, 0x27, 0x70, 0x7f, 0x8d, 0x72, 0xdb, 0xfa, 0x1a, 0x78, 0xe0,
+	0x80, 0x63, 0xe3, 0x46, 0xcf, 0xa0, 0xa1, 0xf3, 0x34, 0x06, 0xfe, 0x30, 0x18, 0x85, 0xf1, 0x1d,
+	0x07, 0xc0, 0x1a, 0x15, 0xfd, 0xb1, 0xb7, 0x3e, 0xe2, 0x29, 0x23, 0xf9, 0x52, 0x66, 0x12, 0x53,
+	0x59, 0xe5, 0xa5, 0x44, 0x03, 0x08, 0xae, 0x04, 0x61, 0x55, 0x4e, 0x44, 0x56, 0x2e, 0x6d, 0x43,
+	0xeb, 0x2e, 0x74, 0x04, 0x1d, 0x49, 0x16, 0x45, 0x9e, 0xb1, 0x2b, 0x5b, 0xf7, 0xca, 0x46, 0xcf,
+	0xa1, 0x5d, 0x08, 0xfe, 0x23, 0x4d, 0x4a, 0x5d, 0x69, 0x30, 0xfa, 0xff, 0xee, 0x52, 0x1c, 0x0a,
+	0x3d, 0x85, 0xe6, 0x65, 0x96, 0x53, 0x57, 0xf9, 0x1d, 0x70, 0x83, 0x41, 0x9f, 0x42, 0xab, 0xa0,
+	0xbc, 0xc8, 0x55, 0xaf, 0xdf, 0x82, 0xb6, 0x20, 0xf4, 0x1a, 0x90, 0x59, 0x4d, 0x33, 0x56, 0x52,
+	0x41, 0x92, 0x52, 0x8d, 0x68, 0x4b, 0xd7, 0x75, 0x14, 0x8f, 0xf9, 0xa2, 0x10, 0x54, 0x4a, 0x9a,
+	0x1a, 0x32, 0xe6, 0x37, 0x96, 0x7f, 0xdf, 0xb0, 0x5e, 0xaf, 0x49, 0xd1, 0x9f, 0x1e, 0x3c, 0xba,
+	0x93, 0xb0, 0xa3, 0x9f, 0xde, 0x7f, 0xed, 0xe7, 0xde, 0xee, 0x7e, 0x22, 0x68, 0xa8, 0xab, 0x15,
+	0xfa, 0x03, 0x7f, 0xe8, 0xe3, 0x86, 0xbb, 0x66, 0x19, 0x4b, 0xb3, 0xc4, 0x8a, 0xd5, 0xc4, 0xce,
+	0x44, 0x0f, 0xa1, 0x95, 0xb1, 0xb4, 0x28, 0x85, 0xd6, 0xc5, 0xc7, 0xd6, 0x8a, 0x26, 0xd0, 0x1e,
+	0xf3, 0xaa, 0x50, 0xd2, 0x3d, 0x80, 0x66, 0xc6, 0x52, 0x7a, 0xab, 0xe7, 0xb6, 0x8b, 0x8d, 0x81,
+	0x46, 0xd0, 0x5a, 0xe8, 0x23, 0xe8, 0x3a, 0xde, 0xae, 0x8a, 0x45, 0x46, 0xc7, 0xd0, 0xbb, 0xe0,
+	0x55, 0x32, 0xa7, 0xe9, 0xab, 0xcc, 0x66, 0x36, 0x1d, 0xf4, 0x74, 0x51, 0xc6, 0x88, 0x7e, 0xf5,
+	0xe0, 0xa1, 0xdd, 0x7b, 0x7b, 0xc2, 0x9e, 0x42, 0x4f, 0x61, 0xa6, 0x89, 0x09, 0xdb, 0x86, 0x74,
+	0x62, 0x0b, 0xc7, 0x81, 0x8a, 0xba, 0xba, 0x9f, 0xc3, 0xbe, 0xed, 0xa1, 0x83, 0xb7, 0xb7, 0xe0,
+	0x7d, 0x13, 0x77, 0x84, 0xcf, 0xa0, 0x67, 0x09, 0xa6, 0xaa, 0x8e, 0x9e, 0x94, 0x7e, 0x5c, 0xaf,
+	0x19, 0x07, 0x06, 0xa2, 0x8d, 0xe8, 0x17, 0x0f, 0xe0, 0xcd, 0xe9, 0xe4, 0x62, 0x3c, 0x27, 0xec,
+	0x8a, 0xa2, 0x77, 0xa0, 0xab, 0xcb, 0xab, 0xdd, 0xda, 0x8e, 0x72, 0x7c, 0xab, 0x6e, 0xee, 0x63,
+	0x00, 0x29, 0x92, 0xe9, 0x8c, 0x5e, 0x72, 0x41, 0xed, 0xb3, 0xd6, 0x95, 0x22, 0x39, 0xd3, 0x0e,
+	0xc5, 0x55, 0x61, 0x72, 0x59, 0x52, 0x61, 0x9f, 0xb6, 0x8e, 0x14, 0xc9, 0xa9, 0xb2, 0xd1, 0xbb,
+	0x10, 0x54, 0x44, 0x96, 0x8e, 0xdc, 0x30, 0x2f, 0x9f, 0x72, 0x59, 0xf6, 0x63, 0xd0, 0x96, 0xa5,
+	0x37, 0x4d, 0x72, 0xe5, 0xd1, 0xfc, 0xe8, 0x2b, 0x38, 0x5c, 0x97, 0x29, 0x27, 0xe4, 0x9a, 0x0a,
+	0x27, 0xe9, 0x07, 0xd0, 0x4e, 0x8c, 0x5b, 0x77, 0x21, 0x18, 0x05, 0xf1, 0x1a, 0x8a, 0x5d, 0x2c,
+	0xfa, 0xc7, 0x83, 0xfd, 0xc9, 0x9c, 0x97, 0x8c, 0x4a, 0x89, 0x69, 0xc2, 0x45, 0x8a, 0xde, 0x87,
+	0xbe, 0xbe, 0x1c, 0x8c, 0xe4, 0x53, 0xc1, 0x73, 0x77, 0xe2, 0x9e, 0x73, 0x62, 0x9e, 0x53, 0xd5,
+	0x62, 0x15, 0x53, 0xd3, 0xaa, 0x5b, 0xac, 0x8d, 0xd5, 0xcb, 0xe6, 0xd7, 0x5e, 0x36, 0x04, 0x0d,
+	0xa5, 0x95, 0x3d, 0x9c, 0x5e, 0xa3, 0x2f, 0xa0, 0x93, 0xf0, 0x4a, 0xe5, 0x93, 0xf6, 0xde, 0x3e,
+	0x8e, 0x37, 0xab, 0x50, 0xbd, 0xd4, 0xf1, 0xaf, 0x59, 0x29, 0x96, 0x78, 0x05, 0x3f, 0xfa, 0x12,
+	0xfa, 0x1b, 0x21, 0x74, 0x0f, 0xfc, 0x9f, 0xa8, 0x7b, 0x95, 0xd4, 0x52, 0xd5, 0x76, 0x4d, 0xf2,
+	0x8a, 0xda, 0x9b, 0x64, 0x8c, 0x93, 0xbd, 0xcf, 0xbd, 0xe8, 0x25, 0x1c, 0xba, 0x6d, 0xb6, 0x47,
+	0xf0, 0x63, 0x68, 0x0b, 0xbd, 0xb3, 0xd3, 0xeb, 0x60, 0xab, 0x22, 0xec, 0xe2, 0xd1, 0x47, 0x10,
+	0xa8, 0x31, 0xf9, 0x26, 0x93, 0xfa, 0xeb, 0x54, 0xfb, 0xa2, 0x98, 0x9b, 0xe4, 0xcc, 0xe8, 0x67,
+	0x0f, 0xc2, 0x1a, 0xd2, 0x6c, 0x75, 0x4e, 0xa5, 0x24, 0x57, 0x14, 0x9d, 0xd4, 0x2f, 0x49, 0x30,
+	0x3a, 0x8e, 0xef, 0x42, 0xea, 0x80, 0xd5, 0xc1, 0x50, 0x8e, 0x5e, 0x01, 0xac, 0x9d, 0x75, 0x05,
+	0xba, 0x46, 0x81, 0xa8, 0xae, 0x40, 0x30, 0xea, 0x6d, 0xe4, 0xae, 0xe9, 0xf1, 0x03, 0x74, 0x27,
+	0x94, 0xa9, 0x2f, 0x1e, 0x2b, 0xd7, 0xb2, 0xa9, 0x44, 0x7b, 0x16, 0xa6, 0x9e, 0x76, 0x75, 0x1c,
+	0xca, 0x4a, 0xd3, 0xeb, 0x2e, 0x5e, 0xd9, 0xf5, 0x93, 0xfb, 0x9b, 0x27, 0xff, 0xcb, 0x83, 0xc3,
+	0xb1, 0x81, 0xad, 0x36, 0x70, 0x4a, 0x7f, 0x0f, 0xf7, 0xa4, 0xf3, 0x4d, 0x67, 0xcb, 0x69, 0x4a,
+	0x96, 0x56, 0x83, 0x67, 0xf1, 0x1d, 0x9c, 0x78, 0xe5, 0x38, 0x5b, 0xbe, 0x24, 0x4b, 0xa3, 0xc5,
+	0xbe, 0xdc, 0x70, 0x1e, 0x9d, 0xc3, 0xff, 0x76, 0xc0, 0x76, 0xcc, 0xc7, 0x60, 0x53, 0x1d, 0x58,
+	0x67, 0xaf, 0x6b, 0xf3, 0xbb, 0x07, 0x07, 0xdb, 0x43, 0xf2, 0x1e, 0xb4, 0xe6, 0x94, 0xa4, 0x54,
+	0xe8, 0x74, 0xc1, 0xa8, 0x1b, 0xbb, 0x3f, 0x1e, 0x6c, 0x03, 0xe8, 0x44, 0xe9, 0xc5, 0xca, 0x95,
+	0x5e, 0xc1, 0xe8, 0x49, 0xbc, 0x95, 0x26, 0x1e, 0x5b, 0xc0, 0x6a, 0xb6, 0x8d, 0x69, 0x66, 0xbb,
+	0x16, 0xda, 0xd1, 0xd9, 0x8d, 0xd9, 0xee, 0xd5, 0xea, 0x9d, 0xb5, 0xf4, 0x6f, 0xd8, 0x8b, 0x7f,
+	0x03, 0x00, 0x00, 0xff, 0xff, 0x7e, 0x55, 0x2d, 0x51, 0x92, 0x09, 0x00, 0x00,
+}

pb/pb.proto → internal/pb/pb.proto


Datei-Diff unterdrückt, da er zu groß ist
+ 1127 - 0
internal/pb/pb_pb2.py


pb/utils.go → internal/pb/utils.go


+ 11 - 9
blob_cache.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"log"
@@ -8,6 +8,8 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
+	"gopkg.in/src-d/hercules.v4/internal"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // BlobCache loads the blobs which correspond to the changed files in a commit.
@@ -39,7 +41,7 @@ func (blobCache *BlobCache) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (blobCache *BlobCache) Provides() []string {
 	arr := [...]string{DependencyBlobCache}
 	return arr[:]
@@ -54,14 +56,14 @@ func (blobCache *BlobCache) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (blobCache *BlobCache) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (blobCache *BlobCache) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name: ConfigBlobCacheIgnoreMissingSubmodules,
 		Description: "Specifies whether to panic if some referenced submodules do not exist and thus" +
 			" the corresponding Git objects cannot be loaded. Override this if you know that the " +
 			"history is dirty and you want to get things done.",
 		Flag:    "ignore-missing-submodules",
-		Type:    BoolConfigurationOption,
+		Type:    core.BoolConfigurationOption,
 		Default: false}}
 	return options[:]
 }
@@ -116,7 +118,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 						log.Printf("file from %s %s\n", change.From.Name,
 							change.From.TreeEntry.Hash)
 					} else {
-						cache[change.From.TreeEntry.Hash], err = createDummyBlob(
+						cache[change.From.TreeEntry.Hash], err = internal.CreateDummyBlob(
 							change.From.TreeEntry.Hash)
 					}
 				}
@@ -163,7 +165,7 @@ func (blobCache *BlobCache) getBlob(entry *object.ChangeEntry, fileGetter FileGe
 			// this is not a submodule
 			return nil, err
 		} else if blobCache.IgnoreMissingSubmodules {
-			return createDummyBlob(entry.TreeEntry.Hash)
+			return internal.CreateDummyBlob(entry.TreeEntry.Hash)
 		}
 		file, errModules := fileGetter(".gitmodules")
 		if errModules != nil {
@@ -181,7 +183,7 @@ func (blobCache *BlobCache) getBlob(entry *object.ChangeEntry, fileGetter FileGe
 		_, exists := modules.Submodules[entry.Name]
 		if exists {
 			// we found that this is a submodule
-			return createDummyBlob(entry.TreeEntry.Hash)
+			return internal.CreateDummyBlob(entry.TreeEntry.Hash)
 		}
 		return nil, err
 	}
@@ -189,5 +191,5 @@ func (blobCache *BlobCache) getBlob(entry *object.ChangeEntry, fileGetter FileGe
 }
 
 func init() {
-	Registry.Register(&BlobCache{})
+	core.Registry.Register(&BlobCache{})
 }

+ 4 - 3
blob_cache_test.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"testing"
@@ -7,6 +7,7 @@ import (
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4"
 )
 
 var testRepository *git.Repository
@@ -44,10 +45,10 @@ func TestBlobCacheMetadata(t *testing.T) {
 }
 
 func TestBlobCacheRegistration(t *testing.T) {
-	tp, exists := Registry.registered[(&BlobCache{}).Name()]
+	tp, exists := hercules.Registry.registered[(&BlobCache{}).Name()]
 	assert.True(t, exists)
 	assert.Equal(t, tp.Elem().Name(), "BlobCache")
-	tps, exists := Registry.provided[(&BlobCache{}).Provides()[0]]
+	tps, exists := hercules.Registry.provided[(&BlobCache{}).Provides()[0]]
 	assert.True(t, exists)
 	assert.Len(t, tps, 1)
 	assert.Equal(t, tps[0].Elem().Name(), "BlobCache")

+ 6 - 5
day.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"time"
@@ -6,6 +6,7 @@ import (
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // DaysSinceStart provides the relative date information for every commit.
@@ -32,7 +33,7 @@ func (days *DaysSinceStart) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (days *DaysSinceStart) Provides() []string {
 	arr := [...]string{DependencyDay}
 	return arr[:]
@@ -46,8 +47,8 @@ func (days *DaysSinceStart) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (days *DaysSinceStart) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (days *DaysSinceStart) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
@@ -103,5 +104,5 @@ func (days *DaysSinceStart) Consume(deps map[string]interface{}) (map[string]int
 }
 
 func init() {
-	Registry.Register(&DaysSinceStart{})
+	core.Registry.Register(&DaysSinceStart{})
 }

+ 1 - 1
day_test.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"testing"

+ 14 - 6
diff.go

@@ -1,9 +1,10 @@
-package hercules
+package plumbing
 
 import (
 	"bufio"
 	"bytes"
 	"errors"
+	"io"
 	"unicode/utf8"
 
 	"github.com/sergi/go-diff/diffmatchpatch"
@@ -11,6 +12,7 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // FileDiff calculates the difference of files which were modified.
@@ -43,7 +45,7 @@ func (diff *FileDiff) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (diff *FileDiff) Provides() []string {
 	arr := [...]string{DependencyFileDiff}
 	return arr[:]
@@ -58,12 +60,12 @@ func (diff *FileDiff) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (diff *FileDiff) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (diff *FileDiff) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigFileDiffDisableCleanup,
 		Description: "Do not apply additional heuristics to improve diffs.",
 		Flag:        "no-diff-cleanup",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false},
 	}
 	return options[:]
@@ -170,6 +172,12 @@ func BlobToString(file *object.Blob) (string, error) {
 	return buf.String(), nil
 }
 
+func checkClose(c io.Closer) {
+	if err := c.Close(); err != nil {
+		panic(err)
+	}
+}
+
 func init() {
-	Registry.Register(&FileDiff{})
+	core.Registry.Register(&FileDiff{})
 }

+ 1 - 1
diff_test.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"testing"

+ 9 - 8
identity.go

@@ -1,4 +1,4 @@
-package hercules
+package identity
 
 import (
 	"bufio"
@@ -8,6 +8,7 @@ import (
 
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // IdentityDetector determines the author of a commit. Same person can commit under different
@@ -54,7 +55,7 @@ func (id *IdentityDetector) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (id *IdentityDetector) Provides() []string {
 	arr := [...]string{DependencyAuthor}
 	return arr[:]
@@ -68,12 +69,12 @@ func (id *IdentityDetector) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (id *IdentityDetector) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (id *IdentityDetector) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigIdentityDetectorPeopleDictPath,
 		Description: "Path to the developers' email associations.",
 		Flag:        "people-dict",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     ""},
 	}
 	return options[:]
@@ -93,10 +94,10 @@ func (id *IdentityDetector) Configure(facts map[string]interface{}) {
 			id.LoadPeopleDict(peopleDictPath)
 			facts[FactIdentityDetectorPeopleCount] = len(id.ReversedPeopleDict) - 1
 		} else {
-			if _, exists := facts[ConfigPipelineCommits]; !exists {
+			if _, exists := facts[core.ConfigPipelineCommits]; !exists {
 				panic("IdentityDetector needs a list of commits to initialize.")
 			}
-			id.GeneratePeopleDict(facts[ConfigPipelineCommits].([]*object.Commit))
+			id.GeneratePeopleDict(facts[core.ConfigPipelineCommits].([]*object.Commit))
 			facts[FactIdentityDetectorPeopleCount] = len(id.ReversedPeopleDict)
 		}
 	} else {
@@ -279,5 +280,5 @@ func (id IdentityDetector) MergeReversedDicts(rd1, rd2 []string) (map[string][3]
 }
 
 func init() {
-	Registry.Register(&IdentityDetector{})
+	core.Registry.Register(&IdentityDetector{})
 }

+ 1 - 1
identity_test.go

@@ -1,4 +1,4 @@
-package hercules
+package identity
 
 import (
 	"io"

+ 1 - 1
mailmap.go

@@ -1,4 +1,4 @@
-package hercules
+package identity
 
 import (
 	"strings"

+ 3 - 2
mailmap_test.go

@@ -1,8 +1,9 @@
-package hercules
+package identity
 
 import (
-	"github.com/stretchr/testify/assert"
 	"testing"
+
+	"github.com/stretchr/testify/assert"
 )
 
 func TestParseMailmap(t *testing.T) {

+ 11 - 8
renames.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"log"
@@ -10,6 +10,8 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
+	"gopkg.in/src-d/hercules.v4/internal"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // RenameAnalysis improves TreeDiff's results by searching for changed blobs under different
@@ -41,7 +43,7 @@ func (ra *RenameAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (ra *RenameAnalysis) Provides() []string {
 	arr := [...]string{DependencyTreeChanges}
 	return arr[:]
@@ -56,12 +58,12 @@ func (ra *RenameAnalysis) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (ra *RenameAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (ra *RenameAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigRenameAnalysisSimilarityThreshold,
 		Description: "The threshold on the similarity index used to detect renames.",
 		Flag:        "M",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     RenameAnalysisDefaultThreshold},
 	}
 	return options[:]
@@ -202,7 +204,7 @@ func (ra *RenameAnalysis) Consume(deps map[string]interface{}) (map[string]inter
 }
 
 func (ra *RenameAnalysis) sizesAreClose(size1 int64, size2 int64) bool {
-	return abs64(size1-size2)*100/max64(1, min64(size1, size2)) <=
+	return internal.Abs64(size1-size2)*100/internal.Max64(1, internal.Min64(size1, size2)) <=
 		int64(100-ra.SimilarityThreshold)
 }
 
@@ -225,7 +227,8 @@ func (ra *RenameAnalysis) blobsAreClose(
 			common += utf8.RuneCountInString(edit.Text)
 		}
 	}
-	return common*100/max(1, min(len(src), len(dst))) >= ra.SimilarityThreshold, nil
+	similarity := common * 100 / internal.Max(1, internal.Min(len(src), len(dst)))
+	return similarity >= ra.SimilarityThreshold, nil
 }
 
 type sortableChange struct {
@@ -280,5 +283,5 @@ func (slice sortableBlobs) Swap(i, j int) {
 }
 
 func init() {
-	Registry.Register(&RenameAnalysis{})
+	core.Registry.Register(&RenameAnalysis{})
 }

+ 1 - 1
renames_test.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"github.com/stretchr/testify/assert"

+ 8 - 7
tree_diff.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"io"
@@ -6,6 +6,7 @@ import (
 
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v4/internal/core"
 )
 
 // TreeDiff generates the list of changes for a commit. A change can be either one or two blobs
@@ -37,7 +38,7 @@ func (treediff *TreeDiff) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (treediff *TreeDiff) Provides() []string {
 	arr := [...]string{DependencyTreeChanges}
 	return arr[:]
@@ -51,17 +52,17 @@ func (treediff *TreeDiff) Requires() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (treediff *TreeDiff) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (treediff *TreeDiff) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigTreeDiffEnableBlacklist,
 		Description: "Skip blacklisted directories.",
 		Flag:        "skip-blacklist",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false}, {
 		Name:        ConfigTreeDiffBlacklistedDirs,
 		Description: "List of blacklisted directories. Separated by comma \",\".",
 		Flag:        "blacklisted-dirs",
-		Type:        StringsConfigurationOption,
+		Type:        core.StringsConfigurationOption,
 		Default:     defaultBlacklistedDirs},
 	}
 	return options[:]
@@ -141,5 +142,5 @@ func (treediff *TreeDiff) Consume(deps map[string]interface{}) (map[string]inter
 }
 
 func init() {
-	Registry.Register(&TreeDiff{})
+	core.Registry.Register(&TreeDiff{})
 }

+ 1 - 1
tree_diff_test.go

@@ -1,4 +1,4 @@
-package hercules
+package plumbing
 
 import (
 	"testing"

+ 3 - 3
changes_xpather.go

@@ -1,9 +1,9 @@
-package hercules
+package uast
 
 import (
 	"bytes"
-	"log"
 	"io"
+	"log"
 
 	"github.com/minio/highwayhash"
 	"gopkg.in/bblfsh/client-go.v2/tools"
@@ -22,7 +22,7 @@ var hashKey = []byte{
 }
 
 // Extract returns the list of new or changed UAST nodes filtered by XPath.
-func (xpather ChangesXPather) Extract(changes []UASTChange) []*uast.Node {
+func (xpather ChangesXPather) Extract(changes []Change) []*uast.Node {
 	result := []*uast.Node{}
 	for _, change := range changes {
 		if change.After == nil {

+ 1 - 1
changes_xpather_test.go

@@ -1,6 +1,6 @@
 // +build !disable_babelfish
 
-package hercules
+package uast
 
 import (
 	"io/ioutil"

+ 15 - 13
diff_refiner.go

@@ -1,4 +1,4 @@
-package hercules
+package uast
 
 import (
 	"unicode/utf8"
@@ -6,6 +6,8 @@ import (
 	"github.com/sergi/go-diff/diffmatchpatch"
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing"
 )
 
 // FileDiffRefiner uses UASTs to improve the human interpretability of diffs.
@@ -22,9 +24,9 @@ func (ref *FileDiffRefiner) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (ref *FileDiffRefiner) Provides() []string {
-	arr := [...]string{DependencyFileDiff}
+	arr := [...]string{plumbing.DependencyFileDiff}
 	return arr[:]
 }
 
@@ -32,7 +34,7 @@ func (ref *FileDiffRefiner) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (ref *FileDiffRefiner) Requires() []string {
-	arr := [...]string{DependencyFileDiff, DependencyUastChanges}
+	arr := [...]string{plumbing.DependencyFileDiff, DependencyUastChanges}
 	return arr[:]
 }
 
@@ -43,8 +45,8 @@ func (ref *FileDiffRefiner) Features() []string {
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (ref *FileDiffRefiner) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (ref *FileDiffRefiner) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
@@ -61,15 +63,15 @@ func (ref *FileDiffRefiner) Initialize(repository *git.Repository) {
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
 func (ref *FileDiffRefiner) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	changesList := deps[DependencyUastChanges].([]UASTChange)
-	changes := map[string]UASTChange{}
+	changesList := deps[DependencyUastChanges].([]Change)
+	changes := map[string]Change{}
 	for _, change := range changesList {
 		if change.Before != nil && change.After != nil {
 			changes[change.Change.To.Name] = change
 		}
 	}
-	diffs := deps[DependencyFileDiff].(map[string]FileDiffData)
-	result := map[string]FileDiffData{}
+	diffs := deps[plumbing.DependencyFileDiff].(map[string]plumbing.FileDiffData)
+	result := map[string]plumbing.FileDiffData{}
 	for fileName, oldDiff := range diffs {
 		uastChange, exists := changes[fileName]
 		if !exists {
@@ -114,7 +116,7 @@ func (ref *FileDiffRefiner) Consume(deps map[string]interface{}) (map[string]int
 				}
 			}
 		})
-		newDiff := FileDiffData{
+		newDiff := plumbing.FileDiffData{
 			OldLinesOfCode: oldDiff.OldLinesOfCode,
 			NewLinesOfCode: oldDiff.NewLinesOfCode,
 			Diffs:          []diffmatchpatch.Diff{},
@@ -156,7 +158,7 @@ func (ref *FileDiffRefiner) Consume(deps map[string]interface{}) (map[string]int
 		}
 		result[fileName] = newDiff
 	}
-	return map[string]interface{}{DependencyFileDiff: result}, nil
+	return map[string]interface{}{plumbing.DependencyFileDiff: result}, nil
 }
 
 // VisitEachNode is a handy routine to execute a callback on every node in the subtree,
@@ -185,5 +187,5 @@ func countNodesInInterval(occupiedMap [][]*uast.Node, start, end int) int {
 }
 
 func init() {
-	Registry.Register(&FileDiffRefiner{})
+	core.Registry.Register(&FileDiffRefiner{})
 }

+ 1 - 1
diff_refiner_test.go

@@ -1,4 +1,4 @@
-package hercules
+package uast
 
 import (
 	"io/ioutil"

+ 84 - 82
uast.go

@@ -1,4 +1,4 @@
-package hercules
+package uast
 
 import (
 	"bytes"
@@ -25,12 +25,14 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/ioutil"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
 )
 
-// UASTExtractor retrieves UASTs from Babelfish server which correspond to changed files in a commit.
+// Extractor retrieves UASTs from Babelfish server which correspond to changed files in a commit.
 // It is a PipelineItem.
-type UASTExtractor struct {
+type Extractor struct {
 	Endpoint       string
 	Context        func() (context.Context, context.CancelFunc)
 	PoolSize       int
@@ -45,26 +47,26 @@ type UASTExtractor struct {
 const (
 	uastExtractionSkipped = -(1 << 31)
 
-	// ConfigUASTEndpoint is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTEndpoint is the name of the configuration option (Extractor.Configure())
 	// which sets the Babelfish server address.
 	ConfigUASTEndpoint = "ConfigUASTEndpoint"
-	// ConfigUASTTimeout is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTTimeout is the name of the configuration option (Extractor.Configure())
 	// which sets the maximum amount of time to wait for a Babelfish server response.
 	ConfigUASTTimeout = "ConfigUASTTimeout"
-	// ConfigUASTPoolSize is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTPoolSize is the name of the configuration option (Extractor.Configure())
 	// which sets the number of goroutines to run for UAST parse queries.
 	ConfigUASTPoolSize = "ConfigUASTPoolSize"
-	// ConfigUASTFailOnErrors is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTFailOnErrors is the name of the configuration option (Extractor.Configure())
 	// which enables early exit in case of any Babelfish UAST parsing errors.
 	ConfigUASTFailOnErrors = "ConfigUASTFailOnErrors"
-	// ConfigUASTLanguages is the name of the configuration option (UASTExtractor.Configure())
+	// ConfigUASTLanguages is the name of the configuration option (Extractor.Configure())
 	// which sets the list of languages to parse. Language names are at
 	// https://doc.bblf.sh/languages.html Names are joined with a comma ",".
 	ConfigUASTLanguages = "ConfigUASTLanguages"
 
 	// FeatureUast is the name of the Pipeline feature which activates all the items related to UAST.
 	FeatureUast = "uast"
-	// DependencyUasts is the name of the dependency provided by UASTExtractor.
+	// DependencyUasts is the name of the dependency provided by Extractor.
 	DependencyUasts = "uasts"
 )
 
@@ -93,14 +95,14 @@ func (w worker) Job(data interface{}) interface{} {
 }
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
-func (exr *UASTExtractor) Name() string {
+func (exr *Extractor) Name() string {
 	return "UAST"
 }
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
-func (exr *UASTExtractor) Provides() []string {
+// to this list. Also used by core.Registry to build the global map of providers.
+func (exr *Extractor) Provides() []string {
 	arr := [...]string{DependencyUasts}
 	return arr[:]
 }
@@ -108,51 +110,51 @@ func (exr *UASTExtractor) Provides() []string {
 // Requires returns the list of names of entities which are needed by this PipelineItem.
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
-func (exr *UASTExtractor) Requires() []string {
-	arr := [...]string{DependencyTreeChanges, DependencyBlobCache}
+func (exr *Extractor) Requires() []string {
+	arr := [...]string{items.DependencyTreeChanges, items.DependencyBlobCache}
 	return arr[:]
 }
 
 // Features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
-func (exr *UASTExtractor) Features() []string {
+func (exr *Extractor) Features() []string {
 	arr := [...]string{FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (exr *UASTExtractor) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (exr *Extractor) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigUASTEndpoint,
 		Description: "How many days there are in a single band.",
 		Flag:        "bblfsh",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     "0.0.0.0:9432"}, {
 		Name:        ConfigUASTTimeout,
 		Description: "Babelfish's server timeout in seconds.",
 		Flag:        "bblfsh-timeout",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     20}, {
 		Name:        ConfigUASTPoolSize,
 		Description: "Number of goroutines to extract UASTs.",
 		Flag:        "bblfsh-pool-size",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     runtime.NumCPU() * 2}, {
 		Name:        ConfigUASTFailOnErrors,
 		Description: "Panic if there is a UAST extraction error.",
 		Flag:        "bblfsh-fail-on-error",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false}, {
 		Name:        ConfigUASTLanguages,
 		Description: "Programming languages from which to extract UASTs. Separated by comma \",\".",
 		Flag:        "languages",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     "Python,Java,Go,JavaScript,Ruby,PHP"},
 	}
 	return options[:]
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
-func (exr *UASTExtractor) Configure(facts map[string]interface{}) {
+func (exr *Extractor) Configure(facts map[string]interface{}) {
 	if val, exists := facts[ConfigUASTEndpoint].(string); exists {
 		exr.Endpoint = val
 	}
@@ -178,7 +180,7 @@ func (exr *UASTExtractor) Configure(facts map[string]interface{}) {
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
-func (exr *UASTExtractor) Initialize(repository *git.Repository) {
+func (exr *Extractor) Initialize(repository *git.Repository) {
 	if exr.Context == nil {
 		exr.Context = func() (context.Context, context.CancelFunc) {
 			return context.Background(), nil
@@ -219,9 +221,9 @@ func (exr *UASTExtractor) Initialize(repository *git.Repository) {
 // Additionally, "commit" is always present there and represents the analysed *object.Commit.
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
-func (exr *UASTExtractor) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	cache := deps[DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
-	treeDiffs := deps[DependencyTreeChanges].(object.Changes)
+func (exr *Extractor) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+	cache := deps[items.DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
+	treeDiffs := deps[items.DependencyTreeChanges].(object.Changes)
 	uasts := map[plumbing.Hash]*uast.Node{}
 	lock := sync.RWMutex{}
 	errs := make([]error, 0)
@@ -286,7 +288,7 @@ func (exr *UASTExtractor) Consume(deps map[string]interface{}) (map[string]inter
 	return map[string]interface{}{DependencyUasts: uasts}, nil
 }
 
-func (exr *UASTExtractor) extractUAST(
+func (exr *Extractor) extractUAST(
 	client *bblfsh.Client, file *object.File) (*uast.Node, error) {
 	request := client.NewParseRequest()
 	contents, err := file.Contents()
@@ -315,7 +317,7 @@ func (exr *UASTExtractor) extractUAST(
 	return response.UAST, nil
 }
 
-func (exr *UASTExtractor) extractTask(data interface{}) interface{} {
+func (exr *Extractor) extractTask(data interface{}) interface{} {
 	task := data.(uastTask)
 	defer func() { task.Status <- 0 }()
 	node, err := exr.extractUAST(task.Client, task.File)
@@ -332,33 +334,33 @@ func (exr *UASTExtractor) extractTask(data interface{}) interface{} {
 	return nil
 }
 
-// UASTChange is the type of the items in the list of changes which is provided by UASTChanges.
-type UASTChange struct {
+// Change is the type of the items in the list of changes which is provided by Changes.
+type Change struct {
 	Before *uast.Node
 	After  *uast.Node
 	Change *object.Change
 }
 
 const (
-	// DependencyUastChanges is the name of the dependency provided by UASTChanges.
+	// DependencyUastChanges is the name of the dependency provided by Changes.
 	DependencyUastChanges = "changed_uasts"
 )
 
-// UASTChanges is a structured analog of TreeDiff: it provides UASTs for every logical change
+// Changes is a structured analog of TreeDiff: it provides UASTs for every logical change
 // in a commit. It is a PipelineItem.
-type UASTChanges struct {
+type Changes struct {
 	cache map[plumbing.Hash]*uast.Node
 }
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
-func (uc *UASTChanges) Name() string {
+func (uc *Changes) Name() string {
 	return "UASTChanges"
 }
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
-func (uc *UASTChanges) Provides() []string {
+// to this list. Also used by core.Registry to build the global map of providers.
+func (uc *Changes) Provides() []string {
 	arr := [...]string{DependencyUastChanges}
 	return arr[:]
 }
@@ -366,28 +368,28 @@ func (uc *UASTChanges) Provides() []string {
 // Requires returns the list of names of entities which are needed by this PipelineItem.
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
-func (uc *UASTChanges) Requires() []string {
-	arr := [...]string{DependencyUasts, DependencyTreeChanges}
+func (uc *Changes) Requires() []string {
+	arr := [...]string{DependencyUasts, items.DependencyTreeChanges}
 	return arr[:]
 }
 
 // Features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
-func (uc *UASTChanges) Features() []string {
+func (uc *Changes) Features() []string {
 	arr := [...]string{FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (uc *UASTChanges) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (uc *Changes) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
-func (uc *UASTChanges) Configure(facts map[string]interface{}) {}
+func (uc *Changes) Configure(facts map[string]interface{}) {}
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
-func (uc *UASTChanges) Initialize(repository *git.Repository) {
+func (uc *Changes) Initialize(repository *git.Repository) {
 	uc.cache = map[plumbing.Hash]*uast.Node{}
 }
 
@@ -396,10 +398,10 @@ func (uc *UASTChanges) Initialize(repository *git.Repository) {
 // Additionally, "commit" is always present there and represents the analysed *object.Commit.
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
-func (uc *UASTChanges) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+func (uc *Changes) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
 	uasts := deps[DependencyUasts].(map[plumbing.Hash]*uast.Node)
-	treeDiffs := deps[DependencyTreeChanges].(object.Changes)
-	commit := make([]UASTChange, 0, len(treeDiffs))
+	treeDiffs := deps[items.DependencyTreeChanges].(object.Changes)
+	commit := make([]Change, 0, len(treeDiffs))
 	for _, change := range treeDiffs {
 		action, err := change.Action()
 		if err != nil {
@@ -409,17 +411,17 @@ func (uc *UASTChanges) Consume(deps map[string]interface{}) (map[string]interfac
 		case merkletrie.Insert:
 			hashTo := change.To.TreeEntry.Hash
 			uastTo := uasts[hashTo]
-			commit = append(commit, UASTChange{Before: nil, After: uastTo, Change: change})
+			commit = append(commit, Change{Before: nil, After: uastTo, Change: change})
 			uc.cache[hashTo] = uastTo
 		case merkletrie.Delete:
 			hashFrom := change.From.TreeEntry.Hash
-			commit = append(commit, UASTChange{Before: uc.cache[hashFrom], After: nil, Change: change})
+			commit = append(commit, Change{Before: uc.cache[hashFrom], After: nil, Change: change})
 			delete(uc.cache, hashFrom)
 		case merkletrie.Modify:
 			hashFrom := change.From.TreeEntry.Hash
 			hashTo := change.To.TreeEntry.Hash
 			uastTo := uasts[hashTo]
-			commit = append(commit, UASTChange{Before: uc.cache[hashFrom], After: uastTo, Change: change})
+			commit = append(commit, Change{Before: uc.cache[hashFrom], After: uastTo, Change: change})
 			delete(uc.cache, hashFrom)
 			uc.cache[hashTo] = uastTo
 		}
@@ -427,77 +429,77 @@ func (uc *UASTChanges) Consume(deps map[string]interface{}) (map[string]interfac
 	return map[string]interface{}{DependencyUastChanges: commit}, nil
 }
 
-// UASTChangesSaver dumps changed files and corresponding UASTs for every commit.
+// ChangesSaver dumps changed files and corresponding UASTs for every commit.
 // it is a LeafPipelineItem.
-type UASTChangesSaver struct {
+type ChangesSaver struct {
 	// OutputPath points to the target directory with UASTs
 	OutputPath string
 
 	repository *git.Repository
-	result     [][]UASTChange
+	result     [][]Change
 }
 
 const (
-	// ConfigUASTChangesSaverOutputPath is the name of the configuration option
-	// (UASTChangesSaver.Configure()) which sets the target directory where to save the files.
-	ConfigUASTChangesSaverOutputPath = "UASTChangesSaver.OutputPath"
+	// ConfigChangesSaverOutputPath is the name of the configuration option
+	// (ChangesSaver.Configure()) which sets the target directory where to save the files.
+	ConfigChangesSaverOutputPath = "ChangesSaver.OutputPath"
 )
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
-func (saver *UASTChangesSaver) Name() string {
+func (saver *ChangesSaver) Name() string {
 	return "UASTChangesSaver"
 }
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
-func (saver *UASTChangesSaver) Provides() []string {
+// to this list. Also used by core.Registry to build the global map of providers.
+func (saver *ChangesSaver) Provides() []string {
 	return []string{}
 }
 
 // Requires returns the list of names of entities which are needed by this PipelineItem.
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
-func (saver *UASTChangesSaver) Requires() []string {
+func (saver *ChangesSaver) Requires() []string {
 	arr := [...]string{DependencyUastChanges}
 	return arr[:]
 }
 
 // Features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
-func (saver *UASTChangesSaver) Features() []string {
+func (saver *ChangesSaver) Features() []string {
 	arr := [...]string{FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (saver *UASTChangesSaver) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
-		Name:        ConfigUASTChangesSaverOutputPath,
+func (saver *ChangesSaver) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
+		Name:        ConfigChangesSaverOutputPath,
 		Description: "The target directory where to store the changed UAST files.",
 		Flag:        "changed-uast-dir",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     "."},
 	}
 	return options[:]
 }
 
 // Flag for the command line switch which enables this analysis.
-func (saver *UASTChangesSaver) Flag() string {
+func (saver *ChangesSaver) Flag() string {
 	return "dump-uast-changes"
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
-func (saver *UASTChangesSaver) Configure(facts map[string]interface{}) {
-	if val, exists := facts[ConfigUASTChangesSaverOutputPath]; exists {
+func (saver *ChangesSaver) Configure(facts map[string]interface{}) {
+	if val, exists := facts[ConfigChangesSaverOutputPath]; exists {
 		saver.OutputPath = val.(string)
 	}
 }
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
-func (saver *UASTChangesSaver) Initialize(repository *git.Repository) {
+func (saver *ChangesSaver) Initialize(repository *git.Repository) {
 	saver.repository = repository
-	saver.result = [][]UASTChange{}
+	saver.result = [][]Change{}
 }
 
 // Consume runs this PipelineItem on the next commit data.
@@ -505,21 +507,21 @@ func (saver *UASTChangesSaver) Initialize(repository *git.Repository) {
 // Additionally, "commit" is always present there and represents the analysed *object.Commit.
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
-func (saver *UASTChangesSaver) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	changes := deps[DependencyUastChanges].([]UASTChange)
+func (saver *ChangesSaver) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+	changes := deps[DependencyUastChanges].([]Change)
 	saver.result = append(saver.result, changes)
 	return nil, nil
 }
 
 // Finalize returns the result of the analysis. Further Consume() calls are not expected.
-func (saver *UASTChangesSaver) Finalize() interface{} {
+func (saver *ChangesSaver) Finalize() interface{} {
 	return saver.result
 }
 
 // Serialize converts the analysis result as returned by Finalize() to text or bytes.
 // The text format is YAML and the bytes format is Protocol Buffers.
-func (saver *UASTChangesSaver) Serialize(result interface{}, binary bool, writer io.Writer) error {
-	saverResult := result.([][]UASTChange)
+func (saver *ChangesSaver) Serialize(result interface{}, binary bool, writer io.Writer) error {
+	saverResult := result.([][]Change)
 	fileNames := saver.dumpFiles(saverResult)
 	if binary {
 		return saver.serializeBinary(fileNames, writer)
@@ -528,7 +530,7 @@ func (saver *UASTChangesSaver) Serialize(result interface{}, binary bool, writer
 	return nil
 }
 
-func (saver *UASTChangesSaver) dumpFiles(result [][]UASTChange) []*pb.UASTChange {
+func (saver *ChangesSaver) dumpFiles(result [][]Change) []*pb.UASTChange {
 	fileNames := []*pb.UASTChange{}
 	for i, changes := range result {
 		for j, change := range changes {
@@ -560,7 +562,7 @@ func (saver *UASTChangesSaver) dumpFiles(result [][]UASTChange) []*pb.UASTChange
 	return fileNames
 }
 
-func (saver *UASTChangesSaver) serializeText(result []*pb.UASTChange, writer io.Writer) {
+func (saver *ChangesSaver) serializeText(result []*pb.UASTChange, writer io.Writer) {
 	for _, sc := range result {
 		kv := [...]string{
 			"file: " + sc.FileName,
@@ -571,7 +573,7 @@ func (saver *UASTChangesSaver) serializeText(result []*pb.UASTChange, writer io.
 	}
 }
 
-func (saver *UASTChangesSaver) serializeBinary(result []*pb.UASTChange, writer io.Writer) error {
+func (saver *ChangesSaver) serializeBinary(result []*pb.UASTChange, writer io.Writer) error {
 	message := pb.UASTChangesSaverResults{Changes: result}
 	serialized, err := proto.Marshal(&message)
 	if err != nil {
@@ -582,7 +584,7 @@ func (saver *UASTChangesSaver) serializeBinary(result []*pb.UASTChange, writer i
 }
 
 func init() {
-	Registry.Register(&UASTExtractor{})
-	Registry.Register(&UASTChanges{})
-	Registry.Register(&UASTChangesSaver{})
+	core.Registry.Register(&Extractor{})
+	core.Registry.Register(&Changes{})
+	core.Registry.Register(&ChangesSaver{})
 }

+ 2 - 2
uast_test.go

@@ -1,6 +1,6 @@
 // +build !disable_babelfish
 
-package hercules
+package uast
 
 import (
 	"bytes"
@@ -16,7 +16,7 @@ import (
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 func fixtureUASTExtractor() *UASTExtractor {

rbtree/rbtree.go → internal/rbtree/rbtree.go


test_data/1.java → internal/test_data/1.java


test_data/2.java → internal/test_data/2.java


test_data/blob → internal/test_data/blob


test_data/burndown.pb → internal/test_data/burndown.pb


test_data/couples.pb → internal/test_data/couples.pb


test_data/gitmodules → internal/test_data/gitmodules


+ 0 - 0
internal/test_data/gitmodules_empty


test_data/identities → internal/test_data/identities


test_data/uast1.pb → internal/test_data/uast1.pb


test_data/uast2.pb → internal/test_data/uast2.pb


toposort/toposort.go → internal/toposort/toposort.go


toposort/toposort_test.go → internal/toposort/toposort_test.go


yaml/utils.go → internal/yaml/utils.go


+ 3 - 3
labours.py

@@ -28,9 +28,9 @@ if sys.version_info[0] < 3:
 
 
 PB_MESSAGES = {
-    "Burndown": "pb.pb_pb2.BurndownAnalysisResults",
-    "Couples": "pb.pb_pb2.CouplesAnalysisResults",
-    "Shotness": "pb.pb_pb2.ShotnessAnalysisResults",
+    "Burndown": "internal.pb.pb_pb2.BurndownAnalysisResults",
+    "Couples": "internal.pb.pb_pb2.CouplesAnalysisResults",
+    "Shotness": "internal.pb.pb_pb2.ShotnessAnalysisResults",
 }
 
 

+ 44 - 39
burndown.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"errors"
@@ -15,8 +15,12 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/pb"
-	"gopkg.in/src-d/hercules.v3/yaml"
+	"gopkg.in/src-d/hercules.v4/internal/burndown"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v4/internal/yaml"
 )
 
 // BurndownAnalysis allows to gather the line burndown statistics for a Git repository.
@@ -55,7 +59,7 @@ type BurndownAnalysis struct {
 	// peopleHistories is the periodic snapshots of each person's status.
 	peopleHistories [][][]int64
 	// files is the mapping <file path> -> *File.
-	files map[string]*File
+	files map[string]*burndown.File
 	// matrix is the mutual deletions and self insertions.
 	matrix []map[int]int64
 	// people is the people's individual time stats.
@@ -126,7 +130,7 @@ func (analyser *BurndownAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (analyser *BurndownAnalysis) Provides() []string {
 	return []string{}
 }
@@ -136,37 +140,38 @@ func (analyser *BurndownAnalysis) Provides() []string {
 // entities are Provides() upstream.
 func (analyser *BurndownAnalysis) Requires() []string {
 	arr := [...]string{
-		DependencyFileDiff, DependencyTreeChanges, DependencyBlobCache, DependencyDay, DependencyAuthor}
+		items.DependencyFileDiff, items.DependencyTreeChanges, items.DependencyBlobCache,
+		items.DependencyDay, identity.DependencyAuthor}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (analyser *BurndownAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (analyser *BurndownAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigBurndownGranularity,
 		Description: "How many days there are in a single band.",
 		Flag:        "granularity",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     DefaultBurndownGranularity}, {
 		Name:        ConfigBurndownSampling,
 		Description: "How frequently to record the state in days.",
 		Flag:        "sampling",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     DefaultBurndownGranularity}, {
 		Name:        ConfigBurndownTrackFiles,
 		Description: "Record detailed statistics per each file.",
 		Flag:        "burndown-files",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false}, {
 		Name:        ConfigBurndownTrackPeople,
 		Description: "Record detailed statistics per each developer.",
 		Flag:        "burndown-people",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false}, {
 		Name:        ConfigBurndownDebug,
 		Description: "Validate the trees on each step.",
 		Flag:        "burndown-debug",
-		Type:        BoolConfigurationOption,
+		Type:        core.BoolConfigurationOption,
 		Default:     false},
 	}
 	return options[:]
@@ -184,9 +189,9 @@ func (analyser *BurndownAnalysis) Configure(facts map[string]interface{}) {
 		analyser.TrackFiles = val
 	}
 	if people, exists := facts[ConfigBurndownTrackPeople].(bool); people {
-		if val, exists := facts[FactIdentityDetectorPeopleCount].(int); exists {
+		if val, exists := facts[identity.FactIdentityDetectorPeopleCount].(int); exists {
 			analyser.PeopleNumber = val
-			analyser.reversedPeopleDict = facts[FactIdentityDetectorReversedPeopleDict].([]string)
+			analyser.reversedPeopleDict = facts[identity.FactIdentityDetectorReversedPeopleDict].([]string)
 		}
 	} else if exists {
 		analyser.PeopleNumber = 0
@@ -224,7 +229,7 @@ func (analyser *BurndownAnalysis) Initialize(repository *git.Repository) {
 	analyser.globalHistory = [][]int64{}
 	analyser.fileHistories = map[string][][]int64{}
 	analyser.peopleHistories = make([][][]int64, analyser.PeopleNumber)
-	analyser.files = map[string]*File{}
+	analyser.files = map[string]*burndown.File{}
 	analyser.matrix = make([]map[int]int64, analyser.PeopleNumber)
 	analyser.people = make([]map[int]int64, analyser.PeopleNumber)
 	analyser.day = 0
@@ -241,17 +246,17 @@ func (analyser *BurndownAnalysis) Consume(deps map[string]interface{}) (map[stri
 	if sampling == 0 {
 		sampling = 1
 	}
-	author := deps[DependencyAuthor].(int)
-	analyser.day = deps[DependencyDay].(int)
+	author := deps[identity.DependencyAuthor].(int)
+	analyser.day = deps[items.DependencyDay].(int)
 	delta := (analyser.day / sampling) - (analyser.previousDay / sampling)
 	if delta > 0 {
 		analyser.previousDay = analyser.day
 		gs, fss, pss := analyser.groupStatus()
 		analyser.updateHistories(gs, fss, pss, delta)
 	}
-	cache := deps[DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
-	treeDiffs := deps[DependencyTreeChanges].(object.Changes)
-	fileDiffs := deps[DependencyFileDiff].(map[string]FileDiffData)
+	cache := deps[items.DependencyBlobCache].(map[plumbing.Hash]*object.Blob)
+	treeDiffs := deps[items.DependencyTreeChanges].(object.Changes)
+	fileDiffs := deps[items.DependencyFileDiff].(map[string]items.FileDiffData)
 	for _, change := range treeDiffs {
 		action, _ := change.Action()
 		var err error
@@ -289,7 +294,7 @@ func (analyser *BurndownAnalysis) Finalize() interface{} {
 		mrow := make([]int64, analyser.PeopleNumber+2)
 		peopleMatrix[i] = mrow
 		for key, val := range row {
-			if key == AuthorMissing {
+			if key == identity.AuthorMissing {
 				key = -1
 			} else if key == authorSelf {
 				key = -2
@@ -364,7 +369,7 @@ func (analyser *BurndownAnalysis) Deserialize(pbmessage []byte) (interface{}, er
 
 // MergeResults combines two BurndownResult-s together.
 func (analyser *BurndownAnalysis) MergeResults(
-	r1, r2 interface{}, c1, c2 *CommonAnalysisResult) interface{} {
+	r1, r2 interface{}, c1, c2 *core.CommonAnalysisResult) interface{} {
 	bar1 := r1.(BurndownResult)
 	bar2 := r2.(BurndownResult)
 	merged := BurndownResult{}
@@ -379,7 +384,7 @@ func (analyser *BurndownAnalysis) MergeResults(
 		merged.granularity = bar2.granularity
 	}
 	var people map[string][3]int
-	people, merged.reversedPeopleDict = IdentityDetector{}.MergeReversedDicts(
+	people, merged.reversedPeopleDict = identity.IdentityDetector{}.MergeReversedDicts(
 		bar1.reversedPeopleDict, bar2.reversedPeopleDict)
 	var wg sync.WaitGroup
 	if len(bar1.GlobalHistory) > 0 || len(bar2.GlobalHistory) > 0 {
@@ -498,7 +503,7 @@ func (analyser *BurndownAnalysis) MergeResults(
 // resamples them to days so that they become square, sums and resamples back to the
 // least of (sampling1, sampling2) and (granularity1, granularity2).
 func mergeMatrices(m1, m2 [][]int64, granularity1, sampling1, granularity2, sampling2 int,
-	c1, c2 *CommonAnalysisResult) [][]int64 {
+	c1, c2 *core.CommonAnalysisResult) [][]int64 {
 	commonMerged := *c1
 	commonMerged.Merge(c2)
 
@@ -802,7 +807,7 @@ func (analyser *BurndownAnalysis) packPersonWithDay(person int, day int) int {
 
 func (analyser *BurndownAnalysis) unpackPersonWithDay(value int) (int, int) {
 	if analyser.PeopleNumber == 0 {
-		return AuthorMissing, value
+		return identity.AuthorMissing, value
 	}
 	return value >> 14, value & 0x3FFF
 }
@@ -817,7 +822,7 @@ func (analyser *BurndownAnalysis) updateStatus(
 func (analyser *BurndownAnalysis) updatePeople(
 	peopleUncasted interface{}, _ int, previousValue int, delta int) {
 	previousAuthor, previousTime := analyser.unpackPersonWithDay(previousValue)
-	if previousAuthor == AuthorMissing {
+	if previousAuthor == identity.AuthorMissing {
 		return
 	}
 	people := peopleUncasted.([]map[int]int64)
@@ -835,7 +840,7 @@ func (analyser *BurndownAnalysis) updateMatrix(
 	matrix := matrixUncasted.([]map[int]int64)
 	newAuthor, _ := analyser.unpackPersonWithDay(currentTime)
 	oldAuthor, _ := analyser.unpackPersonWithDay(previousTime)
-	if oldAuthor == AuthorMissing {
+	if oldAuthor == identity.AuthorMissing {
 		return
 	}
 	if newAuthor == oldAuthor && delta > 0 {
@@ -856,24 +861,24 @@ func (analyser *BurndownAnalysis) updateMatrix(
 
 func (analyser *BurndownAnalysis) newFile(
 	author int, day int, size int, global map[int]int64, people []map[int]int64,
-	matrix []map[int]int64) *File {
-	statuses := make([]Status, 1)
-	statuses[0] = NewStatus(global, analyser.updateStatus)
+	matrix []map[int]int64) *burndown.File {
+	statuses := make([]burndown.Status, 1)
+	statuses[0] = burndown.NewStatus(global, analyser.updateStatus)
 	if analyser.TrackFiles {
-		statuses = append(statuses, NewStatus(map[int]int64{}, analyser.updateStatus))
+		statuses = append(statuses, burndown.NewStatus(map[int]int64{}, analyser.updateStatus))
 	}
 	if analyser.PeopleNumber > 0 {
-		statuses = append(statuses, NewStatus(people, analyser.updatePeople))
-		statuses = append(statuses, NewStatus(matrix, analyser.updateMatrix))
+		statuses = append(statuses, burndown.NewStatus(people, analyser.updatePeople))
+		statuses = append(statuses, burndown.NewStatus(matrix, analyser.updateMatrix))
 		day = analyser.packPersonWithDay(author, day)
 	}
-	return NewFile(day, size, statuses...)
+	return burndown.NewFile(day, size, statuses...)
 }
 
 func (analyser *BurndownAnalysis) handleInsertion(
 	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob) error {
 	blob := cache[change.To.TreeEntry.Hash]
-	lines, err := CountLines(blob)
+	lines, err := items.CountLines(blob)
 	if err != nil {
 		if err.Error() == "binary" {
 			return nil
@@ -895,7 +900,7 @@ func (analyser *BurndownAnalysis) handleDeletion(
 	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob) error {
 
 	blob := cache[change.From.TreeEntry.Hash]
-	lines, err := CountLines(blob)
+	lines, err := items.CountLines(blob)
 	if err != nil {
 		if err.Error() == "binary" {
 			return nil
@@ -911,7 +916,7 @@ func (analyser *BurndownAnalysis) handleDeletion(
 
 func (analyser *BurndownAnalysis) handleModification(
 	change *object.Change, author int, cache map[plumbing.Hash]*object.Blob,
-	diffs map[string]FileDiffData) error {
+	diffs map[string]items.FileDiffData) error {
 
 	file, exists := analyser.files[change.From.Name]
 	if !exists {
@@ -1124,5 +1129,5 @@ func (analyser *BurndownAnalysis) updateHistories(
 }
 
 func init() {
-	Registry.Register(&BurndownAnalysis{})
+	core.Registry.Register(&BurndownAnalysis{})
 }

+ 2 - 2
burndown_test.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"bytes"
@@ -11,7 +11,7 @@ import (
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 func TestBurndownMeta(t *testing.T) {

+ 18 - 15
comment_sentiment.go

@@ -1,6 +1,6 @@
 // +build tensorflow
 
-package hercules
+package leaves
 
 import (
 	"fmt"
@@ -16,7 +16,10 @@ import (
 	progress "gopkg.in/cheggaaa/pb.v1"
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	uast_items "gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
 	"gopkg.in/vmarkovtsev/BiDiSentiment.v1"
 )
 
@@ -27,7 +30,7 @@ type CommentSentimentAnalysis struct {
 
 	commentsByDay map[int][]string
 	commitsByDay  map[int][]plumbing.Hash
-	xpather       *ChangesXPather
+	xpather       *uast_items.ChangesXPather
 }
 
 // CommentSentimentResult contains the sentiment values per day, where 1 means very negative
@@ -65,7 +68,7 @@ func (sent *CommentSentimentAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (sent *CommentSentimentAnalysis) Provides() []string {
 	return []string{}
 }
@@ -74,29 +77,29 @@ func (sent *CommentSentimentAnalysis) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (sent *CommentSentimentAnalysis) Requires() []string {
-	arr := [...]string{DependencyUastChanges, DependencyDay}
+	arr := [...]string{uast_items.DependencyUastChanges, items.DependencyDay}
 	return arr[:]
 }
 
 // Features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
 func (sent *CommentSentimentAnalysis) Features() []string {
-	arr := [...]string{FeatureUast}
+	arr := [...]string{uast_items.FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (sent *CommentSentimentAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	options := [...]ConfigurationOption{{
+func (sent *CommentSentimentAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
 		Name:        ConfigCommentSentimentMinLength,
 		Description: "Minimum length of the comment to be analyzed.",
 		Flag:        "min-comment-len",
-		Type:        IntConfigurationOption,
+		Type:        core.IntConfigurationOption,
 		Default:     DefaultCommentSentimentCommentMinLength}, {
 		Name: ConfigCommentSentimentGap,
 		Description: "Sentiment value threshold, values between 0.5 - X/2 and 0.5 + x/2 will not be " +
 			"considered. Must be >= 0 and < 1. The purpose is to exclude neutral comments.",
 		Flag:    "sentiment-gap",
-		Type:    FloatConfigurationOption,
+		Type:    core.FloatConfigurationOption,
 		Default: DefaultCommentSentimentGap},
 	}
 	return options[:]
@@ -116,7 +119,7 @@ func (sent *CommentSentimentAnalysis) Configure(facts map[string]interface{}) {
 		sent.MinCommentLength = val.(int)
 	}
 	sent.validate()
-	sent.commitsByDay = facts[FactCommitsByDay].(map[int][]plumbing.Hash)
+	sent.commitsByDay = facts[items.FactCommitsByDay].(map[int][]plumbing.Hash)
 }
 
 func (sent *CommentSentimentAnalysis) validate() {
@@ -136,7 +139,7 @@ func (sent *CommentSentimentAnalysis) validate() {
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (sent *CommentSentimentAnalysis) Initialize(repository *git.Repository) {
 	sent.commentsByDay = map[int][]string{}
-	sent.xpather = &ChangesXPather{XPath: "//*[@roleComment]"}
+	sent.xpather = &uast_items.ChangesXPather{XPath: "//*[@roleComment]"}
 	sent.validate()
 }
 
@@ -146,8 +149,8 @@ func (sent *CommentSentimentAnalysis) Initialize(repository *git.Repository) {
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
 func (sent *CommentSentimentAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	changes := deps[DependencyUastChanges].([]UASTChange)
-	day := deps[DependencyDay].(int)
+	changes := deps[uast_items.DependencyUastChanges].([]uast_items.Change)
+	day := deps[items.DependencyDay].(int)
 	commentNodes := sent.xpather.Extract(changes)
 	comments := sent.mergeComments(commentNodes)
 	dayComments := sent.commentsByDay[day]
@@ -348,5 +351,5 @@ func (sent *CommentSentimentAnalysis) mergeComments(nodes []*uast.Node) []string
 }
 
 func init() {
-	Registry.Register(&CommentSentimentAnalysis{})
+	core.Registry.Register(&CommentSentimentAnalysis{})
 }

+ 3 - 3
comment_sentiment_test.go

@@ -1,6 +1,6 @@
 // +build tensorflow
 
-package hercules
+package leaves
 
 import (
 	"bytes"
@@ -11,9 +11,9 @@ import (
 	"github.com/gogo/protobuf/proto"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/bblfsh/client-go.v2"
-	"gopkg.in/src-d/go-git.v4/plumbing"
-	"gopkg.in/src-d/hercules.v3/pb"
 	"gopkg.in/bblfsh/client-go.v2/tools"
+	"gopkg.in/src-d/go-git.v4/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 func fixtureCommentSentiment() *CommentSentimentAnalysis {

+ 19 - 16
couples.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"fmt"
@@ -9,8 +9,11 @@ import (
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/pb"
-	"gopkg.in/src-d/hercules.v3/yaml"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	"gopkg.in/src-d/hercules.v4/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v4/internal/yaml"
 )
 
 // CouplesAnalysis calculates the number of common commits for files and authors.
@@ -49,7 +52,7 @@ func (couples *CouplesAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (couples *CouplesAnalysis) Provides() []string {
 	return []string{}
 }
@@ -58,20 +61,20 @@ func (couples *CouplesAnalysis) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (couples *CouplesAnalysis) Requires() []string {
-	arr := [...]string{DependencyAuthor, DependencyTreeChanges}
+	arr := [...]string{identity.DependencyAuthor, items.DependencyTreeChanges}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (couples *CouplesAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (couples *CouplesAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (couples *CouplesAnalysis) Configure(facts map[string]interface{}) {
-	if val, exists := facts[FactIdentityDetectorPeopleCount].(int); exists {
+	if val, exists := facts[identity.FactIdentityDetectorPeopleCount].(int); exists {
 		couples.PeopleNumber = val
-		couples.reversedPeopleDict = facts[FactIdentityDetectorReversedPeopleDict].([]string)
+		couples.reversedPeopleDict = facts[identity.FactIdentityDetectorReversedPeopleDict].([]string)
 	}
 }
 
@@ -97,12 +100,12 @@ func (couples *CouplesAnalysis) Initialize(repository *git.Repository) {
 // This function returns the mapping with analysis results. The keys must be the same as
 // in Provides(). If there was an error, nil is returned.
 func (couples *CouplesAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
-	author := deps[DependencyAuthor].(int)
-	if author == AuthorMissing {
+	author := deps[identity.DependencyAuthor].(int)
+	if author == identity.AuthorMissing {
 		author = couples.PeopleNumber
 	}
 	couples.peopleCommits[author]++
-	treeDiff := deps[DependencyTreeChanges].(object.Changes)
+	treeDiff := deps[items.DependencyTreeChanges].(object.Changes)
 	context := make([]string, 0)
 	deleteFile := func(name string) {
 		// we do not remove the file from people - the context does not expire
@@ -262,14 +265,14 @@ func (couples *CouplesAnalysis) Deserialize(pbmessage []byte) (interface{}, erro
 }
 
 // MergeResults combines two CouplesAnalysis-s together.
-func (couples *CouplesAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *CommonAnalysisResult) interface{} {
+func (couples *CouplesAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAnalysisResult) interface{} {
 	cr1 := r1.(CouplesResult)
 	cr2 := r2.(CouplesResult)
 	merged := CouplesResult{}
 	var people, files map[string][3]int
-	people, merged.reversedPeopleDict = IdentityDetector{}.MergeReversedDicts(
+	people, merged.reversedPeopleDict = identity.IdentityDetector{}.MergeReversedDicts(
 		cr1.reversedPeopleDict, cr2.reversedPeopleDict)
-	files, merged.Files = IdentityDetector{}.MergeReversedDicts(cr1.Files, cr2.Files)
+	files, merged.Files = identity.IdentityDetector{}.MergeReversedDicts(cr1.Files, cr2.Files)
 	merged.PeopleFiles = make([][]int, len(merged.reversedPeopleDict))
 	peopleFilesDicts := make([]map[int]bool, len(merged.reversedPeopleDict))
 	addPeopleFiles := func(peopleFiles [][]int, reversedPeopleDict []string,
@@ -461,5 +464,5 @@ func (couples *CouplesAnalysis) serializeBinary(result *CouplesResult, writer io
 }
 
 func init() {
-	Registry.Register(&CouplesAnalysis{})
+	core.Registry.Register(&CouplesAnalysis{})
 }

+ 2 - 2
couples_test.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"bytes"
@@ -10,7 +10,7 @@ import (
 	"github.com/gogo/protobuf/proto"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 func fixtureCouples() *CouplesAnalysis {

+ 10 - 8
file_history.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"fmt"
@@ -11,7 +11,9 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
 	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
 )
 
 // FileHistory contains the intermediate state which is mutated by Consume(). It should implement
@@ -32,7 +34,7 @@ func (history *FileHistory) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (history *FileHistory) Provides() []string {
 	return []string{}
 }
@@ -41,13 +43,13 @@ func (history *FileHistory) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (history *FileHistory) Requires() []string {
-	arr := [...]string{DependencyTreeChanges}
+	arr := [...]string{items.DependencyTreeChanges}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (history *FileHistory) ListConfigurationOptions() []ConfigurationOption {
-	return []ConfigurationOption{}
+func (history *FileHistory) ListConfigurationOptions() []core.ConfigurationOption {
+	return []core.ConfigurationOption{}
 }
 
 // Flag for the command line switch which enables this analysis.
@@ -72,7 +74,7 @@ func (history *FileHistory) Initialize(repository *git.Repository) {
 // in Provides(). If there was an error, nil is returned.
 func (history *FileHistory) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
 	commit := deps["commit"].(*object.Commit).Hash
-	changes := deps[DependencyTreeChanges].(object.Changes)
+	changes := deps[items.DependencyTreeChanges].(object.Changes)
 	for _, change := range changes {
 		action, _ := change.Action()
 		switch action {
@@ -150,5 +152,5 @@ func (history *FileHistory) serializeBinary(result *FileHistoryResult, writer io
 }
 
 func init() {
-	Registry.Register(&FileHistory{})
+	core.Registry.Register(&FileHistory{})
 }

+ 2 - 2
file_history_test.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"bytes"
@@ -8,7 +8,7 @@ import (
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 func fixtureFileHistory() *FileHistory {

+ 16 - 13
shotness.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"fmt"
@@ -13,7 +13,10 @@ import (
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/core"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
+	items "gopkg.in/src-d/hercules.v4/internal/plumbing"
+	uast_items "gopkg.in/src-d/hercules.v4/internal/plumbing/uast"
 )
 
 // ShotnessAnalysis contains the intermediate state which is mutated by Consume(). It should implement
@@ -75,7 +78,7 @@ func (shotness *ShotnessAnalysis) Name() string {
 
 // Provides returns the list of names of entities which are produced by this PipelineItem.
 // Each produced entity will be inserted into `deps` of dependent Consume()-s according
-// to this list. Also used by hercules.Registry to build the global map of providers.
+// to this list. Also used by core.Registry to build the global map of providers.
 func (shotness *ShotnessAnalysis) Provides() []string {
 	return []string{}
 }
@@ -84,28 +87,28 @@ func (shotness *ShotnessAnalysis) Provides() []string {
 // Each requested entity will be inserted into `deps` of Consume(). In turn, those
 // entities are Provides() upstream.
 func (shotness *ShotnessAnalysis) Requires() []string {
-	arr := [...]string{DependencyFileDiff, DependencyUastChanges}
+	arr := [...]string{items.DependencyFileDiff, uast_items.DependencyUastChanges}
 	return arr[:]
 }
 
 // Features which must be enabled for this PipelineItem to be automatically inserted into the DAG.
 func (shotness *ShotnessAnalysis) Features() []string {
-	arr := [...]string{FeatureUast}
+	arr := [...]string{uast_items.FeatureUast}
 	return arr[:]
 }
 
 // ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
-func (shotness *ShotnessAnalysis) ListConfigurationOptions() []ConfigurationOption {
-	opts := [...]ConfigurationOption{{
+func (shotness *ShotnessAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	opts := [...]core.ConfigurationOption{{
 		Name:        ConfigShotnessXpathStruct,
 		Description: "UAST XPath query to use for filtering the nodes.",
 		Flag:        "shotness-xpath-struct",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     DefaultShotnessXpathStruct}, {
 		Name:        ConfigShotnessXpathName,
 		Description: "UAST XPath query to determine the names of the filtered nodes.",
 		Flag:        "shotness-xpath-name",
-		Type:        StringConfigurationOption,
+		Type:        core.StringConfigurationOption,
 		Default:     DefaultShotnessXpathName},
 	}
 	return opts[:]
@@ -144,8 +147,8 @@ func (shotness *ShotnessAnalysis) Initialize(repository *git.Repository) {
 // in Provides(). If there was an error, nil is returned.
 func (shotness *ShotnessAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
 	commit := deps["commit"].(*object.Commit)
-	changesList := deps[DependencyUastChanges].([]UASTChange)
-	diffs := deps[DependencyFileDiff].(map[string]FileDiffData)
+	changesList := deps[uast_items.DependencyUastChanges].([]uast_items.Change)
+	diffs := deps[items.DependencyFileDiff].(map[string]items.FileDiffData)
 	allNodes := map[string]bool{}
 
 	addNode := func(name string, node *uast.Node, fileName string) {
@@ -253,7 +256,7 @@ func (shotness *ShotnessAnalysis) Consume(deps map[string]interface{}) (map[stri
 					endLine = node.EndPosition.Line
 				} else {
 					// we need to determine node.EndPosition.Line
-					VisitEachNode(node, func(child *uast.Node) {
+					uast_items.VisitEachNode(node, func(child *uast.Node) {
 						if child.StartPosition != nil {
 							candidate := child.StartPosition.Line
 							if child.EndPosition != nil {
@@ -467,5 +470,5 @@ func reverseNodeMap(nodes map[string]*uast.Node) map[*uast.Node]string {
 }
 
 func init() {
-	Registry.Register(&ShotnessAnalysis{})
+	core.Registry.Register(&ShotnessAnalysis{})
 }

+ 3 - 2
shotness_test.go

@@ -1,4 +1,4 @@
-package hercules
+package leaves
 
 import (
 	"io/ioutil"
@@ -6,12 +6,13 @@ import (
 	"testing"
 
 	"bytes"
+
 	"github.com/gogo/protobuf/proto"
 	"github.com/sergi/go-diff/diffmatchpatch"
 	"github.com/stretchr/testify/assert"
 	"gopkg.in/bblfsh/sdk.v1/uast"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/hercules.v3/pb"
+	"gopkg.in/src-d/hercules.v4/internal/pb"
 )
 
 func fixtureShotness() *ShotnessAnalysis {