Browse Source

Merge pull request #133 from vmarkovtsev/master

DevsAnalysis
Vadim Markovtsev 6 years ago
parent
commit
0adaec97d2

+ 0 - 1
.appveyor.yml

@@ -18,7 +18,6 @@ build_script:
   - set PATH=%PATH:C:\Program Files\Git\usr\bin;=%
   - set PATH=%PATH:C:\Program Files\Git\usr\bin;=%
   - cd %GOPATH%\src\gopkg.in\src-d\hercules.v5
   - cd %GOPATH%\src\gopkg.in\src-d\hercules.v5
   - go get -v github.com/golang/dep/cmd/dep
   - go get -v github.com/golang/dep/cmd/dep
-  - set DISABLE_TENSORFLOW=1
   - make
   - make
   - 7z a c:\gopath\src\gopkg.in\src-d\hercules.v5\hercules.win64.zip %GOPATH%\bin\hercules.exe
   - 7z a c:\gopath\src\gopkg.in\src-d\hercules.v5\hercules.win64.zip %GOPATH%\bin\hercules.exe
 
 

+ 14 - 8
.travis.yml

@@ -24,7 +24,10 @@ cache:
     - $HOME/gopath/src
     - $HOME/gopath/src
 before_cache:
 before_cache:
   - cd $HOME/gopath
   - cd $HOME/gopath
+  - mv $HOME/gopath/src/gopkg.in/src-d/hercules.v5/vendor $HOME/gopath/src/gopkg.in/src-d/.vendor
   - rm -rf $HOME/gopath/src/gopkg.in/src-d/hercules.v5
   - rm -rf $HOME/gopath/src/gopkg.in/src-d/hercules.v5
+  - mkdir $HOME/gopath/src/gopkg.in/src-d/hercules.v5
+  - mv $HOME/gopath/src/gopkg.in/src-d/.vendor $HOME/gopath/src/gopkg.in/src-d/hercules.v5/vendor
 
 
 matrix:
 matrix:
   fast_finish: true
   fast_finish: true
@@ -33,10 +36,12 @@ matrix:
 
 
 stages:
 stages:
   - test
   - test
-  - deploy
+  - name: deploy
+    # require any tag name to deploy
+    if: tag =~ .*
 
 
 env:
 env:
-  - PROTOC_VERSION=3.6.0 TENSORFLOW_VERSION=1.8.0
+  - PROTOC_VERSION=3.6.0 TENSORFLOW_VERSION=1.11.0
 
 
 before_install:
 before_install:
   - wget -O protoc.zip https://github.com/google/protobuf/releases/download/v$PROTOC_VERSION/protoc-$PROTOC_VERSION-linux-x86_64.zip
   - wget -O protoc.zip https://github.com/google/protobuf/releases/download/v$PROTOC_VERSION/protoc-$PROTOC_VERSION-linux-x86_64.zip
@@ -47,6 +52,7 @@ before_install:
   - export PATH=~/usr/bin:$GOPATH/bin:$PATH
   - export PATH=~/usr/bin:$GOPATH/bin:$PATH
   - make --version
   - make --version
   - pip3 --version
   - pip3 --version
+  - pip3 install --user cython
   - pip3 install --user --no-build-isolation -r requirements.txt tensorflow flake8
   - pip3 install --user --no-build-isolation -r requirements.txt tensorflow flake8
   - docker run -d --privileged -p 9432:9432 --name bblfshd bblfsh/bblfshd
   - docker run -d --privileged -p 9432:9432 --name bblfshd bblfsh/bblfshd
   - docker exec -it bblfshd bblfshctl driver install python bblfsh/python-driver:latest
   - docker exec -it bblfshd bblfshctl driver install python bblfsh/python-driver:latest
@@ -56,7 +62,7 @@ before_install:
   - sudo ldconfig
   - sudo ldconfig
 install:
 install:
   - git clean -xfd
   - git clean -xfd
-  - make
+  - make TAGS=tensorflow
 script:
 script:
   - set -e
   - set -e
   - if [ $TRAVIS_GO_VERSION = "1.11.*" ]; then test -z "$(gofmt -s -l . | grep -v vendor/)"; fi
   - if [ $TRAVIS_GO_VERSION = "1.11.*" ]; then test -z "$(gofmt -s -l . | grep -v vendor/)"; fi
@@ -65,13 +71,13 @@ script:
   - flake8
   - flake8
   - go test -coverpkg=all -v -coverprofile=coverage.txt -covermode=count gopkg.in/src-d/hercules.v5/... && sed -i '/cmd\/hercules\|core.go/d' coverage.txt
   - go test -coverpkg=all -v -coverprofile=coverage.txt -covermode=count gopkg.in/src-d/hercules.v5/... && sed -i '/cmd\/hercules\|core.go/d' coverage.txt
   - $GOPATH/bin/hercules version
   - $GOPATH/bin/hercules version
-  - $GOPATH/bin/hercules --burndown --couples --quiet --pb https://github.com/src-d/hercules > 1.pb
+  - $GOPATH/bin/hercules --burndown --couples --devs --quiet --pb https://github.com/src-d/hercules > 1.pb
   - cp 1.pb 2.pb
   - cp 1.pb 2.pb
   - $GOPATH/bin/hercules combine 1.pb 2.pb > 12.pb
   - $GOPATH/bin/hercules combine 1.pb 2.pb > 12.pb
   - ($GOPATH/bin/hercules generate-plugin -n MyPlug -o myplug && cd myplug && make)
   - ($GOPATH/bin/hercules generate-plugin -n MyPlug -o myplug && cd myplug && make)
   - (cd contrib/_plugin_example && make)
   - (cd contrib/_plugin_example && make)
-  - $GOPATH/bin/hercules --burndown --burndown-files --burndown-people --couples --quiet https://github.com/src-d/hercules | python3 labours.py -m all -o out --backend Agg --disable-projector
-  - $GOPATH/bin/hercules --burndown --burndown-files --burndown-people --couples --quiet --pb https://github.com/src-d/hercules | python3 labours.py -f pb -m all -o out --backend Agg --disable-projector
+  - $GOPATH/bin/hercules --burndown --burndown-files --burndown-people --couples --devs --quiet https://github.com/src-d/hercules | python3 labours.py -m all -o out --backend Agg --disable-projector
+  - $GOPATH/bin/hercules --burndown --burndown-files --burndown-people --couples --devs --quiet --pb https://github.com/src-d/hercules | python3 labours.py -f pb -m all -o out --backend Agg --disable-projector
   - # $GOPATH/bin/hercules --sentiment --quiet --languages Python https://github.com/src-d/hercules > /dev/null
   - # $GOPATH/bin/hercules --sentiment --quiet --languages Python https://github.com/src-d/hercules > /dev/null
   - set +e
   - set +e
   - if [ $TRAVIS_GO_VERSION = "1.11.*" ]; then bash <(curl -s https://codecov.io/bash); fi
   - if [ $TRAVIS_GO_VERSION = "1.11.*" ]; then bash <(curl -s https://codecov.io/bash); fi
@@ -99,7 +105,7 @@ jobs:
         - gzip -S .darwin_amd64.gz $GOPATH/bin/hercules
         - gzip -S .darwin_amd64.gz $GOPATH/bin/hercules
       script: skip
       script: skip
       install:
       install:
-        - DISABLE_TENSORFLOW=1 make
+        - make
       deploy:
       deploy:
         provider: releases
         provider: releases
         api_key:
         api_key:
@@ -119,7 +125,7 @@ jobs:
         - export PATH=$GOPATH/bin:$PATH
         - export PATH=$GOPATH/bin:$PATH
       script: skip
       script: skip
       install:
       install:
-        - DISABLE_TENSORFLOW=1 make
+        - make
       after_success:
       after_success:
         - gzip -S .linux_amd64.gz $GOPATH/bin/hercules
         - gzip -S .linux_amd64.gz $GOPATH/bin/hercules
       deploy:
       deploy:

+ 1 - 0
Dockerfile

@@ -29,6 +29,7 @@ echo "	$@"\n\
 echo\n\' > /browser && \
 echo\n\' > /browser && \
     chmod +x /browser && \
     chmod +x /browser && \
     curl https://bootstrap.pypa.io/get-pip.py | python3 && \
     curl https://bootstrap.pypa.io/get-pip.py | python3 && \
+    pip3 install --no-cache-dir --no-build-isolation cython && \
     pip3 install --no-cache-dir --no-build-isolation -r /root/src/gopkg.in/src-d/hercules.v5/requirements.txt https://github.com/mind/wheels/releases/download/tf1.7-cpu/tensorflow-1.7.0-cp36-cp36m-linux_x86_64.whl && \
     pip3 install --no-cache-dir --no-build-isolation -r /root/src/gopkg.in/src-d/hercules.v5/requirements.txt https://github.com/mind/wheels/releases/download/tf1.7-cpu/tensorflow-1.7.0-cp36-cp36m-linux_x86_64.whl && \
     rm -rf /root/* && \
     rm -rf /root/* && \
     apt-get remove -y software-properties-common golang-1.10-go python3-dev libyaml-dev libxml2-dev curl git make unzip g++ && \
     apt-get remove -y software-properties-common golang-1.10-go python3-dev libyaml-dev libxml2-dev curl git make unzip g++ && \

+ 1 - 3
Makefile

@@ -5,9 +5,7 @@ else
 EXE = .exe
 EXE = .exe
 endif
 endif
 PKG = $(shell go env GOOS)_$(shell go env GOARCH)
 PKG = $(shell go env GOOS)_$(shell go env GOARCH)
-ifneq (${DISABLE_TENSORFLOW},1)
-TAGS ?= tensorflow
-endif
+TAGS ?=
 BBLFSH_DEP =
 BBLFSH_DEP =
 
 
 all: ${GOPATH}/bin/hercules${EXE}
 all: ${GOPATH}/bin/hercules${EXE}

+ 19 - 13
README.md

@@ -40,7 +40,7 @@ Blog posts: [1](https://blog.sourced.tech/post/hercules.v4), [2](https://blog.so
 <p align="center">The DAG of burndown and couples analyses with UAST diff refining. Generated with <code>hercules --burndown --burndown-people --couples --feature=uast --dry-run --dump-dag doc/dag.dot https://github.com/src-d/hercules</code></p>
 <p align="center">The DAG of burndown and couples analyses with UAST diff refining. Generated with <code>hercules --burndown --burndown-people --couples --feature=uast --dry-run --dump-dag doc/dag.dot https://github.com/src-d/hercules</code></p>
 
 
 ![git/git image](doc/linux.png)
 ![git/git image](doc/linux.png)
-<p align="center">torvalds/linux line burndown (granularity 30, sampling 30, resampled by year). Generated with <code>hercules --burndown --first-parent --pb https://github.com/torvalds/linux | python3 labours.py -f pb -m project</code></p>
+<p align="center">torvalds/linux line burndown (granularity 30, sampling 30, resampled by year). Generated with <code>hercules --burndown --first-parent --pb https://github.com/torvalds/linux | python3 labours.py -f pb -m burndown-project</code></p>
 
 
 ## Installation
 ## Installation
 
 
@@ -65,8 +65,6 @@ make
 ```
 ```
 
 
 Replace `$GOPATH` with `%GOPATH%` on Windows.
 Replace `$GOPATH` with `%GOPATH%` on Windows.
-By default the build requires [`libtensorflow`](https://www.tensorflow.org/install/install_go).
-Disable the analyses which require Tensorflow with `DISABLE_TENSORFLOW=1 make`.
 
 
 ## Contributions
 ## Contributions
 
 
@@ -87,18 +85,18 @@ Some examples:
 
 
 ```
 ```
 # Use "memory" go-git backend and display the burndown plot. "memory" is the fastest but the repository's git data must fit into RAM.
 # Use "memory" go-git backend and display the burndown plot. "memory" is the fastest but the repository's git data must fit into RAM.
-hercules --burndown https://github.com/src-d/go-git | python3 labours.py -m project --resample month
+hercules --burndown https://github.com/src-d/go-git | python3 labours.py -m burndown-project --resample month
 # Use "file system" go-git backend and print some basic information about the repository.
 # Use "file system" go-git backend and print some basic information about the repository.
 hercules /path/to/cloned/go-git
 hercules /path/to/cloned/go-git
 # Use "file system" go-git backend, cache the cloned repository to /tmp/repo-cache, use Protocol Buffers and display the burndown plot without resampling.
 # Use "file system" go-git backend, cache the cloned repository to /tmp/repo-cache, use Protocol Buffers and display the burndown plot without resampling.
-hercules --burndown --pb https://github.com/git/git /tmp/repo-cache | python3 labours.py -m project -f pb --resample raw
+hercules --burndown --pb https://github.com/git/git /tmp/repo-cache | python3 labours.py -m burndown-project -f pb --resample raw
 
 
 # Now something fun
 # Now something fun
 # Get the linear history from git rev-list, reverse it
 # Get the linear history from git rev-list, reverse it
 # Pipe to hercules, produce burndown snapshots for every 30 days grouped by 30 days
 # Pipe to hercules, produce burndown snapshots for every 30 days grouped by 30 days
 # Save the raw data to cache.yaml, so that later is possible to python3 labours.py -i cache.yaml
 # Save the raw data to cache.yaml, so that later is possible to python3 labours.py -i cache.yaml
 # Pipe the raw data to labours.py, set text font size to 16pt, use Agg matplotlib backend and save the plot to output.png
 # Pipe the raw data to labours.py, set text font size to 16pt, use Agg matplotlib backend and save the plot to output.png
-git rev-list HEAD | tac | hercules --commits - --burndown https://github.com/git/git | tee cache.yaml | python3 labours.py -m project --font-size 16 --backend Agg --output git.png
+git rev-list HEAD | tac | hercules --commits - --burndown https://github.com/git/git | tee cache.yaml | python3 labours.py -m burndown-project --font-size 16 --backend Agg --output git.png
 ```
 ```
 
 
 `labours.py -i /path/to/yaml` allows to read the output from `hercules` which was saved on disk.
 `labours.py -i /path/to/yaml` allows to read the output from `hercules` which was saved on disk.
@@ -119,7 +117,7 @@ hercules --some-analysis /tmp/repo-cache
 #### Docker image
 #### Docker image
 
 
 ```
 ```
-docker run --rm srcd/hercules hercules --burndown --pb https://github.com/git/git | docker run --rm -i -v $(pwd):/io srcd/hercules labours.py -f pb -m project -o /io/git_git.png
+docker run --rm srcd/hercules hercules --burndown --pb https://github.com/git/git | docker run --rm -i -v $(pwd):/io srcd/hercules labours.py -f pb -m burndown-project -o /io/git_git.png
 ```
 ```
 
 
 ### Built-in analyses
 ### Built-in analyses
@@ -128,7 +126,7 @@ docker run --rm srcd/hercules hercules --burndown --pb https://github.com/git/gi
 
 
 ```
 ```
 hercules --burndown
 hercules --burndown
-python3 labours.py -m project
+python3 labours.py -m burndown-project
 ```
 ```
 
 
 Line burndown statistics for the whole repository.
 Line burndown statistics for the whole repository.
@@ -150,7 +148,7 @@ Unresampled bands are apparently not aligned and start from the project's birth
 
 
 ```
 ```
 hercules --burndown --burndown-files
 hercules --burndown --burndown-files
-python3 labours.py -m file
+python3 labours.py -m burndown-file
 ```
 ```
 
 
 Burndown statistics for every file in the repository which is alive in the latest revision.
 Burndown statistics for every file in the repository which is alive in the latest revision.
@@ -161,7 +159,7 @@ Note: it will generate separate graph for every file. You might don't want to ru
 
 
 ```
 ```
 hercules --burndown --burndown-people [-people-dict=/path/to/identities]
 hercules --burndown --burndown-people [-people-dict=/path/to/identities]
-python3 labours.py -m person
+python3 labours.py -m burndown-person
 ```
 ```
 
 
 Burndown statistics for the repository's contributors. If `-people-dict` is not specified, the identities are
 Burndown statistics for the repository's contributors. If `-people-dict` is not specified, the identities are
@@ -185,7 +183,7 @@ by `|`. The case is ignored.
 
 
 ```
 ```
 hercules --burndown --burndown-people [-people-dict=/path/to/identities]
 hercules --burndown --burndown-people [-people-dict=/path/to/identities]
-python3 labours.py -m churn_matrix
+python3 labours.py -m churn-matrix
 ```
 ```
 
 
 Besides the burndown information, `-people` collects the added and deleted line statistics per
 Besides the burndown information, `-people` collects the added and deleted line statistics per
@@ -278,10 +276,18 @@ E.g. [`sadly, we need to hide the rect from the documentation finder for now`](h
 is positive. Don't expect too much though - as was written, the sentiment model is
 is positive. Don't expect too much though - as was written, the sentiment model is
 general purpose and the code comments have different nature, so there is no magic (for now).
 general purpose and the code comments have different nature, so there is no magic (for now).
 
 
+Hercules must be built with "tensorflow" tag - it is not by default:
+
+```
+make TAGS=tensorflow
+```
+
+Such a build requires [`libtensorflow`](https://www.tensorflow.org/install/install_go).
+
 #### Everything in a single pass
 #### Everything in a single pass
 
 
 ```
 ```
-hercules --burndown --burndown-files --burndown-people --couples --shotness [-people-dict=/path/to/identities]
+hercules --burndown --burndown-files --burndown-people --couples --shotness --devs [-people-dict=/path/to/identities]
 python3 labours.py -m all
 python3 labours.py -m all
 ```
 ```
 
 
@@ -296,7 +302,7 @@ Hercules has a plugin system and allows to run custom analyses. See [PLUGINS.md]
 ```
 ```
 hercules --burndown --pb https://github.com/src-d/go-git > go-git.pb
 hercules --burndown --pb https://github.com/src-d/go-git > go-git.pb
 hercules --burndown --pb https://github.com/src-d/hercules > hercules.pb
 hercules --burndown --pb https://github.com/src-d/hercules > hercules.pb
-hercules combine go-git.pb hercules.pb | python3 labours.py -f pb -m project --resample M
+hercules combine go-git.pb hercules.pb | python3 labours.py -f pb -m burndown-project --resample M
 ```
 ```
 
 
 ### Bad unicode errors
 ### Bad unicode errors

+ 167 - 73
internal/pb/pb.pb.go

@@ -22,6 +22,9 @@ It has these top-level messages:
 	ShotnessAnalysisResults
 	ShotnessAnalysisResults
 	FileHistory
 	FileHistory
 	FileHistoryResultMessage
 	FileHistoryResultMessage
+	DevDay
+	DayDevs
+	DevsAnalysisResults
 	Sentiment
 	Sentiment
 	CommentSentimentResults
 	CommentSentimentResults
 	AnalysisResults
 	AnalysisResults
@@ -528,6 +531,86 @@ func (m *FileHistoryResultMessage) GetFiles() map[string]*FileHistory {
 	return nil
 	return nil
 }
 }
 
 
+type DevDay struct {
+	Commits int32 `protobuf:"varint,1,opt,name=commits,proto3" json:"commits,omitempty"`
+	Added   int32 `protobuf:"varint,2,opt,name=added,proto3" json:"added,omitempty"`
+	Removed int32 `protobuf:"varint,3,opt,name=removed,proto3" json:"removed,omitempty"`
+	Changed int32 `protobuf:"varint,4,opt,name=changed,proto3" json:"changed,omitempty"`
+}
+
+func (m *DevDay) Reset()                    { *m = DevDay{} }
+func (m *DevDay) String() string            { return proto.CompactTextString(m) }
+func (*DevDay) ProtoMessage()               {}
+func (*DevDay) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{14} }
+
+func (m *DevDay) GetCommits() int32 {
+	if m != nil {
+		return m.Commits
+	}
+	return 0
+}
+
+func (m *DevDay) GetAdded() int32 {
+	if m != nil {
+		return m.Added
+	}
+	return 0
+}
+
+func (m *DevDay) GetRemoved() int32 {
+	if m != nil {
+		return m.Removed
+	}
+	return 0
+}
+
+func (m *DevDay) GetChanged() int32 {
+	if m != nil {
+		return m.Changed
+	}
+	return 0
+}
+
+type DayDevs struct {
+	Devs map[int32]*DevDay `protobuf:"bytes,1,rep,name=devs" json:"devs,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
+}
+
+func (m *DayDevs) Reset()                    { *m = DayDevs{} }
+func (m *DayDevs) String() string            { return proto.CompactTextString(m) }
+func (*DayDevs) ProtoMessage()               {}
+func (*DayDevs) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{15} }
+
+func (m *DayDevs) GetDevs() map[int32]*DevDay {
+	if m != nil {
+		return m.Devs
+	}
+	return nil
+}
+
+type DevsAnalysisResults struct {
+	Days     map[int32]*DayDevs `protobuf:"bytes,1,rep,name=days" json:"days,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
+	DevIndex []string           `protobuf:"bytes,2,rep,name=dev_index,json=devIndex" json:"dev_index,omitempty"`
+}
+
+func (m *DevsAnalysisResults) Reset()                    { *m = DevsAnalysisResults{} }
+func (m *DevsAnalysisResults) String() string            { return proto.CompactTextString(m) }
+func (*DevsAnalysisResults) ProtoMessage()               {}
+func (*DevsAnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{16} }
+
+func (m *DevsAnalysisResults) GetDays() map[int32]*DayDevs {
+	if m != nil {
+		return m.Days
+	}
+	return nil
+}
+
+func (m *DevsAnalysisResults) GetDevIndex() []string {
+	if m != nil {
+		return m.DevIndex
+	}
+	return nil
+}
+
 type Sentiment struct {
 type Sentiment struct {
 	Value    float32  `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"`
 	Value    float32  `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"`
 	Comments []string `protobuf:"bytes,2,rep,name=comments" json:"comments,omitempty"`
 	Comments []string `protobuf:"bytes,2,rep,name=comments" json:"comments,omitempty"`
@@ -537,7 +620,7 @@ type Sentiment struct {
 func (m *Sentiment) Reset()                    { *m = Sentiment{} }
 func (m *Sentiment) Reset()                    { *m = Sentiment{} }
 func (m *Sentiment) String() string            { return proto.CompactTextString(m) }
 func (m *Sentiment) String() string            { return proto.CompactTextString(m) }
 func (*Sentiment) ProtoMessage()               {}
 func (*Sentiment) ProtoMessage()               {}
-func (*Sentiment) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{14} }
+func (*Sentiment) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{17} }
 
 
 func (m *Sentiment) GetValue() float32 {
 func (m *Sentiment) GetValue() float32 {
 	if m != nil {
 	if m != nil {
@@ -567,7 +650,7 @@ type CommentSentimentResults struct {
 func (m *CommentSentimentResults) Reset()                    { *m = CommentSentimentResults{} }
 func (m *CommentSentimentResults) Reset()                    { *m = CommentSentimentResults{} }
 func (m *CommentSentimentResults) String() string            { return proto.CompactTextString(m) }
 func (m *CommentSentimentResults) String() string            { return proto.CompactTextString(m) }
 func (*CommentSentimentResults) ProtoMessage()               {}
 func (*CommentSentimentResults) ProtoMessage()               {}
-func (*CommentSentimentResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{15} }
+func (*CommentSentimentResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{18} }
 
 
 func (m *CommentSentimentResults) GetSentimentByDay() map[int32]*Sentiment {
 func (m *CommentSentimentResults) GetSentimentByDay() map[int32]*Sentiment {
 	if m != nil {
 	if m != nil {
@@ -585,7 +668,7 @@ type AnalysisResults struct {
 func (m *AnalysisResults) Reset()                    { *m = AnalysisResults{} }
 func (m *AnalysisResults) Reset()                    { *m = AnalysisResults{} }
 func (m *AnalysisResults) String() string            { return proto.CompactTextString(m) }
 func (m *AnalysisResults) String() string            { return proto.CompactTextString(m) }
 func (*AnalysisResults) ProtoMessage()               {}
 func (*AnalysisResults) ProtoMessage()               {}
-func (*AnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{16} }
+func (*AnalysisResults) Descriptor() ([]byte, []int) { return fileDescriptorPb, []int{19} }
 
 
 func (m *AnalysisResults) GetHeader() *Metadata {
 func (m *AnalysisResults) GetHeader() *Metadata {
 	if m != nil {
 	if m != nil {
@@ -616,6 +699,9 @@ func init() {
 	proto.RegisterType((*ShotnessAnalysisResults)(nil), "ShotnessAnalysisResults")
 	proto.RegisterType((*ShotnessAnalysisResults)(nil), "ShotnessAnalysisResults")
 	proto.RegisterType((*FileHistory)(nil), "FileHistory")
 	proto.RegisterType((*FileHistory)(nil), "FileHistory")
 	proto.RegisterType((*FileHistoryResultMessage)(nil), "FileHistoryResultMessage")
 	proto.RegisterType((*FileHistoryResultMessage)(nil), "FileHistoryResultMessage")
+	proto.RegisterType((*DevDay)(nil), "DevDay")
+	proto.RegisterType((*DayDevs)(nil), "DayDevs")
+	proto.RegisterType((*DevsAnalysisResults)(nil), "DevsAnalysisResults")
 	proto.RegisterType((*Sentiment)(nil), "Sentiment")
 	proto.RegisterType((*Sentiment)(nil), "Sentiment")
 	proto.RegisterType((*CommentSentimentResults)(nil), "CommentSentimentResults")
 	proto.RegisterType((*CommentSentimentResults)(nil), "CommentSentimentResults")
 	proto.RegisterType((*AnalysisResults)(nil), "AnalysisResults")
 	proto.RegisterType((*AnalysisResults)(nil), "AnalysisResults")
@@ -624,74 +710,82 @@ func init() {
 func init() { proto.RegisterFile("pb.proto", fileDescriptorPb) }
 func init() { proto.RegisterFile("pb.proto", fileDescriptorPb) }
 
 
 var fileDescriptorPb = []byte{
 var fileDescriptorPb = []byte{
-	// 1098 bytes of a gzipped FileDescriptorProto
-	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0xcd, 0x92, 0xdb, 0xc4,
-	0x13, 0x2f, 0x59, 0xfe, 0x6c, 0xd9, 0xbb, 0xc9, 0x24, 0xff, 0xac, 0xe2, 0x7f, 0x6d, 0x30, 0x22,
-	0x80, 0x21, 0x41, 0xa1, 0x9c, 0x0b, 0x84, 0x0b, 0x1b, 0x87, 0x54, 0x72, 0x58, 0xa0, 0xc6, 0x1b,
-	0x38, 0xaa, 0x64, 0x69, 0x76, 0x2d, 0xb0, 0x66, 0x54, 0x33, 0xd2, 0xee, 0xfa, 0x65, 0xb8, 0x51,
-	0x45, 0x51, 0x45, 0x71, 0xe0, 0x05, 0x78, 0x1a, 0x2e, 0xbc, 0x04, 0x35, 0x5f, 0xb6, 0xec, 0xf2,
-	0x06, 0x6e, 0xea, 0xee, 0x5f, 0xf7, 0x74, 0xff, 0xba, 0xa7, 0x47, 0xd0, 0x2d, 0xe6, 0x61, 0xc1,
-	0x59, 0xc9, 0x82, 0xbf, 0x1a, 0xd0, 0x3d, 0x25, 0x65, 0x9c, 0xc6, 0x65, 0x8c, 0x7c, 0xe8, 0x5c,
-	0x12, 0x2e, 0x32, 0x46, 0x7d, 0x67, 0xe4, 0x8c, 0x5b, 0xd8, 0x8a, 0x08, 0x41, 0x73, 0x11, 0x8b,
-	0x85, 0xdf, 0x18, 0x39, 0xe3, 0x1e, 0x56, 0xdf, 0xe8, 0x01, 0x00, 0x27, 0x05, 0x13, 0x59, 0xc9,
-	0xf8, 0xca, 0x77, 0x95, 0xa5, 0xa6, 0x41, 0x1f, 0xc0, 0xe1, 0x9c, 0x5c, 0x64, 0x34, 0xaa, 0x68,
-	0x76, 0x1d, 0x95, 0x59, 0x4e, 0xfc, 0xe6, 0xc8, 0x19, 0xbb, 0x78, 0xa0, 0xd4, 0x6f, 0x68, 0x76,
-	0x7d, 0x96, 0xe5, 0x04, 0x05, 0x30, 0x20, 0x34, 0xad, 0xa1, 0x5a, 0x0a, 0xe5, 0x11, 0x9a, 0xae,
-	0x31, 0x3e, 0x74, 0x12, 0x96, 0xe7, 0x59, 0x29, 0xfc, 0xb6, 0xce, 0xcc, 0x88, 0xe8, 0x3e, 0x74,
-	0x79, 0x45, 0xb5, 0x63, 0x47, 0x39, 0x76, 0x78, 0x45, 0x95, 0xd3, 0x2b, 0xb8, 0x6d, 0x4d, 0x51,
-	0x41, 0x78, 0x94, 0x95, 0x24, 0xf7, 0xbb, 0x23, 0x77, 0xec, 0x4d, 0x8e, 0x43, 0x5b, 0x74, 0x88,
-	0x35, 0xfa, 0x5b, 0xc2, 0x5f, 0x97, 0x24, 0xff, 0x8a, 0x96, 0x7c, 0x85, 0x0f, 0xf8, 0x96, 0x72,
-	0x78, 0x02, 0x77, 0xf6, 0xc0, 0xd0, 0x2d, 0x70, 0x7f, 0x24, 0x2b, 0xc5, 0x55, 0x0f, 0xcb, 0x4f,
-	0x74, 0x17, 0x5a, 0x97, 0xf1, 0xb2, 0x22, 0x8a, 0x28, 0x07, 0x6b, 0xe1, 0x59, 0xe3, 0x33, 0x27,
-	0x78, 0x0a, 0x47, 0xcf, 0x2b, 0x4e, 0x53, 0x76, 0x45, 0x67, 0x45, 0xcc, 0x05, 0x39, 0x8d, 0x4b,
-	0x9e, 0x5d, 0x63, 0x76, 0xa5, 0x8b, 0x5b, 0x56, 0x39, 0x15, 0xbe, 0x33, 0x72, 0xc7, 0x03, 0x6c,
-	0xc5, 0xe0, 0x57, 0x07, 0xee, 0xee, 0xf3, 0x92, 0xfd, 0xa0, 0x71, 0x4e, 0xcc, 0xd1, 0xea, 0x1b,
-	0x3d, 0x84, 0x03, 0x5a, 0xe5, 0x73, 0xc2, 0x23, 0x76, 0x1e, 0x71, 0x76, 0x25, 0x54, 0x12, 0x2d,
-	0xdc, 0xd7, 0xda, 0x6f, 0xce, 0x31, 0xbb, 0x12, 0xe8, 0x63, 0xb8, 0xbd, 0x41, 0xd9, 0x63, 0x5d,
-	0x05, 0x3c, 0xb4, 0xc0, 0xa9, 0x56, 0xa3, 0xc7, 0xd0, 0x54, 0x71, 0x9a, 0x8a, 0x33, 0x3f, 0xbc,
-	0xa1, 0x00, 0xac, 0x50, 0xc1, 0xef, 0x8d, 0x4d, 0x89, 0x27, 0x34, 0x5e, 0xae, 0x44, 0x26, 0x30,
-	0x11, 0xd5, 0xb2, 0x14, 0x68, 0x04, 0xde, 0x05, 0x8f, 0x69, 0xb5, 0x8c, 0x79, 0x56, 0xae, 0xcc,
-	0x74, 0xd5, 0x55, 0x68, 0x08, 0x5d, 0x11, 0xe7, 0xc5, 0x32, 0xa3, 0x17, 0x26, 0xef, 0xb5, 0x8c,
-	0x9e, 0x40, 0xa7, 0xe0, 0xec, 0x07, 0x92, 0x94, 0x2a, 0x53, 0x6f, 0xf2, 0xbf, 0xfd, 0xa9, 0x58,
-	0x14, 0x7a, 0x04, 0xad, 0xf3, 0x6c, 0x49, 0x6c, 0xe6, 0x37, 0xc0, 0x35, 0x06, 0x7d, 0x02, 0xed,
-	0x82, 0xb0, 0x62, 0x29, 0x07, 0xef, 0x2d, 0x68, 0x03, 0x42, 0xaf, 0x01, 0xe9, 0xaf, 0x28, 0xa3,
-	0x25, 0xe1, 0x71, 0x52, 0xca, 0xfb, 0xd2, 0x56, 0x79, 0x0d, 0xc3, 0x29, 0xcb, 0x0b, 0x4e, 0x84,
-	0x20, 0xa9, 0x76, 0xc6, 0xec, 0xca, 0xf8, 0xdf, 0xd6, 0x5e, 0xaf, 0x37, 0x4e, 0xc1, 0x1f, 0x0e,
-	0xdc, 0xbf, 0xd1, 0x61, 0x4f, 0x3f, 0x9d, 0xff, 0xda, 0xcf, 0xc6, 0xfe, 0x7e, 0x22, 0x68, 0xca,
-	0x91, 0xf7, 0xdd, 0x91, 0x3b, 0x76, 0x71, 0xd3, 0xde, 0xf9, 0x8c, 0xa6, 0x59, 0x62, 0xc8, 0x6a,
-	0x61, 0x2b, 0xa2, 0x7b, 0xd0, 0xce, 0x68, 0x5a, 0x94, 0x5c, 0xf1, 0xe2, 0x62, 0x23, 0x05, 0x33,
-	0xe8, 0x4c, 0x59, 0x55, 0x48, 0xea, 0xee, 0x42, 0x2b, 0xa3, 0x29, 0xb9, 0x56, 0x73, 0xdb, 0xc3,
-	0x5a, 0x40, 0x13, 0x68, 0xe7, 0xaa, 0x04, 0x95, 0xc7, 0xdb, 0x59, 0x31, 0xc8, 0xe0, 0x21, 0xf4,
-	0xcf, 0x58, 0x95, 0x2c, 0x48, 0xfa, 0x32, 0x33, 0x91, 0x75, 0x07, 0x1d, 0x95, 0x94, 0x16, 0x82,
-	0x5f, 0x1c, 0xb8, 0x67, 0xce, 0xde, 0x9d, 0xb0, 0x47, 0xd0, 0x97, 0x98, 0x28, 0xd1, 0x66, 0xd3,
-	0x90, 0x6e, 0x68, 0xe0, 0xd8, 0x93, 0x56, 0x9b, 0xf7, 0x13, 0x38, 0x30, 0x3d, 0xb4, 0xf0, 0xce,
-	0x0e, 0x7c, 0xa0, 0xed, 0xd6, 0xe1, 0x53, 0xe8, 0x1b, 0x07, 0x9d, 0x95, 0xde, 0x22, 0x83, 0xb0,
-	0x9e, 0x33, 0xf6, 0x34, 0x44, 0x09, 0xc1, 0xcf, 0x0e, 0xc0, 0x9b, 0x93, 0xd9, 0xd9, 0x74, 0x11,
-	0xd3, 0x0b, 0x82, 0xfe, 0x0f, 0x3d, 0x95, 0x5e, 0xed, 0xd6, 0x76, 0xa5, 0xe2, 0x6b, 0x79, 0x73,
-	0x8f, 0x01, 0x04, 0x4f, 0xa2, 0x39, 0x39, 0x67, 0x9c, 0x98, 0x1d, 0xdb, 0x13, 0x3c, 0x79, 0xae,
-	0x14, 0xd2, 0x57, 0x9a, 0xe3, 0xf3, 0x92, 0x70, 0xb3, 0x67, 0xbb, 0x82, 0x27, 0x27, 0x52, 0x46,
-	0xef, 0x80, 0x57, 0xc5, 0xa2, 0xb4, 0xce, 0x4d, 0xbd, 0x86, 0xa5, 0xca, 0x78, 0x1f, 0x83, 0x92,
-	0x8c, 0x7b, 0x4b, 0x07, 0x97, 0x1a, 0xe5, 0x1f, 0x7c, 0x09, 0x47, 0x9b, 0x34, 0xc5, 0x2c, 0xbe,
-	0x24, 0xdc, 0x52, 0xfa, 0x3e, 0x74, 0x12, 0xad, 0x56, 0x5d, 0xf0, 0x26, 0x5e, 0xb8, 0x81, 0x62,
-	0x6b, 0x0b, 0xfe, 0x76, 0xe0, 0x60, 0xb6, 0x60, 0x25, 0x25, 0x42, 0x60, 0x92, 0x30, 0x9e, 0xa2,
-	0xf7, 0x60, 0xa0, 0x2e, 0x07, 0x8d, 0x97, 0x11, 0x67, 0x4b, 0x5b, 0x71, 0xdf, 0x2a, 0x31, 0x5b,
-	0x12, 0xd9, 0x62, 0x69, 0x93, 0xd3, 0xaa, 0x5a, 0xac, 0x84, 0xf5, 0x66, 0x73, 0x6b, 0x9b, 0x0d,
-	0x41, 0x53, 0x72, 0x65, 0x8a, 0x53, 0xdf, 0xe8, 0x73, 0xe8, 0x26, 0xac, 0x92, 0xf1, 0x84, 0xb9,
-	0xb7, 0xc7, 0xe1, 0x76, 0x16, 0xb2, 0x97, 0xca, 0xae, 0x77, 0xfa, 0x1a, 0x3e, 0xfc, 0x02, 0x06,
-	0x5b, 0xa6, 0xfa, 0x1e, 0x6f, 0xed, 0xd9, 0xe3, 0xad, 0xfa, 0x1e, 0x7f, 0x01, 0x47, 0xf6, 0x98,
-	0xdd, 0x11, 0xfc, 0x08, 0x3a, 0x5c, 0x9d, 0x6c, 0xf9, 0x3a, 0xdc, 0xc9, 0x08, 0x5b, 0x7b, 0xf0,
-	0x21, 0x78, 0x72, 0x4c, 0x5e, 0x65, 0x42, 0x3d, 0x95, 0xb5, 0xe7, 0x4d, 0xdf, 0x24, 0x2b, 0x06,
-	0x3f, 0x39, 0xe0, 0xd7, 0x90, 0xfa, 0xa8, 0x53, 0x22, 0x44, 0x7c, 0x41, 0xd0, 0xb3, 0xfa, 0x25,
-	0xf1, 0x26, 0x0f, 0xc3, 0x9b, 0x90, 0xca, 0x60, 0x78, 0xd0, 0x2e, 0xc3, 0x97, 0x00, 0x1b, 0xe5,
-	0x9e, 0x97, 0x2c, 0xa8, 0x33, 0xe0, 0x4d, 0xfa, 0x5b, 0xb1, 0x6b, 0x7c, 0x7c, 0x0f, 0xbd, 0x19,
-	0xa1, 0xf2, 0x8d, 0xa5, 0xe5, 0x86, 0x36, 0x19, 0xa8, 0x61, 0x60, 0x72, 0xb5, 0xcb, 0x72, 0x08,
-	0x2d, 0x75, 0xaf, 0x7b, 0x78, 0x2d, 0xd7, 0x2b, 0x77, 0xb7, 0x2b, 0xff, 0xd3, 0x81, 0xa3, 0xa9,
-	0x86, 0xad, 0x0f, 0xb0, 0x4c, 0x7f, 0x07, 0xb7, 0x84, 0xd5, 0x45, 0xf3, 0x55, 0x94, 0xc6, 0x2b,
-	0xc3, 0xc1, 0xe3, 0xf0, 0x06, 0x9f, 0x70, 0xad, 0x78, 0xbe, 0x7a, 0x11, 0xaf, 0xcc, 0x3b, 0x2f,
-	0xb6, 0x94, 0xc3, 0x53, 0xb8, 0xb3, 0x07, 0xb6, 0x67, 0x3e, 0x46, 0xdb, 0xec, 0xc0, 0x26, 0x7a,
-	0x9d, 0x9b, 0xdf, 0x1c, 0x38, 0xdc, 0x1d, 0x92, 0x77, 0xa1, 0xbd, 0x20, 0x71, 0x4a, 0xb8, 0x0a,
-	0xe7, 0x4d, 0x7a, 0xeb, 0x3f, 0x11, 0x6c, 0x0c, 0xe8, 0x99, 0xe4, 0x8b, 0x96, 0x6b, 0xbe, 0xbc,
-	0xc9, 0x83, 0x70, 0x27, 0x4c, 0x38, 0x35, 0x80, 0xf5, 0x6c, 0x6b, 0x51, 0xcf, 0x76, 0xcd, 0xf4,
-	0x6f, 0xff, 0x28, 0xfd, 0x5a, 0xbe, 0xf3, 0xb6, 0xfa, 0x27, 0x7c, 0xfa, 0x4f, 0x00, 0x00, 0x00,
-	0xff, 0xff, 0x0a, 0xd8, 0x24, 0x36, 0x1f, 0x0a, 0x00, 0x00,
+	// 1232 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0xcd, 0x72, 0x1b, 0x45,
+	0x10, 0xae, 0xd5, 0xbf, 0x7a, 0x25, 0x3b, 0x99, 0x98, 0x58, 0x11, 0xe5, 0x20, 0x96, 0x10, 0x0c,
+	0x09, 0x1b, 0x4a, 0xb9, 0x40, 0xb8, 0xc4, 0xb1, 0x49, 0xc5, 0x07, 0x03, 0x35, 0x4e, 0xe0, 0xb8,
+	0x35, 0xd6, 0xb6, 0xed, 0x05, 0xed, 0xac, 0x6a, 0x66, 0x57, 0xb6, 0x5e, 0x86, 0x1b, 0x55, 0x40,
+	0x15, 0xc5, 0x81, 0x17, 0xe0, 0x69, 0xb8, 0xf0, 0x12, 0xd4, 0xfc, 0x49, 0x2b, 0xd5, 0x3a, 0x70,
+	0xd9, 0xda, 0xee, 0xfe, 0x7a, 0xe6, 0xeb, 0x9f, 0xe9, 0x19, 0xe8, 0xcc, 0xce, 0xc2, 0x99, 0xc8,
+	0xf2, 0x2c, 0xf8, 0xbb, 0x06, 0x9d, 0x13, 0xcc, 0x59, 0xcc, 0x72, 0x46, 0x06, 0xd0, 0x9e, 0xa3,
+	0x90, 0x49, 0xc6, 0x07, 0xde, 0xc8, 0xdb, 0x6f, 0x52, 0x27, 0x12, 0x02, 0x8d, 0x4b, 0x26, 0x2f,
+	0x07, 0xb5, 0x91, 0xb7, 0xdf, 0xa5, 0xfa, 0x9f, 0xdc, 0x07, 0x10, 0x38, 0xcb, 0x64, 0x92, 0x67,
+	0x62, 0x31, 0xa8, 0x6b, 0x4b, 0x49, 0x43, 0x1e, 0xc2, 0xf6, 0x19, 0x5e, 0x24, 0x3c, 0x2a, 0x78,
+	0x72, 0x1d, 0xe5, 0x49, 0x8a, 0x83, 0xc6, 0xc8, 0xdb, 0xaf, 0xd3, 0xbe, 0x56, 0xbf, 0xe1, 0xc9,
+	0xf5, 0xeb, 0x24, 0x45, 0x12, 0x40, 0x1f, 0x79, 0x5c, 0x42, 0x35, 0x35, 0xca, 0x47, 0x1e, 0x2f,
+	0x31, 0x03, 0x68, 0x4f, 0xb2, 0x34, 0x4d, 0x72, 0x39, 0x68, 0x19, 0x66, 0x56, 0x24, 0xf7, 0xa0,
+	0x23, 0x0a, 0x6e, 0x1c, 0xdb, 0xda, 0xb1, 0x2d, 0x0a, 0xae, 0x9d, 0x5e, 0xc1, 0x6d, 0x67, 0x8a,
+	0x66, 0x28, 0xa2, 0x24, 0xc7, 0x74, 0xd0, 0x19, 0xd5, 0xf7, 0xfd, 0xf1, 0x5e, 0xe8, 0x82, 0x0e,
+	0xa9, 0x41, 0x7f, 0x8b, 0xe2, 0x38, 0xc7, 0xf4, 0x2b, 0x9e, 0x8b, 0x05, 0xdd, 0x12, 0x6b, 0xca,
+	0xe1, 0x01, 0xdc, 0xa9, 0x80, 0x91, 0x5b, 0x50, 0xff, 0x11, 0x17, 0x3a, 0x57, 0x5d, 0xaa, 0x7e,
+	0xc9, 0x0e, 0x34, 0xe7, 0x6c, 0x5a, 0xa0, 0x4e, 0x94, 0x47, 0x8d, 0xf0, 0xac, 0xf6, 0xb9, 0x17,
+	0x3c, 0x85, 0xdd, 0x17, 0x85, 0xe0, 0x71, 0x76, 0xc5, 0x4f, 0x67, 0x4c, 0x48, 0x3c, 0x61, 0xb9,
+	0x48, 0xae, 0x69, 0x76, 0x65, 0x82, 0x9b, 0x16, 0x29, 0x97, 0x03, 0x6f, 0x54, 0xdf, 0xef, 0x53,
+	0x27, 0x06, 0xbf, 0x79, 0xb0, 0x53, 0xe5, 0xa5, 0xea, 0xc1, 0x59, 0x8a, 0x76, 0x6b, 0xfd, 0x4f,
+	0x1e, 0xc0, 0x16, 0x2f, 0xd2, 0x33, 0x14, 0x51, 0x76, 0x1e, 0x89, 0xec, 0x4a, 0x6a, 0x12, 0x4d,
+	0xda, 0x33, 0xda, 0x6f, 0xce, 0x69, 0x76, 0x25, 0xc9, 0x27, 0x70, 0x7b, 0x85, 0x72, 0xdb, 0xd6,
+	0x35, 0x70, 0xdb, 0x01, 0x0f, 0x8d, 0x9a, 0x3c, 0x86, 0x86, 0x5e, 0xa7, 0xa1, 0x73, 0x36, 0x08,
+	0x6f, 0x08, 0x80, 0x6a, 0x54, 0xf0, 0x47, 0x6d, 0x15, 0xe2, 0x01, 0x67, 0xd3, 0x85, 0x4c, 0x24,
+	0x45, 0x59, 0x4c, 0x73, 0x49, 0x46, 0xe0, 0x5f, 0x08, 0xc6, 0x8b, 0x29, 0x13, 0x49, 0xbe, 0xb0,
+	0xdd, 0x55, 0x56, 0x91, 0x21, 0x74, 0x24, 0x4b, 0x67, 0xd3, 0x84, 0x5f, 0x58, 0xde, 0x4b, 0x99,
+	0x3c, 0x81, 0xf6, 0x4c, 0x64, 0x3f, 0xe0, 0x24, 0xd7, 0x4c, 0xfd, 0xf1, 0x3b, 0xd5, 0x54, 0x1c,
+	0x8a, 0x3c, 0x82, 0xe6, 0x79, 0x32, 0x45, 0xc7, 0xfc, 0x06, 0xb8, 0xc1, 0x90, 0x4f, 0xa1, 0x35,
+	0xc3, 0x6c, 0x36, 0x55, 0x8d, 0xf7, 0x16, 0xb4, 0x05, 0x91, 0x63, 0x20, 0xe6, 0x2f, 0x4a, 0x78,
+	0x8e, 0x82, 0x4d, 0x72, 0x75, 0x5e, 0x5a, 0x9a, 0xd7, 0x30, 0x3c, 0xcc, 0xd2, 0x99, 0x40, 0x29,
+	0x31, 0x36, 0xce, 0x34, 0xbb, 0xb2, 0xfe, 0xb7, 0x8d, 0xd7, 0xf1, 0xca, 0x29, 0xf8, 0xd3, 0x83,
+	0x7b, 0x37, 0x3a, 0x54, 0xd4, 0xd3, 0xfb, 0xbf, 0xf5, 0xac, 0x55, 0xd7, 0x93, 0x40, 0x43, 0xb5,
+	0xfc, 0xa0, 0x3e, 0xaa, 0xef, 0xd7, 0x69, 0xc3, 0x9d, 0xf9, 0x84, 0xc7, 0xc9, 0xc4, 0x26, 0xab,
+	0x49, 0x9d, 0x48, 0xee, 0x42, 0x2b, 0xe1, 0xf1, 0x2c, 0x17, 0x3a, 0x2f, 0x75, 0x6a, 0xa5, 0xe0,
+	0x14, 0xda, 0x87, 0x59, 0x31, 0x53, 0xa9, 0xdb, 0x81, 0x66, 0xc2, 0x63, 0xbc, 0xd6, 0x7d, 0xdb,
+	0xa5, 0x46, 0x20, 0x63, 0x68, 0xa5, 0x3a, 0x04, 0xcd, 0xe3, 0xed, 0x59, 0xb1, 0xc8, 0xe0, 0x01,
+	0xf4, 0x5e, 0x67, 0xc5, 0xe4, 0x12, 0xe3, 0x97, 0x89, 0x5d, 0xd9, 0x54, 0xd0, 0xd3, 0xa4, 0x8c,
+	0x10, 0xfc, 0xe2, 0xc1, 0x5d, 0xbb, 0xf7, 0x66, 0x87, 0x3d, 0x82, 0x9e, 0xc2, 0x44, 0x13, 0x63,
+	0xb6, 0x05, 0xe9, 0x84, 0x16, 0x4e, 0x7d, 0x65, 0x75, 0xbc, 0x9f, 0xc0, 0x96, 0xad, 0xa1, 0x83,
+	0xb7, 0x37, 0xe0, 0x7d, 0x63, 0x77, 0x0e, 0x9f, 0x41, 0xcf, 0x3a, 0x18, 0x56, 0x66, 0x8a, 0xf4,
+	0xc3, 0x32, 0x67, 0xea, 0x1b, 0x88, 0x16, 0x82, 0x9f, 0x3d, 0x80, 0x37, 0x07, 0xa7, 0xaf, 0x0f,
+	0x2f, 0x19, 0xbf, 0x40, 0xf2, 0x2e, 0x74, 0x35, 0xbd, 0xd2, 0xa9, 0xed, 0x28, 0xc5, 0xd7, 0xea,
+	0xe4, 0xee, 0x01, 0x48, 0x31, 0x89, 0xce, 0xf0, 0x3c, 0x13, 0x68, 0x67, 0x6c, 0x57, 0x8a, 0xc9,
+	0x0b, 0xad, 0x50, 0xbe, 0xca, 0xcc, 0xce, 0x73, 0x14, 0x76, 0xce, 0x76, 0xa4, 0x98, 0x1c, 0x28,
+	0x99, 0xbc, 0x07, 0x7e, 0xc1, 0x64, 0xee, 0x9c, 0x1b, 0x66, 0x0c, 0x2b, 0x95, 0xf5, 0xde, 0x03,
+	0x2d, 0x59, 0xf7, 0xa6, 0x59, 0x5c, 0x69, 0xb4, 0x7f, 0xf0, 0x1c, 0x76, 0x57, 0x34, 0xe5, 0x29,
+	0x9b, 0xa3, 0x70, 0x29, 0xfd, 0x10, 0xda, 0x13, 0xa3, 0xd6, 0x55, 0xf0, 0xc7, 0x7e, 0xb8, 0x82,
+	0x52, 0x67, 0x0b, 0xfe, 0xf1, 0x60, 0xeb, 0xf4, 0x32, 0xcb, 0x39, 0x4a, 0x49, 0x71, 0x92, 0x89,
+	0x98, 0x7c, 0x00, 0x7d, 0x7d, 0x38, 0x38, 0x9b, 0x46, 0x22, 0x9b, 0xba, 0x88, 0x7b, 0x4e, 0x49,
+	0xb3, 0x29, 0xaa, 0x12, 0x2b, 0x9b, 0xea, 0x56, 0x5d, 0x62, 0x2d, 0x2c, 0x27, 0x5b, 0xbd, 0x34,
+	0xd9, 0x08, 0x34, 0x54, 0xae, 0x6c, 0x70, 0xfa, 0x9f, 0x7c, 0x01, 0x9d, 0x49, 0x56, 0xa8, 0xf5,
+	0xa4, 0x3d, 0xb7, 0x7b, 0xe1, 0x3a, 0x0b, 0x55, 0x4b, 0x6d, 0x37, 0x33, 0x7d, 0x09, 0x1f, 0x7e,
+	0x09, 0xfd, 0x35, 0x53, 0x79, 0x8e, 0x37, 0x2b, 0xe6, 0x78, 0xb3, 0x3c, 0xc7, 0x8f, 0x60, 0xd7,
+	0x6d, 0xb3, 0xd9, 0x82, 0x1f, 0x43, 0x5b, 0xe8, 0x9d, 0x5d, 0xbe, 0xb6, 0x37, 0x18, 0x51, 0x67,
+	0x0f, 0x3e, 0x02, 0x5f, 0xb5, 0xc9, 0xab, 0x44, 0xea, 0xab, 0xb2, 0x74, 0xbd, 0x99, 0x93, 0xe4,
+	0xc4, 0xe0, 0x27, 0x0f, 0x06, 0x25, 0xa4, 0xd9, 0xea, 0x04, 0xa5, 0x64, 0x17, 0x48, 0x9e, 0x95,
+	0x0f, 0x89, 0x3f, 0x7e, 0x10, 0xde, 0x84, 0xd4, 0x06, 0x9b, 0x07, 0xe3, 0x32, 0x7c, 0x09, 0xb0,
+	0x52, 0x56, 0xdc, 0x64, 0x41, 0x39, 0x03, 0xfe, 0xb8, 0xb7, 0xb6, 0x76, 0x29, 0x1f, 0x53, 0x68,
+	0x1d, 0xe1, 0xfc, 0x88, 0x6d, 0x04, 0xb1, 0x76, 0x47, 0xef, 0x40, 0x93, 0xc5, 0x31, 0xc6, 0x2e,
+	0x9b, 0x5a, 0x50, 0x78, 0x81, 0x69, 0x36, 0xc7, 0xd8, 0xde, 0x3f, 0x4e, 0xd4, 0x2b, 0xe9, 0xe6,
+	0x8a, 0x75, 0xc9, 0x9b, 0xae, 0xd7, 0xe2, 0x40, 0x42, 0xfb, 0x88, 0x2d, 0x8e, 0x70, 0x2e, 0xc9,
+	0x43, 0x68, 0xc4, 0x38, 0x77, 0xb1, 0x93, 0xd0, 0xea, 0x43, 0xf5, 0x31, 0x91, 0x6a, 0xfb, 0xf0,
+	0x39, 0x74, 0x97, 0xaa, 0x8a, 0x4a, 0xef, 0xad, 0xc7, 0xd9, 0x0e, 0x4d, 0x34, 0xe5, 0x10, 0x7f,
+	0xf5, 0xe0, 0x8e, 0x5a, 0x62, 0xb3, 0xde, 0x63, 0x35, 0x4e, 0x17, 0x8e, 0xc1, 0xfd, 0xb0, 0x02,
+	0xa3, 0x58, 0x2d, 0xd9, 0xb0, 0x85, 0x54, 0x67, 0x39, 0xc6, 0x79, 0x64, 0xa6, 0x66, 0x4d, 0xd7,
+	0xba, 0x13, 0xe3, 0xfc, 0x58, 0xc9, 0xc3, 0x03, 0xe8, 0x2e, 0xf1, 0x15, 0x54, 0xef, 0xaf, 0x53,
+	0xed, 0xb8, 0x90, 0xcb, 0x5c, 0xbf, 0x87, 0xee, 0x29, 0x72, 0xf5, 0xe4, 0xe1, 0xf9, 0xaa, 0x8b,
+	0xd5, 0x22, 0x35, 0x0b, 0x53, 0x37, 0xad, 0x2a, 0x0c, 0xf2, 0x5c, 0x3a, 0x06, 0x4e, 0x2e, 0xd7,
+	0xb0, 0xbe, 0xde, 0x88, 0x7f, 0x79, 0xb0, 0x7b, 0x68, 0x60, 0xcb, 0x0d, 0x5c, 0x22, 0xbe, 0x83,
+	0x5b, 0xd2, 0xe9, 0xa2, 0xb3, 0x45, 0x14, 0xb3, 0x85, 0x4d, 0xca, 0xe3, 0xf0, 0x06, 0x9f, 0x70,
+	0xa9, 0x78, 0xb1, 0x38, 0x62, 0x0b, 0xfb, 0xec, 0x92, 0x6b, 0xca, 0xe1, 0x09, 0xdc, 0xa9, 0x80,
+	0x55, 0x64, 0x66, 0xb4, 0x9e, 0x19, 0x58, 0xad, 0x5e, 0xce, 0xcd, 0xef, 0x1e, 0x6c, 0x6f, 0xd6,
+	0xf0, 0x7d, 0x68, 0x5d, 0x22, 0x8b, 0x51, 0xe8, 0xe5, 0xfc, 0x71, 0x77, 0xf9, 0x30, 0xa4, 0xd6,
+	0x40, 0x9e, 0xa9, 0x7c, 0xf1, 0x7c, 0x99, 0x2f, 0x55, 0xea, 0xcd, 0x32, 0x1f, 0x5a, 0xc0, 0x72,
+	0xd4, 0x18, 0xd1, 0x8c, 0x9a, 0x92, 0xe9, 0xbf, 0x9e, 0x8c, 0xbd, 0x12, 0xdf, 0xb3, 0x96, 0x7e,
+	0xa2, 0x3f, 0xfd, 0x37, 0x00, 0x00, 0xff, 0xff, 0xc2, 0x05, 0x8d, 0xb5, 0xae, 0x0b, 0x00, 0x00,
 }
 }

+ 16 - 0
internal/pb/pb.proto

@@ -107,6 +107,22 @@ message FileHistoryResultMessage {
     map<string, FileHistory> files = 1;
     map<string, FileHistory> files = 1;
 }
 }
 
 
+message DevDay {
+    int32 commits = 1;
+    int32 added = 2;
+    int32 removed = 3;
+    int32 changed = 4;
+}
+
+message DayDevs {
+    map<int32, DevDay> devs = 1;
+}
+
+message DevsAnalysisResults {
+    map<int32, DayDevs> days = 1;
+    repeated string dev_index = 2;
+}
+
 message Sentiment {
 message Sentiment {
     float value = 1;
     float value = 1;
     repeated string comments = 2;
     repeated string comments = 2;

File diff suppressed because it is too large
+ 342 - 104
internal/pb/pb_pb2.py


+ 2 - 2
internal/plumbing/uast/uast.go

@@ -571,8 +571,8 @@ func (saver *ChangesSaver) serializeBinary(result []*pb.UASTChange, writer io.Wr
 	if err != nil {
 	if err != nil {
 		return err
 		return err
 	}
 	}
-	writer.Write(serialized)
-	return nil
+	_, err = writer.Write(serialized)
+	return err
 }
 }
 
 
 func init() {
 func init() {

+ 226 - 34
labours.py

@@ -1,5 +1,8 @@
 #!/usr/bin/env python3
 #!/usr/bin/env python3
 import argparse
 import argparse
+from collections import defaultdict, namedtuple
+from datetime import datetime, timedelta
+from importlib import import_module
 import io
 import io
 import json
 import json
 import os
 import os
@@ -11,8 +14,7 @@ import tempfile
 import threading
 import threading
 import time
 import time
 import warnings
 import warnings
-from datetime import datetime, timedelta
-from importlib import import_module
+
 
 
 try:
 try:
     from clint.textui import progress
     from clint.textui import progress
@@ -28,13 +30,6 @@ if sys.version_info[0] < 3:
     input = raw_input  # noqa: F821
     input = raw_input  # noqa: F821
 
 
 
 
-PB_MESSAGES = {
-    "Burndown": "internal.pb.pb_pb2.BurndownAnalysisResults",
-    "Couples": "internal.pb.pb_pb2.CouplesAnalysisResults",
-    "Shotness": "internal.pb.pb_pb2.ShotnessAnalysisResults",
-}
-
-
 def list_matplotlib_styles():
 def list_matplotlib_styles():
     script = "import sys; from matplotlib import pyplot; " \
     script = "import sys; from matplotlib import pyplot; " \
              "sys.stdout.write(repr(pyplot.style.available))"
              "sys.stdout.write(repr(pyplot.style.available))"
@@ -64,8 +59,9 @@ def parse_args():
                         help="Occupy 100%% height for every measurement.")
                         help="Occupy 100%% height for every measurement.")
     parser.add_argument("--couples-tmp-dir", help="Temporary directory to work with couples.")
     parser.add_argument("--couples-tmp-dir", help="Temporary directory to work with couples.")
     parser.add_argument("-m", "--mode",
     parser.add_argument("-m", "--mode",
-                        choices=["project", "file", "person", "churn_matrix", "ownership",
-                                 "couples", "shotness", "sentiment", "all", "run_times"],
+                        choices=["burndown-project", "burndown-file", "burndown-person",
+                                 "churn-matrix", "ownership", "couples", "shotness", "sentiment",
+                                 "devs", "all", "run-times"],
                         help="What to plot.")
                         help="What to plot.")
     parser.add_argument(
     parser.add_argument(
         "--resample", default="year",
         "--resample", default="year",
@@ -121,6 +117,12 @@ class Reader(object):
     def get_shotness(self):
     def get_shotness(self):
         raise NotImplementedError
         raise NotImplementedError
 
 
+    def get_sentiment(self):
+        raise NotImplementedError
+
+    def get_devs(self):
+        raise NotImplementedError
+
 
 
 class YamlReader(Reader):
 class YamlReader(Reader):
     def read(self, file):
     def read(self, file):
@@ -224,6 +226,12 @@ class YamlReader(Reader):
             "Value": float(vals[0])
             "Value": float(vals[0])
         } for key, vals in self.data["Sentiment"].items()})
         } for key, vals in self.data["Sentiment"].items()})
 
 
+    def get_devs(self):
+        people = self.data["Devs"]["people"]
+        days = {int(d): {int(dev): DevDay(*(int(x) for x in day)) for dev, day in devs.items()}
+                for d, devs in self.data["Devs"]["days"].items()}
+        return days, people
+
     def _parse_burndown_matrix(self, matrix):
     def _parse_burndown_matrix(self, matrix):
         return numpy.array([numpy.fromstring(line, dtype=int, sep=" ")
         return numpy.array([numpy.fromstring(line, dtype=int, sep=" ")
                             for line in matrix.split("\n")])
                             for line in matrix.split("\n")])
@@ -336,6 +344,13 @@ class ProtobufReader(Reader):
             raise KeyError
             raise KeyError
         return byday
         return byday
 
 
+    def get_devs(self):
+        people = list(self.contents["Devs"].dev_index)
+        days = {d: {dev: DevDay(stats.commits, stats.added, stats.removed, stats.changed)
+                    for dev, stats in day.devs.items()}
+                for d, day in self.contents["Devs"].days.items()}
+        return days, people
+
     def _parse_burndown_matrix(self, matrix):
     def _parse_burndown_matrix(self, matrix):
         dense = numpy.zeros((matrix.number_of_rows, matrix.number_of_columns), dtype=int)
         dense = numpy.zeros((matrix.number_of_rows, matrix.number_of_columns), dtype=int)
         for y, row in enumerate(matrix.rows):
         for y, row in enumerate(matrix.rows):
@@ -350,6 +365,12 @@ class ProtobufReader(Reader):
 
 
 
 
 READERS = {"yaml": YamlReader, "yml": YamlReader, "pb": ProtobufReader}
 READERS = {"yaml": YamlReader, "yml": YamlReader, "pb": ProtobufReader}
+PB_MESSAGES = {
+    "Burndown": "internal.pb.pb_pb2.BurndownAnalysisResults",
+    "Couples": "internal.pb.pb_pb2.CouplesAnalysisResults",
+    "Shotness": "internal.pb.pb_pb2.ShotnessAnalysisResults",
+    "Devs": "internal.pb.pb_pb2.DevsAnalysisResults",
+}
 
 
 
 
 def read_input(args):
 def read_input(args):
@@ -366,6 +387,9 @@ def read_input(args):
     return reader
     return reader
 
 
 
 
+DevDay = namedtuple("DevDay", ("Commits", "Added", "Removed", "Changed"))
+
+
 def calculate_average_lifetime(matrix):
 def calculate_average_lifetime(matrix):
     lifetimes = numpy.zeros(matrix.shape[1] - 1)
     lifetimes = numpy.zeros(matrix.shape[1] - 1)
     for band in matrix:
     for band in matrix:
@@ -717,7 +741,6 @@ def plot_burndown(args, target, name, matrix, date_range_sampling, labels, granu
         legend_loc = 3
         legend_loc = 3
     else:
     else:
         legend_loc = 2
         legend_loc = 2
-    pyplot.style.use("ggplot")
     legend = pyplot.legend(loc=legend_loc, fontsize=args.font_size)
     legend = pyplot.legend(loc=legend_loc, fontsize=args.font_size)
     pyplot.ylabel("Lines of code")
     pyplot.ylabel("Lines of code")
     pyplot.xlabel("Time")
     pyplot.xlabel("Time")
@@ -1086,12 +1109,12 @@ def show_shotness_stats(data):
         print("%8d  %s:%s [%s]" % (count, r.file, r.name, r.internal_role))
         print("%8d  %s:%s [%s]" % (count, r.file, r.name, r.internal_role))
 
 
 
 
-def show_sentiment_stats(args, name, resample, start, data):
+def show_sentiment_stats(args, name, resample, start_date, data):
     matplotlib, pyplot = import_pyplot(args.backend, args.style)
     matplotlib, pyplot = import_pyplot(args.backend, args.style)
 
 
-    start = datetime.fromtimestamp(start)
+    start_date = datetime.fromtimestamp(start_date)
     data = sorted(data.items())
     data = sorted(data.items())
-    xdates = [start + timedelta(days=d[0]) for d in data]
+    xdates = [start_date + timedelta(days=d[0]) for d in data]
     xpos = []
     xpos = []
     ypos = []
     ypos = []
     xneg = []
     xneg = []
@@ -1152,6 +1175,168 @@ def show_sentiment_stats(args, name, resample, start, data):
     deploy_plot(title, args.output, args.style)
     deploy_plot(title, args.output, args.style)
 
 
 
 
+def show_devs(args, name, start_date, end_date, data):
+    try:
+        from fastdtw import fastdtw
+    except ImportError as e:
+        print("Cannot import fastdtw: %s\nInstall it from https://github.com/slaypni/fastdtw" % e)
+        sys.exit(1)
+    try:
+        from ortools.constraint_solver import pywrapcp, routing_enums_pb2
+    except ImportError as e:
+        print("Cannot import ortools: %s\nInstall it from "
+              "https://developers.google.com/optimization/install/python/" % e)
+        sys.exit(1)
+    try:
+        from hdbscan import HDBSCAN
+    except ImportError as e:
+        print("Cannot import ortools: %s\nInstall it from "
+              "https://developers.google.com/optimization/install/python/" % e)
+        sys.exit(1)
+    from scipy.signal import convolve, slepian
+
+    days, people = data
+    max_people = 50
+    if len(people) > max_people:
+        print("Picking top 100 developers by commit count")
+        # pick top N developers by commit count
+        commits = defaultdict(int)
+        for devs in days.values():
+            for dev, stats in devs.items():
+                commits[dev] += stats.Commits
+        commits = sorted(((v, k) for k, v in commits.items()), reverse=True)
+        chosen_people = {people[k] for _, k in commits[:max_people]}
+    else:
+        chosen_people = set(people)
+    devseries = defaultdict(list)
+    for day, devs in sorted(days.items()):
+        for dev, stats in devs.items():
+            if people[dev] in chosen_people:
+                devseries[dev].append((day, stats.Commits))
+    print("Calculating the distance matrix")
+    # max-normalize the time series using a sliding window
+    keys = list(devseries.keys())
+    series = list(devseries.values())
+    for i, s in enumerate(series):
+        arr = numpy.array(s).transpose().astype(numpy.float32)
+        commits = arr[1]
+        if len(commits) < 7:
+            commits /= commits.max()
+        else:
+            # 4 is sizeof(float32)
+            windows = numpy.lib.stride_tricks.as_strided(commits, [len(commits) - 6, 7], [4, 4])
+            commits = numpy.concatenate((
+                [windows[0, 0] / windows[0].max(),
+                 windows[0, 1] / windows[0].max(),
+                 windows[0, 2] / windows[0].max()],
+                windows[:, 3] / windows.max(axis=1),
+                [windows[-1, 4] / windows[-1].max(),
+                 windows[-1, 5] / windows[-1].max(),
+                 windows[-1, 6] / windows[-1].max()]
+            ))
+        arr[1] = commits * 7  # 7 is a pure heuristic here and is not related to window size
+        series[i] = list(arr.transpose())
+    # calculate the distance matrix using dynamic time warping metric
+    dists = numpy.full((len(series)+1, len(series)+1), -100500, dtype=numpy.float32)
+    for x in range(len(series)):
+        dists[x, x] = 0
+        for y in range(x + 1, len(series)):
+            # L1 norm
+            dist, _ = fastdtw(series[x], series[y], radius=5, dist=1)
+            dists[x, y] = dists[y, x] = dist
+    # preparation for seriation ordering
+    dists[len(series), :] = 0
+    dists[:, len(series)] = 0
+    assert (dists >= 0).all()
+    print("Ordering the series")
+    # solve the TSP on the distance matrix
+    routing = pywrapcp.RoutingModel(dists.shape[0], 1, len(series))
+
+    def dist_callback(x, y):
+        # ortools wants integers, so we approximate here
+        return int(dists[x][y] * 1000)
+
+    routing.SetArcCostEvaluatorOfAllVehicles(dist_callback)
+    search_parameters = pywrapcp.RoutingModel.DefaultSearchParameters()
+    search_parameters.local_search_metaheuristic = (
+        routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
+    search_parameters.time_limit_ms = 2000
+    assignment = routing.SolveWithParameters(search_parameters)
+    index = routing.Start(0)
+    route = []
+    while not routing.IsEnd(index):
+        node = routing.IndexToNode(index)
+        if node < len(keys):
+            route.append(node)
+        index = assignment.Value(routing.NextVar(index))
+    route_map = {v: i for i, v in enumerate(route)}
+
+    # determine clusters
+    opt_dist_chain = numpy.cumsum(numpy.array(
+        [0] + [dists[route[i], route[i + 1]] for i in range(len(route)-1)]))
+    clusters = HDBSCAN(min_cluster_size=2).fit_predict(opt_dist_chain[:, numpy.newaxis])
+    route = [keys[node] for node in route]
+
+    print("Plotting")
+    # smooth time series
+    start_date = datetime.fromtimestamp(start_date)
+    start_date = datetime(start_date.year, start_date.month, start_date.day)
+    end_date = datetime.fromtimestamp(end_date)
+    end_date = datetime(end_date.year, end_date.month, end_date.day)
+    size = (end_date - start_date).days + 1
+    plot_x = [start_date + timedelta(days=i) for i in range(size)]
+    resolution = 64
+    window = slepian(size // resolution, 0.5)
+    series = list(devseries.values())
+    final = numpy.zeros((len(devseries), size), dtype=numpy.float32)
+    for i, s in enumerate(series):
+        arr = numpy.array(s).transpose()
+        full_history = numpy.zeros(size, dtype=numpy.float32)
+        full_history[arr[0]] = arr[1]
+        final[route_map[i]] = convolve(full_history, window, "same")
+
+    matplotlib, pyplot = import_pyplot(args.backend, args.style)
+    prop_cycle = pyplot.rcParams["axes.prop_cycle"]
+    colors = prop_cycle.by_key()["color"]
+    fig, axes = pyplot.subplots(final.shape[0], 1)
+    for ax, series, cluster, dev_i in zip(axes, final, clusters, route):
+        if cluster >= 0:
+            color = colors[cluster % len(colors)]
+        else:
+            # outlier
+            color = "grey"
+        ax.plot(plot_x, series, color=color)
+        ax.set_axis_off()
+        author = people[dev_i]
+        ax.text(0.03, 0.5, author[:36] + (author[36:] and "..."),
+                horizontalalignment="right", verticalalignment="center",
+                transform=ax.transAxes, fontsize=14)
+        ax.text(0.97, 0.5, sum(p[1] for p in devseries[dev_i]),
+                horizontalalignment="left", verticalalignment="center",
+                transform=ax.transAxes, fontsize=14)
+    axes[-1].set_axis_on()
+    target_num_labels = 12
+    num_months = (end_date.year - start_date.year) * 12 + end_date.month - start_date.month
+    interval = int(numpy.ceil(num_months / target_num_labels))
+    if interval >= 8:
+        interval = int(numpy.ceil(num_months / (12 * target_num_labels)))
+        axes[-1].xaxis.set_major_locator(matplotlib.dates.YearLocator(interval=interval))
+        axes[-1].xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%Y"))
+    else:
+        axes[-1].xaxis.set_major_locator(matplotlib.dates.MonthLocator(interval=interval))
+        axes[-1].xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%Y-%m"))
+    for tick in axes[-1].xaxis.get_major_ticks():
+        tick.label.set_fontsize(16)
+    axes[-1].spines["left"].set_visible(False)
+    axes[-1].spines["right"].set_visible(False)
+    axes[-1].spines["top"].set_visible(False)
+    axes[-1].get_yaxis().set_visible(False)
+    axes[-1].set_facecolor((1.0,) * 3 + (0.0,))
+
+    title = "%s commits" % name
+    deploy_plot(title, args.output, args.style)
+
+
 def main():
 def main():
     args = parse_args()
     args = parse_args()
     reader = read_input(args)
     reader = read_input(args)
@@ -1169,6 +1354,7 @@ def main():
     shotness_warning = "Structural hotness stats were not collected. Re-run hercules with " \
     shotness_warning = "Structural hotness stats were not collected. Re-run hercules with " \
                        "--shotness. Also check --languages - the output may be empty."
                        "--shotness. Also check --languages - the output may be empty."
     sentiment_warning = "Sentiment stats were not collected. Re-run hercules with --sentiment."
     sentiment_warning = "Sentiment stats were not collected. Re-run hercules with --sentiment."
+    devs_warning = "Devs stats were not collected. Re-run hercules with --devs."
 
 
     def run_times():
     def run_times():
         rt = reader.get_run_times()
         rt = reader.get_run_times()
@@ -1262,25 +1448,30 @@ def main():
             return
             return
         show_sentiment_stats(args, reader.get_name(), args.resample, reader.get_header()[0], data)
         show_sentiment_stats(args, reader.get_name(), args.resample, reader.get_header()[0], data)
 
 
-    if args.mode == "run_times":
-        run_times()
-    elif args.mode == "project":
-        project_burndown()
-    elif args.mode == "file":
-        files_burndown()
-    elif args.mode == "person":
-        people_burndown()
-    elif args.mode == "churn_matrix":
-        churn_matrix()
-    elif args.mode == "ownership":
-        ownership_burndown()
-    elif args.mode == "couples":
-        couples()
-    elif args.mode == "shotness":
-        shotness()
-    elif args.mode == "sentiment":
-        sentiment()
-    elif args.mode == "all":
+    def devs():
+        try:
+            data = reader.get_devs()
+        except KeyError:
+            print(devs_warning)
+            return
+        show_devs(args, reader.get_name(), *reader.get_header(), data)
+
+    modes = {
+        "run-times": run_times,
+        "burndown-project": project_burndown,
+        "burndown-file": files_burndown,
+        "burndown-person": people_burndown,
+        "churn-matrix": churn_matrix,
+        "ownership": ownership_burndown,
+        "couples": couples,
+        "shotness": shotness,
+        "sentiment": sentiment,
+        "devs": devs,
+    }
+    try:
+        modes[args.mode]()
+    except KeyError:
+        assert args.mode == "all"
         project_burndown()
         project_burndown()
         files_burndown()
         files_burndown()
         people_burndown()
         people_burndown()
@@ -1289,6 +1480,7 @@ def main():
         couples()
         couples()
         shotness()
         shotness()
         sentiment()
         sentiment()
+        devs()
 
 
     if web_server.running:
     if web_server.running:
         secs = int(os.getenv("COUPLES_SERVER_TIME", "60"))
         secs = int(os.getenv("COUPLES_SERVER_TIME", "60"))

+ 2 - 2
leaves/burndown.go

@@ -865,8 +865,8 @@ func (analyser *BurndownAnalysis) serializeBinary(result *BurndownResult, writer
 	if err != nil {
 	if err != nil {
 		return err
 		return err
 	}
 	}
-	writer.Write(serialized)
-	return nil
+	_, err = writer.Write(serialized)
+	return err
 }
 }
 
 
 func sortedKeys(m map[string]DenseHistory) []string {
 func sortedKeys(m map[string]DenseHistory) []string {

+ 86 - 3
leaves/burndown_test.go

@@ -342,7 +342,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 	}
 	}
 }
 }
 
 
-func TestBurndownSerialize(t *testing.T) {
+func bakeBurndownForSerialization(t *testing.T, firstAuthor, secondAuthor int) BurndownResult {
 	burndown := BurndownAnalysis{
 	burndown := BurndownAnalysis{
 		Granularity:  30,
 		Granularity:  30,
 		Sampling:     30,
 		Sampling:     30,
@@ -352,7 +352,7 @@ func TestBurndownSerialize(t *testing.T) {
 	burndown.Initialize(test.Repository)
 	burndown.Initialize(test.Repository)
 	deps := map[string]interface{}{}
 	deps := map[string]interface{}{}
 	// stage 1
 	// stage 1
-	deps[identity.DependencyAuthor] = 0
+	deps[identity.DependencyAuthor] = firstAuthor
 	deps[items.DependencyDay] = 0
 	deps[items.DependencyDay] = 0
 	cache := map[plumbing.Hash]*items.CachedBlob{}
 	cache := map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
@@ -413,7 +413,7 @@ func TestBurndownSerialize(t *testing.T) {
 
 
 	// stage 2
 	// stage 2
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
 	// 2b1ed978194a94edeabbca6de7ff3b5771d4d665
-	deps[identity.DependencyAuthor] = 1
+	deps[identity.DependencyAuthor] = secondAuthor
 	deps[items.DependencyDay] = 30
 	deps[items.DependencyDay] = 30
 	cache = map[plumbing.Hash]*items.CachedBlob{}
 	cache = map[plumbing.Hash]*items.CachedBlob{}
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
 	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
@@ -481,6 +481,12 @@ func TestBurndownSerialize(t *testing.T) {
 	burndown.reversedPeopleDict = people[:]
 	burndown.reversedPeopleDict = people[:]
 	burndown.Consume(deps)
 	burndown.Consume(deps)
 	out := burndown.Finalize().(BurndownResult)
 	out := burndown.Finalize().(BurndownResult)
+	return out
+}
+
+func TestBurndownSerialize(t *testing.T) {
+	out := bakeBurndownForSerialization(t, 0, 1)
+	burndown := &BurndownAnalysis{}
 
 
 	buffer := &bytes.Buffer{}
 	buffer := &bytes.Buffer{}
 	burndown.Serialize(out, false, buffer)
 	burndown.Serialize(out, false, buffer)
@@ -557,6 +563,83 @@ func TestBurndownSerialize(t *testing.T) {
 	assert.Equal(t, msg.PeopleInteraction.Indptr, indptr[:])
 	assert.Equal(t, msg.PeopleInteraction.Indptr, indptr[:])
 }
 }
 
 
+func TestBurndownSerializeAuthorMissing(t *testing.T) {
+	out := bakeBurndownForSerialization(t, 0, identity.AuthorMissing)
+	burndown := &BurndownAnalysis{}
+
+	buffer := &bytes.Buffer{}
+	burndown.Serialize(out, false, buffer)
+	assert.Equal(t, buffer.String(), `  granularity: 30
+  sampling: 30
+  "project": |-
+    1145    0
+     464  369
+  files:
+    "burndown.go": |-
+      926   0
+      293 250
+    "cmd/hercules/main.go": |-
+      207   0
+      171 119
+  people_sequence:
+    - "one@srcd"
+    - "two@srcd"
+  people:
+    "one@srcd": |-
+      1145    0
+       464    0
+    "two@srcd": |-
+      0 0
+      0 0
+  people_interaction: |-
+    1145 -681    0    0
+       0    0    0    0
+`)
+	buffer = &bytes.Buffer{}
+	burndown.Serialize(out, true, buffer)
+	msg := pb.BurndownAnalysisResults{}
+	proto.Unmarshal(buffer.Bytes(), &msg)
+	assert.Equal(t, msg.Granularity, int32(30))
+	assert.Equal(t, msg.Sampling, int32(30))
+	assert.Equal(t, msg.Project.Name, "project")
+	assert.Equal(t, msg.Project.NumberOfRows, int32(2))
+	assert.Equal(t, msg.Project.NumberOfColumns, int32(2))
+	assert.Len(t, msg.Project.Rows, 2)
+	assert.Len(t, msg.Project.Rows[0].Columns, 1)
+	assert.Equal(t, msg.Project.Rows[0].Columns[0], uint32(1145))
+	assert.Len(t, msg.Project.Rows[1].Columns, 2)
+	assert.Equal(t, msg.Project.Rows[1].Columns[0], uint32(464))
+	assert.Equal(t, msg.Project.Rows[1].Columns[1], uint32(369))
+	assert.Len(t, msg.Files, 2)
+	assert.Equal(t, msg.Files[0].Name, "burndown.go")
+	assert.Equal(t, msg.Files[1].Name, "cmd/hercules/main.go")
+	assert.Len(t, msg.Files[0].Rows, 2)
+	assert.Len(t, msg.Files[0].Rows[0].Columns, 1)
+	assert.Equal(t, msg.Files[0].Rows[0].Columns[0], uint32(926))
+	assert.Len(t, msg.Files[0].Rows[1].Columns, 2)
+	assert.Equal(t, msg.Files[0].Rows[1].Columns[0], uint32(293))
+	assert.Equal(t, msg.Files[0].Rows[1].Columns[1], uint32(250))
+	assert.Len(t, msg.People, 2)
+	assert.Equal(t, msg.People[0].Name, "one@srcd")
+	assert.Equal(t, msg.People[1].Name, "two@srcd")
+	assert.Len(t, msg.People[0].Rows, 2)
+	assert.Len(t, msg.People[0].Rows[0].Columns, 1)
+	assert.Len(t, msg.People[0].Rows[1].Columns, 1)
+	assert.Equal(t, msg.People[0].Rows[0].Columns[0], uint32(1145))
+	assert.Equal(t, msg.People[0].Rows[1].Columns[0], uint32(464))
+	assert.Len(t, msg.People[1].Rows, 2)
+	assert.Len(t, msg.People[1].Rows[0].Columns, 0)
+	assert.Len(t, msg.People[1].Rows[1].Columns, 0)
+	assert.Equal(t, msg.PeopleInteraction.NumberOfRows, int32(2))
+	assert.Equal(t, msg.PeopleInteraction.NumberOfColumns, int32(4))
+	data := [...]int64{1145, -681}
+	assert.Equal(t, msg.PeopleInteraction.Data, data[:])
+	indices := [...]int32{0, 1}
+	assert.Equal(t, msg.PeopleInteraction.Indices, indices[:])
+	indptr := [...]int64{0, 2, 2}
+	assert.Equal(t, msg.PeopleInteraction.Indptr, indptr[:])
+}
+
 type panickingCloser struct {
 type panickingCloser struct {
 }
 }
 
 

+ 2 - 2
leaves/couples.go

@@ -478,8 +478,8 @@ func (couples *CouplesAnalysis) serializeBinary(result *CouplesResult, writer io
 	if err != nil {
 	if err != nil {
 		return err
 		return err
 	}
 	}
-	writer.Write(serialized)
-	return nil
+	_, err = writer.Write(serialized)
+	return err
 }
 }
 
 
 // currentFiles return the list of files in the last consumed commit.
 // currentFiles return the list of files in the last consumed commit.

+ 99 - 0
leaves/couples_test.go

@@ -189,6 +189,105 @@ func TestCouplesConsumeFinalize(t *testing.T) {
 	assert.Equal(t, cr.FilesMatrix[2][2], int64(3))
 	assert.Equal(t, cr.FilesMatrix[2][2], int64(3))
 }
 }
 
 
+// TestCouplesConsumeFinalizeAuthorMissing replays the TestCouplesConsumeFinalize
+// scenario with an extra commit attributed to identity.AuthorMissing and verifies
+// that unidentified authors contribute to file couplings but get empty people rows.
+func TestCouplesConsumeFinalizeAuthorMissing(t *testing.T) {
+	c := fixtureCouples()
+	deps := map[string]interface{}{}
+	deps[identity.DependencyAuthor] = 0
+	deps[core.DependencyCommit], _ = test.Repository.CommitObject(gitplumbing.NewHash(
+		"a3ee37f91f0d705ec9c41ae88426f0ae44b2fbc3"))
+	deps[core.DependencyIsMerge] = false
+	deps[plumbing.DependencyTreeChanges] = generateChanges("+LICENSE2", "+file2.go", "+rbtree2.go")
+	c.Consume(deps)
+	deps[plumbing.DependencyTreeChanges] = generateChanges("+README.md", "-LICENSE2", "=analyser.go", ">file2.go>file_test.go")
+	c.Consume(deps)
+	deps[identity.DependencyAuthor] = 1
+	deps[plumbing.DependencyTreeChanges] = generateChanges("=README.md", "=analyser.go", "-rbtree2.go")
+	c.Consume(deps)
+	// The final commit comes from an unidentified author.
+	deps[identity.DependencyAuthor] = identity.AuthorMissing
+	deps[plumbing.DependencyTreeChanges] = generateChanges("=file_test.go")
+	c.Consume(deps)
+	assert.Equal(t, len(c.people[0]), 6)
+	assert.Equal(t, c.people[0]["README.md"], 1)
+	assert.Equal(t, c.people[0]["LICENSE2"], 2)
+	assert.Equal(t, c.people[0]["analyser.go"], 1)
+	assert.Equal(t, c.people[0]["file2.go"], 1)
+	assert.Equal(t, c.people[0]["file_test.go"], 1)
+	assert.Equal(t, c.people[0]["rbtree2.go"], 1)
+	assert.Equal(t, len(c.people[1]), 3)
+	assert.Equal(t, c.people[1]["README.md"], 1)
+	assert.Equal(t, c.people[1]["analyser.go"], 1)
+	assert.Equal(t, c.people[1]["rbtree2.go"], 1)
+	// The AuthorMissing commit must not create a third person's history.
+	assert.Equal(t, len(c.people[2]), 0)
+	assert.Equal(t, len(c.files["README.md"]), 3)
+	assert.Equal(t, c.files["README.md"], map[string]int{
+		"README.md":    2,
+		"analyser.go":  2,
+		"file_test.go": 1,
+	})
+	assert.Equal(t, c.files["LICENSE2"], map[string]int{
+		"LICENSE2":   1,
+		"file2.go":   1,
+		"rbtree2.go": 1,
+	})
+	assert.Equal(t, c.files["file2.go"], map[string]int{
+		"LICENSE2":   1,
+		"file2.go":   1,
+		"rbtree2.go": 1,
+	})
+	assert.Equal(t, c.files["rbtree2.go"], map[string]int{
+		"LICENSE2":   1,
+		"file2.go":   1,
+		"rbtree2.go": 1,
+	})
+	assert.Equal(t, c.files["analyser.go"], map[string]int{
+		"analyser.go":  2,
+		"README.md":    2,
+		"file_test.go": 1,
+	})
+	assert.Equal(t, c.files["file_test.go"], map[string]int{
+		"file_test.go": 2,
+		"README.md":    1,
+		"analyser.go":  1,
+	})
+	assert.Equal(t, c.peopleCommits[0], 2)
+	assert.Equal(t, c.peopleCommits[1], 1)
+	assert.Equal(t, c.peopleCommits[2], 0)
+	cr := c.Finalize().(CouplesResult)
+	assert.Equal(t, len(cr.Files), 3)
+	assert.Equal(t, cr.Files[0], "README.md")
+	assert.Equal(t, cr.Files[1], "analyser.go")
+	assert.Equal(t, cr.Files[2], "file_test.go")
+	assert.Equal(t, len(cr.PeopleFiles[0]), 3)
+	assert.Equal(t, cr.PeopleFiles[0][0], 0)
+	assert.Equal(t, cr.PeopleFiles[0][1], 1)
+	assert.Equal(t, cr.PeopleFiles[0][2], 2)
+	assert.Equal(t, len(cr.PeopleFiles[1]), 2)
+	assert.Equal(t, cr.PeopleFiles[1][0], 0)
+	assert.Equal(t, cr.PeopleFiles[1][1], 1)
+	assert.Equal(t, len(cr.PeopleFiles[2]), 0)
+	assert.Equal(t, len(cr.PeopleMatrix[0]), 3)
+	assert.Equal(t, cr.PeopleMatrix[0][0], int64(7))
+	assert.Equal(t, cr.PeopleMatrix[0][1], int64(3))
+	assert.Equal(t, cr.PeopleMatrix[0][2], int64(0))
+	assert.Equal(t, len(cr.PeopleMatrix[1]), 2)
+	assert.Equal(t, cr.PeopleMatrix[1][0], int64(3))
+	assert.Equal(t, cr.PeopleMatrix[1][1], int64(3))
+	assert.Equal(t, len(cr.PeopleMatrix[2]), 0)
+	assert.Equal(t, len(cr.FilesMatrix), 3)
+	assert.Equal(t, len(cr.FilesMatrix[0]), 3)
+	assert.Equal(t, cr.FilesMatrix[0][2], int64(1))
+	assert.Equal(t, cr.FilesMatrix[0][0], int64(2))
+	assert.Equal(t, cr.FilesMatrix[0][1], int64(2))
+	assert.Equal(t, len(cr.FilesMatrix[1]), 3)
+	assert.Equal(t, cr.FilesMatrix[1][2], int64(1))
+	assert.Equal(t, cr.FilesMatrix[1][0], int64(2))
+	assert.Equal(t, cr.FilesMatrix[1][1], int64(2))
+	assert.Equal(t, len(cr.FilesMatrix[2]), 3)
+	assert.Equal(t, cr.FilesMatrix[2][0], int64(1))
+	assert.Equal(t, cr.FilesMatrix[2][1], int64(1))
+	assert.Equal(t, cr.FilesMatrix[2][2], int64(3))
+}
+
 func TestCouplesFork(t *testing.T) {
 func TestCouplesFork(t *testing.T) {
 	couples1 := fixtureCouples()
 	couples1 := fixtureCouples()
 	clones := couples1.Fork(1)
 	clones := couples1.Fork(1)

+ 414 - 0
leaves/devs.go

@@ -0,0 +1,414 @@
+package leaves
+
+import (
+	"fmt"
+	"io"
+	"sort"
+	"unicode/utf8"
+
+	"github.com/gogo/protobuf/proto"
+	"github.com/sergi/go-diff/diffmatchpatch"
+	"gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/go-git.v4/plumbing"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/go-git.v4/utils/merkletrie"
+	"gopkg.in/src-d/hercules.v5/internal/core"
+	"gopkg.in/src-d/hercules.v5/internal/pb"
+	items "gopkg.in/src-d/hercules.v5/internal/plumbing"
+	"gopkg.in/src-d/hercules.v5/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v5/internal/yaml"
+)
+
+// DevsAnalysis calculates the number of commits through time per developer.
+// It also records the numbers of added, deleted and changed lines through time per developer.
+type DevsAnalysis struct {
+	// NoopMerger presumably supplies a no-op Merge() for forked copies — confirm in core.
+	core.NoopMerger
+	// OneShotMergeProcessor backs ShouldConsumeCommit() so each merge commit is counted once.
+	core.OneShotMergeProcessor
+	// ConsiderEmptyCommits indicates whether empty commits (e.g., merges) should be taken
+	// into account.
+	ConsiderEmptyCommits bool
+
+	// days maps days to developers to stats
+	days map[int]map[int]*DevDay
+	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
+	reversedPeopleDict []string
+}
+
+// DevsResult is returned by DevsAnalysis.Finalize() and carries the daily statistics
+// per developer.
+type DevsResult struct {
+	// Days is <day index> -> <developer index> -> daily stats
+	Days map[int]map[int]*DevDay
+
+	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
+	// (unexported: filled by Configure(), Deserialize() or MergeResults()).
+	reversedPeopleDict []string
+}
+
+// DevDay is the statistics for a development day and a particular developer.
+// NOTE(review): for modified files Consume() measures Added/Removed/Changed with
+// utf8.RuneCountInString over the diff text — this equals line counts only if the
+// upstream FileDiff emits line-granular diffs; confirm against items.FileDiffData.
+type DevDay struct {
+	// Commits is the number of commits made by a particular developer in a particular day.
+	Commits int
+	// Added is the number of added lines by a particular developer in a particular day.
+	Added   int
+	// Removed is the number of removed lines by a particular developer in a particular day.
+	Removed int
+	// Changed is the number of changed lines by a particular developer in a particular day.
+	Changed int
+}
+
+const (
+	// ConfigDevsConsiderEmptyCommits is the name of the option to set DevsAnalysis.ConsiderEmptyCommits.
+	// Read in Configure(); surfaced as the --empty-commits command line flag.
+	ConfigDevsConsiderEmptyCommits = "Devs.ConsiderEmptyCommits"
+)
+
+// Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
+// The value is also what core.Registry.Summon() resolves (see TestDevsRegistration).
+func (devs *DevsAnalysis) Name() string {
+	return "Devs"
+}
+
+// Provides returns the list of names of entities which are produced by this PipelineItem.
+// Each produced entity will be inserted into `deps` of dependent Consume()-s according
+// to this list. Also used by core.Registry to build the global map of providers.
+// DevsAnalysis is a leaf analysis, so it provides nothing.
+func (devs *DevsAnalysis) Provides() []string {
+	return []string{}
+}
+
+// Requires returns the list of names of entities which are needed by this PipelineItem.
+// Each requested entity will be inserted into `deps` of Consume(). In turn, those
+// entities are Provides() upstream.
+// Needed: commit author, tree diff, per-file diffs, blob cache and the day index.
+func (devs *DevsAnalysis) Requires() []string {
+	arr := [...]string{
+		identity.DependencyAuthor, items.DependencyTreeChanges, items.DependencyFileDiff,
+		items.DependencyBlobCache, items.DependencyDay}
+	return arr[:]
+}
+
+// ListConfigurationOptions returns the list of changeable public properties of this PipelineItem.
+// There is a single boolean option controlling whether empty commits are counted.
+func (devs *DevsAnalysis) ListConfigurationOptions() []core.ConfigurationOption {
+	options := [...]core.ConfigurationOption{{
+		Name:        ConfigDevsConsiderEmptyCommits,
+		Description: "Take into account empty commits such as trivial merges.",
+		Flag:        "--empty-commits",
+		Type:        core.BoolConfigurationOption,
+		Default:     false}}
+	return options[:]
+}
+
+// Configure sets the properties previously published by ListConfigurationOptions().
+// Facts that are absent (or of the wrong type) leave the current values untouched.
+func (devs *DevsAnalysis) Configure(facts map[string]interface{}) {
+	if val, exists := facts[ConfigDevsConsiderEmptyCommits].(bool); exists {
+		devs.ConsiderEmptyCommits = val
+	}
+	if val, exists := facts[identity.FactIdentityDetectorReversedPeopleDict].([]string); exists {
+		devs.reversedPeopleDict = val
+	}
+}
+
+// Flag for the command line switch which enables this analysis.
+func (devs *DevsAnalysis) Flag() string {
+	return "devs"
+}
+
+// Description returns the text which explains what the analysis is doing.
+func (devs *DevsAnalysis) Description() string {
+	return "Calculates the number of commits, added, removed and changed lines per developer through time."
+}
+
+// Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
+// calls. The repository which is going to be analysed is supplied as an argument.
+// Note: reversedPeopleDict is deliberately kept — it is set via Configure(), not here.
+func (devs *DevsAnalysis) Initialize(repository *git.Repository) {
+	devs.days = map[int]map[int]*DevDay{}
+	devs.OneShotMergeProcessor.Initialize()
+}
+
+// Consume runs this PipelineItem on the next commit data.
+// `deps` contain all the results from upstream PipelineItem-s as requested by Requires().
+// Additionally, DependencyCommit is always present there and represents the analysed *object.Commit.
+// This function returns the mapping with analysis results. The keys must be the same as
+// in Provides(). If there was an error, nil is returned.
+func (devs *DevsAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+	// Skip already-seen merge commits (state from OneShotMergeProcessor).
+	if !devs.ShouldConsumeCommit(deps) {
+		return nil, nil
+	}
+	author := deps[identity.DependencyAuthor].(int)
+	treeDiff := deps[items.DependencyTreeChanges].(object.Changes)
+	// Empty commits (e.g. trivial merges) are ignored unless explicitly enabled.
+	if len(treeDiff) == 0 && !devs.ConsiderEmptyCommits {
+		return nil, nil
+	}
+	day := deps[items.DependencyDay].(int)
+	// Lazily create the per-day and per-developer accumulators.
+	devsDay, exists := devs.days[day]
+	if !exists {
+		devsDay = map[int]*DevDay{}
+		devs.days[day] = devsDay
+	}
+	dd, exists := devsDay[author]
+	if !exists {
+		dd = &DevDay{}
+		devsDay[author] = dd
+	}
+	dd.Commits++
+	cache := deps[items.DependencyBlobCache].(map[plumbing.Hash]*items.CachedBlob)
+	fileDiffs := deps[items.DependencyFileDiff].(map[string]items.FileDiffData)
+	for _, change := range treeDiff {
+		action, err := change.Action()
+		if err != nil {
+			return nil, err
+		}
+		switch action {
+		case merkletrie.Insert:
+			// New file: everything counts as added; binary blobs are skipped.
+			blob := cache[change.To.TreeEntry.Hash]
+			lines, err := blob.CountLines()
+			if err != nil {
+				// binary
+				continue
+			}
+			dd.Added += lines
+		case merkletrie.Delete:
+			// Removed file: everything counts as removed; binary blobs are skipped.
+			blob := cache[change.From.TreeEntry.Hash]
+			lines, err := blob.CountLines()
+			if err != nil {
+				// binary
+				continue
+			}
+			dd.Removed += lines
+		case merkletrie.Modify:
+			// Walk the edit script: a Delete immediately followed by an Insert is a
+			// replacement, so the overlap counts as Changed and only the surplus as
+			// Added or Removed. removedPending carries the size of the pending Delete.
+			thisDiffs := fileDiffs[change.To.Name]
+			var removedPending int
+			for _, edit := range thisDiffs.Diffs {
+				switch edit.Type {
+				case diffmatchpatch.DiffEqual:
+					// The pending removal was not followed by an insert — pure removal.
+					if removedPending > 0 {
+						dd.Removed += removedPending
+					}
+					removedPending = 0
+				case diffmatchpatch.DiffInsert:
+					added := utf8.RuneCountInString(edit.Text)
+					if removedPending > added {
+						dd.Changed += added
+						dd.Removed += removedPending - added
+					} else {
+						dd.Changed += removedPending
+						dd.Added += added - removedPending
+					}
+					removedPending = 0
+				case diffmatchpatch.DiffDelete:
+					removedPending = utf8.RuneCountInString(edit.Text)
+				}
+			}
+			// Flush a trailing removal at end of the diff.
+			if removedPending > 0 {
+				dd.Removed += removedPending
+			}
+		}
+	}
+	return nil, nil
+}
+
+// Finalize returns the result of the analysis. Further Consume() calls are not expected.
+// The result shares the internal maps (no deep copy is made).
+func (devs *DevsAnalysis) Finalize() interface{} {
+	return DevsResult{
+		Days: devs.days,
+		reversedPeopleDict: devs.reversedPeopleDict,
+	}
+}
+
+// Fork clones this pipeline item.
+// ForkSamePipelineItem returns the very same instance n times (state is shared across
+// branches) — TestDevsFork asserts pointer equality.
+func (devs *DevsAnalysis) Fork(n int) []core.PipelineItem {
+	return core.ForkSamePipelineItem(devs, n)
+}
+
+// Serialize converts the analysis result as returned by Finalize() to text or bytes.
+// The text format is YAML and the bytes format is Protocol Buffers.
+func (devs *DevsAnalysis) Serialize(result interface{}, binary bool, writer io.Writer) error {
+	devsResult := result.(DevsResult)
+	if binary {
+		return devs.serializeBinary(&devsResult, writer)
+	}
+	devs.serializeText(&devsResult, writer)
+	return nil
+}
+
+// Deserialize converts the specified protobuf bytes to DevsResult.
+// The wire sentinel -1 for an unidentified developer is mapped back to
+// identity.AuthorMissing (the inverse of serializeBinary).
+func (devs *DevsAnalysis) Deserialize(pbmessage []byte) (interface{}, error) {
+	message := pb.DevsAnalysisResults{}
+	err := proto.Unmarshal(pbmessage, &message)
+	if err != nil {
+		return nil, err
+	}
+	days := map[int]map[int]*DevDay{}
+	for day, dd := range message.Days {
+		rdd := map[int]*DevDay{}
+		days[int(day)] = rdd
+		for dev, stats := range dd.Devs {
+			if dev == -1 {
+				dev = identity.AuthorMissing
+			}
+			rdd[int(dev)] = &DevDay{
+				Commits: int(stats.Commits),
+				Added:   int(stats.Added),
+				Removed: int(stats.Removed),
+				Changed: int(stats.Changed),
+			}
+		}
+	}
+	result := DevsResult{
+		Days: days,
+		reversedPeopleDict: message.DevIndex,
+	}
+	return result, nil
+}
+
+// MergeResults combines two DevsAnalysis-es together.
+// The two people dictionaries are unioned into a sorted joint dictionary, both Days
+// maps are re-keyed onto the joint developer indices, and per-day stats for the same
+// (day, developer) pair are summed. identity.AuthorMissing is preserved as-is.
+func (devs *DevsAnalysis) MergeResults(r1, r2 interface{}, c1, c2 *core.CommonAnalysisResult) interface{} {
+	cr1 := r1.(DevsResult)
+	cr2 := r2.(DevsResult)
+	merged := DevsResult{}
+	// devIndexPair stores each developer's 1-based index in either input dictionary;
+	// 0 means "absent from that side" (hence the dev+1 shift below).
+	type devIndexPair struct {
+		Index1 int
+		Index2 int
+	}
+	devIndex := map[string]devIndexPair{}
+	for dev, devName := range cr1.reversedPeopleDict {
+		devIndex[devName] = devIndexPair{Index1: dev+1, Index2: devIndex[devName].Index2}
+	}
+	for dev, devName := range cr2.reversedPeopleDict {
+		devIndex[devName] = devIndexPair{Index1: devIndex[devName].Index1, Index2: dev+1}
+	}
+	jointDevSeq := make([]string, len(devIndex))
+	{
+		i := 0
+		for dev := range devIndex {
+			jointDevSeq[i] = dev
+			i++
+		}
+	}
+	// Sorting makes the merged dictionary (and thus the index mapping) deterministic.
+	sort.Strings(jointDevSeq)
+	merged.reversedPeopleDict = jointDevSeq
+	// invDevIndexN: old 0-based index on side N -> index in the joint dictionary.
+	invDevIndex1 := map[int]int{}
+	invDevIndex2 := map[int]int{}
+	for i, dev := range jointDevSeq {
+		pair := devIndex[dev]
+		if pair.Index1 > 0 {
+			invDevIndex1[pair.Index1-1] = i
+		}
+		if pair.Index2 > 0 {
+			invDevIndex2[pair.Index2-1] = i
+		}
+	}
+	newDays := map[int]map[int]*DevDay{}
+	merged.Days = newDays
+	// Fold in side 1.
+	for day, dd := range cr1.Days {
+		newdd, exists := newDays[day]
+		if !exists {
+			newdd = map[int]*DevDay{}
+			newDays[day] = newdd
+		}
+		for dev, stats := range dd {
+			newdev := dev
+			if newdev != identity.AuthorMissing {
+				newdev = invDevIndex1[dev]
+			}
+			newstats, exists := newdd[newdev]
+			if !exists {
+				newstats = &DevDay{}
+				newdd[newdev] = newstats
+			}
+			newstats.Commits += stats.Commits
+			newstats.Added += stats.Added
+			newstats.Removed += stats.Removed
+			newstats.Changed += stats.Changed
+		}
+	}
+	// Fold in side 2 (same logic with the other index mapping).
+	for day, dd := range cr2.Days {
+		newdd, exists := newDays[day]
+		if !exists {
+			newdd = map[int]*DevDay{}
+			newDays[day] = newdd
+		}
+		for dev, stats := range dd {
+			newdev := dev
+			if newdev != identity.AuthorMissing {
+				newdev = invDevIndex2[dev]
+			}
+			newstats, exists := newdd[newdev]
+			if !exists {
+				newstats = &DevDay{}
+				newdd[newdev] = newstats
+			}
+			newstats.Commits += stats.Commits
+			newstats.Added += stats.Added
+			newstats.Removed += stats.Removed
+			newstats.Changed += stats.Changed
+		}
+	}
+	return merged
+}
+
+// serializeText writes the result as a YAML fragment (two-space indented so it nests
+// under the analysis name). Days and developers are emitted in sorted order for
+// deterministic output; identity.AuthorMissing is printed as -1.
+func (devs *DevsAnalysis) serializeText(result *DevsResult, writer io.Writer) {
+	fmt.Fprintln(writer, "  days:")
+	days := make([]int, len(result.Days))
+	{
+		i := 0
+		for day := range result.Days {
+			days[i] = day
+			i++
+		}
+	}
+	sort.Ints(days)
+	for _, day := range days {
+		fmt.Fprintf(writer, "    %d:\n", day)
+		rday := result.Days[day]
+		devseq := make([]int, len(rday))
+		{
+			i := 0
+			for dev := range rday {
+				devseq[i] = dev
+				i++
+			}
+		}
+		sort.Ints(devseq)
+		for _, dev := range devseq {
+			stats := rday[dev]
+			// AuthorMissing sorts last (large constant) and is rendered as -1.
+			if dev == identity.AuthorMissing {
+				dev = -1
+			}
+			fmt.Fprintf(writer, "      %d: [%d, %d, %d, %d]\n",
+				dev, stats.Commits, stats.Added, stats.Removed, stats.Changed)
+		}
+	}
+	fmt.Fprintln(writer, "  people:")
+	for _, person := range result.reversedPeopleDict {
+		fmt.Fprintf(writer, "  - %s\n", yaml.SafeString(person))
+	}
+}
+
+// serializeBinary writes the result as a pb.DevsAnalysisResults protobuf message.
+// identity.AuthorMissing is encoded as the -1 sentinel (reversed by Deserialize).
+// The short write count from writer.Write is intentionally propagated via err only.
+func (devs *DevsAnalysis) serializeBinary(result *DevsResult, writer io.Writer) error {
+	message := pb.DevsAnalysisResults{}
+	message.DevIndex = result.reversedPeopleDict
+	message.Days = map[int32]*pb.DayDevs{}
+	for day, devs := range result.Days {
+		dd := &pb.DayDevs{}
+		message.Days[int32(day)] = dd
+		dd.Devs = map[int32]*pb.DevDay{}
+		for dev, stats := range devs {
+			if dev == identity.AuthorMissing {
+				dev = -1
+			}
+			dd.Devs[int32(dev)] = &pb.DevDay{
+				Commits: int32(stats.Commits),
+				Added:   int32(stats.Added),
+				Changed: int32(stats.Changed),
+				Removed: int32(stats.Removed),
+			}
+		}
+	}
+	serialized, err := proto.Marshal(&message)
+	if err != nil {
+		return err
+	}
+	_, err = writer.Write(serialized)
+	return err
+}
+
+// init registers DevsAnalysis in the global pipeline item registry.
+func init() {
+	core.Registry.Register(&DevsAnalysis{})
+}

+ 332 - 0
leaves/devs_test.go

@@ -0,0 +1,332 @@
+package leaves
+
+import (
+	"bytes"
+	"testing"
+
+	"github.com/gogo/protobuf/proto"
+	"github.com/stretchr/testify/assert"
+	gitplumbing "gopkg.in/src-d/go-git.v4/plumbing"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v5/internal/core"
+	"gopkg.in/src-d/hercules.v5/internal/pb"
+	"gopkg.in/src-d/hercules.v5/internal/plumbing"
+	"gopkg.in/src-d/hercules.v5/internal/plumbing/identity"
+	"gopkg.in/src-d/hercules.v5/internal/test"
+	"gopkg.in/src-d/hercules.v5/internal/test/fixtures"
+)
+
+// fixtureDevs returns a DevsAnalysis initialized against the shared test repository
+// with a two-person identity dictionary.
+func fixtureDevs() *DevsAnalysis {
+	d := DevsAnalysis{}
+	d.Initialize(test.Repository)
+	people := [...]string{"one@srcd", "two@srcd"}
+	d.reversedPeopleDict = people[:]
+	return &d
+}
+
+// TestDevsMeta checks the static metadata: name, provides/requires lists,
+// command line flag and the single configuration option.
+func TestDevsMeta(t *testing.T) {
+	d := fixtureDevs()
+	assert.Equal(t, d.Name(), "Devs")
+	assert.Equal(t, len(d.Provides()), 0)
+	assert.Equal(t, len(d.Requires()), 5)
+	assert.Equal(t, d.Requires()[0], identity.DependencyAuthor)
+	assert.Equal(t, d.Requires()[1], plumbing.DependencyTreeChanges)
+	assert.Equal(t, d.Requires()[2], plumbing.DependencyFileDiff)
+	assert.Equal(t, d.Requires()[3], plumbing.DependencyBlobCache)
+	assert.Equal(t, d.Requires()[4], plumbing.DependencyDay)
+	assert.Equal(t, d.Flag(), "devs")
+	assert.Len(t, d.ListConfigurationOptions(), 1)
+	assert.Equal(t, d.ListConfigurationOptions()[0].Name, ConfigDevsConsiderEmptyCommits)
+	assert.Equal(t, d.ListConfigurationOptions()[0].Flag, "--empty-commits")
+	assert.Equal(t, d.ListConfigurationOptions()[0].Type, core.BoolConfigurationOption)
+	assert.Equal(t, d.ListConfigurationOptions()[0].Default, false)
+	assert.True(t, len(d.Description()) > 0)
+}
+
+// TestDevsRegistration verifies that init() registered the item and its flag
+// appears among the registry leaves.
+func TestDevsRegistration(t *testing.T) {
+	summoned := core.Registry.Summon((&DevsAnalysis{}).Name())
+	assert.Len(t, summoned, 1)
+	assert.Equal(t, summoned[0].Name(), "Devs")
+	leaves := core.Registry.GetLeaves()
+	matched := false
+	for _, tp := range leaves {
+		if tp.Flag() == (&DevsAnalysis{}).Flag() {
+			matched = true
+			break
+		}
+	}
+	assert.True(t, matched)
+}
+
+// TestDevsConfigure checks that the ConsiderEmptyCommits fact is honored.
+func TestDevsConfigure(t *testing.T) {
+	devs := DevsAnalysis{}
+	facts := map[string]interface{}{}
+	facts[ConfigDevsConsiderEmptyCommits] = true
+	devs.Configure(facts)
+	assert.Equal(t, devs.ConsiderEmptyCommits, true)
+}
+
+// TestDevsInitialize checks that Initialize() allocates the days accumulator.
+func TestDevsInitialize(t *testing.T) {
+	d := fixtureDevs()
+	assert.NotNil(t, d.days)
+}
+
+// TestDevsConsumeFinalize feeds the same synthetic tree diff (one modify, two inserts)
+// to Consume() under different authors and days, and verifies the per-day, per-developer
+// commit/line counters accumulate as expected.
+func TestDevsConsumeFinalize(t *testing.T) {
+	devs := fixtureDevs()
+	deps := map[string]interface{}{}
+
+	// stage 1
+	deps[identity.DependencyAuthor] = 0
+	deps[plumbing.DependencyDay] = 0
+	cache := map[gitplumbing.Hash]*plumbing.CachedBlob{}
+	AddHash(t, cache, "291286b4ac41952cbd1389fda66420ec03c1a9fe")
+	AddHash(t, cache, "c29112dbd697ad9b401333b80c18a63951bc18d9")
+	AddHash(t, cache, "baa64828831d174f40140e4b3cfa77d1e917a2c1")
+	AddHash(t, cache, "dc248ba2b22048cc730c571a748e8ffcf7085ab9")
+	deps[plumbing.DependencyBlobCache] = cache
+	changes := make(object.Changes, 3)
+	treeFrom, _ := test.Repository.TreeObject(gitplumbing.NewHash(
+		"a1eb2ea76eb7f9bfbde9b243861474421000eb96"))
+	treeTo, _ := test.Repository.TreeObject(gitplumbing.NewHash(
+		"994eac1cd07235bb9815e547a75c84265dea00f5"))
+	// Modified file.
+	changes[0] = &object.Change{From: object.ChangeEntry{
+		Name: "analyser.go",
+		Tree: treeFrom,
+		TreeEntry: object.TreeEntry{
+			Name: "analyser.go",
+			Mode: 0100644,
+			Hash: gitplumbing.NewHash("dc248ba2b22048cc730c571a748e8ffcf7085ab9"),
+		},
+	}, To: object.ChangeEntry{
+		Name: "analyser.go",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: "analyser.go",
+			Mode: 0100644,
+			Hash: gitplumbing.NewHash("baa64828831d174f40140e4b3cfa77d1e917a2c1"),
+		},
+	}}
+	// Two inserted files.
+	changes[1] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
+		Name: "cmd/hercules/main.go",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: "cmd/hercules/main.go",
+			Mode: 0100644,
+			Hash: gitplumbing.NewHash("c29112dbd697ad9b401333b80c18a63951bc18d9"),
+		},
+	},
+	}
+	changes[2] = &object.Change{From: object.ChangeEntry{}, To: object.ChangeEntry{
+		Name: ".travis.yml",
+		Tree: treeTo,
+		TreeEntry: object.TreeEntry{
+			Name: ".travis.yml",
+			Mode: 0100644,
+			Hash: gitplumbing.NewHash("291286b4ac41952cbd1389fda66420ec03c1a9fe"),
+		},
+	},
+	}
+	deps[plumbing.DependencyTreeChanges] = changes
+	fd := fixtures.FileDiff()
+	result, err := fd.Consume(deps)
+	assert.Nil(t, err)
+	deps[plumbing.DependencyFileDiff] = result[plumbing.DependencyFileDiff]
+	deps[core.DependencyCommit], _ = test.Repository.CommitObject(gitplumbing.NewHash(
+		"cce947b98a050c6d356bc6ba95030254914027b1"))
+	deps[core.DependencyIsMerge] = false
+	// First commit by author 0 on day 0.
+	result, err = devs.Consume(deps)
+	assert.Nil(t, result)
+	assert.Nil(t, err)
+	assert.Len(t, devs.days, 1)
+	day := devs.days[0]
+	assert.Len(t, day, 1)
+	dev := day[0]
+	assert.Equal(t, dev.Commits, 1)
+	assert.Equal(t, dev.Added, 847)
+	assert.Equal(t, dev.Removed, 9)
+	assert.Equal(t, dev.Changed, 67)
+
+	// Same changes attributed to author 1 — both devs now have identical stats.
+	deps[identity.DependencyAuthor] = 1
+	result, err = devs.Consume(deps)
+	assert.Nil(t, result)
+	assert.Nil(t, err)
+	assert.Len(t, devs.days, 1)
+	day = devs.days[0]
+	assert.Len(t, day, 2)
+	for i := 0; i < 2; i++ {
+		dev = day[i]
+		assert.Equal(t, dev.Commits, 1)
+		assert.Equal(t, dev.Added, 847)
+		assert.Equal(t, dev.Removed, 9)
+		assert.Equal(t, dev.Changed, 67)
+	}
+
+	// Repeat for author 1 — their counters double, author 0's stay unchanged.
+	result, err = devs.Consume(deps)
+	assert.Nil(t, result)
+	assert.Nil(t, err)
+	assert.Len(t, devs.days, 1)
+	day = devs.days[0]
+	assert.Len(t, day, 2)
+	dev = day[0]
+	assert.Equal(t, dev.Commits, 1)
+	assert.Equal(t, dev.Added, 847)
+	assert.Equal(t, dev.Removed, 9)
+	assert.Equal(t, dev.Changed, 67)
+	dev = day[1]
+	assert.Equal(t, dev.Commits, 2)
+	assert.Equal(t, dev.Added, 847*2)
+	assert.Equal(t, dev.Removed, 9*2)
+	assert.Equal(t, dev.Changed, 67*2)
+
+	// A new day opens a fresh accumulator; day 0 is untouched.
+	deps[plumbing.DependencyDay] = 1
+	result, err = devs.Consume(deps)
+	assert.Nil(t, result)
+	assert.Nil(t, err)
+	assert.Len(t, devs.days, 2)
+	day = devs.days[0]
+	assert.Len(t, day, 2)
+	dev = day[0]
+	assert.Equal(t, dev.Commits, 1)
+	assert.Equal(t, dev.Added, 847)
+	assert.Equal(t, dev.Removed, 9)
+	assert.Equal(t, dev.Changed, 67)
+	dev = day[1]
+	assert.Equal(t, dev.Commits, 2)
+	assert.Equal(t, dev.Added, 847*2)
+	assert.Equal(t, dev.Removed, 9*2)
+	assert.Equal(t, dev.Changed, 67*2)
+	day = devs.days[1]
+	assert.Len(t, day, 1)
+	dev = day[1]
+	assert.Equal(t, dev.Commits, 1)
+	assert.Equal(t, dev.Added, 847)
+	assert.Equal(t, dev.Removed, 9)
+	assert.Equal(t, dev.Changed, 67)
+}
+
+// TestDevsFinalize checks that Finalize() exposes the internal maps unchanged.
+func TestDevsFinalize(t *testing.T) {
+	devs := fixtureDevs()
+	devs.days[1] = map[int]*DevDay{}
+	devs.days[1][1] = &DevDay{10, 20, 30, 40}
+	x := devs.Finalize().(DevsResult)
+	assert.Equal(t, x.Days, devs.days)
+	assert.Equal(t, x.reversedPeopleDict, devs.reversedPeopleDict)
+}
+
+// TestDevsFork checks that Fork() returns the same shared instance (pointer equality).
+func TestDevsFork(t *testing.T) {
+	devs := fixtureDevs()
+	clone := devs.Fork(1)[0].(*DevsAnalysis)
+	assert.True(t, devs == clone)
+}
+
+// TestDevsSerialize pins both serialization formats: the YAML fragment (AuthorMissing
+// rendered as -1, sorted day/dev order) and the protobuf message contents.
+func TestDevsSerialize(t *testing.T) {
+	devs := fixtureDevs()
+	devs.days[1] = map[int]*DevDay{}
+	devs.days[1][0] = &DevDay{10, 20, 30, 40}
+	devs.days[1][1] = &DevDay{1, 2, 3, 4}
+	devs.days[10] = map[int]*DevDay{}
+	devs.days[10][0] = &DevDay{11, 21, 31, 41}
+	devs.days[10][identity.AuthorMissing] = &DevDay{100, 200, 300, 400}
+	res := devs.Finalize().(DevsResult)
+	buffer := &bytes.Buffer{}
+	err := devs.Serialize(res, false, buffer)
+	assert.Nil(t, err)
+	assert.Equal(t, `  days:
+    1:
+      0: [10, 20, 30, 40]
+      1: [1, 2, 3, 4]
+    10:
+      0: [11, 21, 31, 41]
+      -1: [100, 200, 300, 400]
+  people:
+  - "one@srcd"
+  - "two@srcd"
+`, buffer.String())
+
+	buffer = &bytes.Buffer{}
+	err = devs.Serialize(res, true, buffer)
+	assert.Nil(t, err)
+	msg := pb.DevsAnalysisResults{}
+	proto.Unmarshal(buffer.Bytes(), &msg)
+	assert.Equal(t, msg.DevIndex, devs.reversedPeopleDict)
+	assert.Len(t, msg.Days, 2)
+	assert.Len(t, msg.Days[1].Devs, 2)
+	assert.Equal(t, msg.Days[1].Devs[0], &pb.DevDay{
+		Commits: 10, Added: 20, Removed: 30, Changed: 40})
+	assert.Equal(t, msg.Days[1].Devs[1], &pb.DevDay{
+		Commits: 1, Added: 2, Removed: 3, Changed: 4})
+	assert.Len(t, msg.Days[10].Devs, 2)
+	assert.Equal(t, msg.Days[10].Devs[0], &pb.DevDay{
+		Commits: 11, Added: 21, Removed: 31, Changed: 41})
+	assert.Equal(t, msg.Days[10].Devs[-1], &pb.DevDay{
+		Commits: 100, Added: 200, Removed: 300, Changed: 400})
+}
+
+// TestDevsDeserialize checks the protobuf round trip: Serialize then Deserialize
+// reproduces the original result, including the AuthorMissing/-1 mapping.
+func TestDevsDeserialize(t *testing.T) {
+	devs := fixtureDevs()
+	devs.days[1] = map[int]*DevDay{}
+	devs.days[1][0] = &DevDay{10, 20, 30, 40}
+	devs.days[1][1] = &DevDay{1, 2, 3, 4}
+	devs.days[10] = map[int]*DevDay{}
+	devs.days[10][0] = &DevDay{11, 21, 31, 41}
+	devs.days[10][identity.AuthorMissing] = &DevDay{100, 200, 300, 400}
+	res := devs.Finalize().(DevsResult)
+	buffer := &bytes.Buffer{}
+	err := devs.Serialize(res, true, buffer)
+	assert.Nil(t, err)
+	rawres2, err := devs.Deserialize(buffer.Bytes())
+	assert.Nil(t, err)
+	res2 := rawres2.(DevsResult)
+	assert.Equal(t, res, res2)
+}
+
+// TestDevsMergeResults checks merging of two results with overlapping people
+// dictionaries: the joint dictionary is the sorted union, indices are remapped,
+// shared (day, dev) entries are summed, and AuthorMissing is merged as-is.
+func TestDevsMergeResults(t *testing.T) {
+	people1 := [...]string{"1@srcd", "2@srcd"}
+	people2 := [...]string{"3@srcd", "1@srcd"}
+	r1 := DevsResult{
+		Days: map[int]map[int]*DevDay{},
+		reversedPeopleDict: people1[:],
+	}
+	r1.Days[1] = map[int]*DevDay{}
+	r1.Days[1][0] = &DevDay{10, 20, 30, 40}
+	r1.Days[1][1] = &DevDay{1, 2, 3, 4}
+	r1.Days[10] = map[int]*DevDay{}
+	r1.Days[10][0] = &DevDay{11, 21, 31, 41}
+	r1.Days[10][identity.AuthorMissing] = &DevDay{100, 200, 300, 400}
+	r1.Days[11] = map[int]*DevDay{}
+	r1.Days[11][1] = &DevDay{10, 20, 30, 40}
+	r2 := DevsResult{
+		Days: map[int]map[int]*DevDay{},
+		reversedPeopleDict: people2[:],
+	}
+	r2.Days[1] = map[int]*DevDay{}
+	r2.Days[1][0] = &DevDay{10, 20, 30, 40}
+	r2.Days[1][1] = &DevDay{1, 2, 3, 4}
+	r2.Days[2] = map[int]*DevDay{}
+	r2.Days[2][0] = &DevDay{11, 21, 31, 41}
+	r2.Days[2][identity.AuthorMissing] = &DevDay{100, 200, 300, 400}
+	r2.Days[10] = map[int]*DevDay{}
+	r2.Days[10][0] = &DevDay{11, 21, 31, 41}
+	r2.Days[10][identity.AuthorMissing] = &DevDay{100, 200, 300, 400}
+
+	devs := fixtureDevs()
+	rm := devs.MergeResults(r1, r2, nil, nil).(DevsResult)
+	peoplerm := [...]string{"1@srcd", "2@srcd", "3@srcd"}
+	assert.Equal(t, rm.reversedPeopleDict, peoplerm[:])
+	assert.Len(t, rm.Days, 4)
+	assert.Equal(t, rm.Days[11], map[int]*DevDay{1: {10, 20, 30, 40}})
+	assert.Equal(t, rm.Days[2], map[int]*DevDay{
+		identity.AuthorMissing: {100, 200, 300, 400},
+		2: {11, 21, 31, 41},
+	})
+	// "1@srcd" appears in both inputs, so its day-1 stats are summed.
+	assert.Equal(t, rm.Days[1], map[int]*DevDay{
+		0: {11, 22, 33, 44},
+		1: {1, 2, 3, 4},
+		2: {10, 20, 30, 40},
+	})
+	assert.Equal(t, rm.Days[10], map[int]*DevDay{
+		0: {11, 21, 31, 41},
+		2: {11, 21, 31, 41},
+		identity.AuthorMissing: {100*2, 200*2, 300*2, 400*2},
+	})
+}

+ 2 - 2
leaves/file_history.go

@@ -163,8 +163,8 @@ func (history *FileHistory) serializeBinary(result *FileHistoryResult, writer io
 	if err != nil {
 	if err != nil {
 		return err
 		return err
 	}
 	}
-	writer.Write(serialized)
-	return nil
+	_, err = writer.Write(serialized)
+	return err
 }
 }
 
 
 func init() {
 func init() {

+ 2 - 2
leaves/shotness.go

@@ -437,8 +437,8 @@ func (shotness *ShotnessAnalysis) serializeBinary(result *ShotnessResult, writer
 	if err != nil {
 	if err != nil {
 		return err
 		return err
 	}
 	}
-	writer.Write(serialized)
-	return nil
+	_, err = writer.Write(serialized)
+	return err
 }
 }
 
 
 func (shotness *ShotnessAnalysis) extractNodes(root *uast.Node) (map[string]*uast.Node, error) {
 func (shotness *ShotnessAnalysis) extractNodes(root *uast.Node) (map[string]*uast.Node, error) {

+ 3 - 0
requirements.txt

@@ -6,3 +6,6 @@ PyYAML>=3.12,<4.0
 scipy>=0.19.0,<2.0
 scipy>=0.19.0,<2.0
 protobuf>=3.5.0,<4.0
 protobuf>=3.5.0,<4.0
 munch>=2.0
 munch>=2.0
+hdbscan==0.8.18
+ortools==6.9.5824
+fastdtw==0.3.2