Преглед на файлове

Add the docs about hercules-combine and fix some bugs

Vadim Markovtsev преди 7 години
родител
ревизия
c8e1339658
променени са 4 файла, в които са добавени 22 реда и са изтрити 7 реда
  1. 12 1
      README.md
  2. 6 4
      burndown.go
  3. 1 1
      burndown_test.go
  4. 3 1
      labours.py

+ 12 - 1
README.md

@@ -7,7 +7,8 @@ Powered by [go-git](https://github.com/src-d/go-git) and [Babelfish](https://doc
 There are two tools: `hercules` and `labours.py`. The first is the program
 There are two tools: `hercules` and `labours.py`. The first is the program
 written in Go which takes a Git repository and runs a Directed Acyclic Graph (DAG) of [analysis tasks](doc/PIPELINE_ITEMS.md).
 written in Go which takes a Git repository and runs a Directed Acyclic Graph (DAG) of [analysis tasks](doc/PIPELINE_ITEMS.md).
 The second is the Python script which draws some predefined plots. These two tools are normally used together through
 The second is the Python script which draws some predefined plots. These two tools are normally used together through
-a pipe. It is possible to write custom analyses using the plugin system.
+a pipe. It is possible to write custom analyses using the plugin system. It is also possible
+to merge several analysis results.
 
 
 ![Hercules DAG of Burndown analysis](doc/dag.png)
 ![Hercules DAG of Burndown analysis](doc/dag.png)
 <p align="center">The DAG of burndown and couples analyses with UAST diff refining. Generated with <code>hercules -burndown -burndown-people -couples -feature=uast -dry-run -dump-dag doc/dag.dot https://github.com/src-d/hercules</code></p>
 <p align="center">The DAG of burndown and couples analyses with UAST diff refining. Generated with <code>hercules -burndown -burndown-people -couples -feature=uast -dry-run -dump-dag doc/dag.dot https://github.com/src-d/hercules</code></p>
@@ -190,6 +191,16 @@ python3 labours.py -m all
 
 
 Hercules has a plugin system and allows to run custom analyses. See [PLUGINS.md](PLUGINS.md).
 Hercules has a plugin system and allows to run custom analyses. See [PLUGINS.md](PLUGINS.md).
 
 
+### Merging
+
+`hercules-combine` is the tool which merges several analysis results in Protocol Buffers format.
+
+```
+hercules -burndown -pb https://github.com/src-d/go-git > go-git.pb
+hercules -burndown -pb https://github.com/src-d/hercules > hercules.pb
+hercules-combine go-git.pb hercules.pb | python3 labours.py -f pb -m project --resample M
+```
+
 ### Bad unicode errors
 ### Bad unicode errors
 
 
 YAML does not support the whole range of Unicode characters and the parser on `labours.py` side
 YAML does not support the whole range of Unicode characters and the parser on `labours.py` side

+ 6 - 4
burndown.go

@@ -260,6 +260,8 @@ func (analyser *BurndownAnalysis) Finalize() interface{} {
 		PeopleHistories:    analyser.peopleHistories,
 		PeopleHistories:    analyser.peopleHistories,
 		PeopleMatrix:       peopleMatrix,
 		PeopleMatrix:       peopleMatrix,
 		reversedPeopleDict: analyser.reversedPeopleDict,
 		reversedPeopleDict: analyser.reversedPeopleDict,
+		sampling:           analyser.Sampling,
+		granularity:        analyser.Granularity,
 	}
 	}
 }
 }
 
 
@@ -651,8 +653,8 @@ func addBurndownMatrix(matrix [][]int64, granularity, sampling int, daily [][]fl
 }
 }
 
 
 func (analyser *BurndownAnalysis) serializeText(result *BurndownResult, writer io.Writer) {
 func (analyser *BurndownAnalysis) serializeText(result *BurndownResult, writer io.Writer) {
-	fmt.Fprintln(writer, "  granularity:", analyser.Granularity)
-	fmt.Fprintln(writer, "  sampling:", analyser.Sampling)
+	fmt.Fprintln(writer, "  granularity:", result.granularity)
+	fmt.Fprintln(writer, "  sampling:", result.sampling)
 	yaml.PrintMatrix(writer, result.GlobalHistory, 2, "project", true)
 	yaml.PrintMatrix(writer, result.GlobalHistory, 2, "project", true)
 	if len(result.FileHistories) > 0 {
 	if len(result.FileHistories) > 0 {
 		fmt.Fprintln(writer, "  files:")
 		fmt.Fprintln(writer, "  files:")
@@ -678,8 +680,8 @@ func (analyser *BurndownAnalysis) serializeText(result *BurndownResult, writer i
 
 
 func (analyser *BurndownAnalysis) serializeBinary(result *BurndownResult, writer io.Writer) error {
 func (analyser *BurndownAnalysis) serializeBinary(result *BurndownResult, writer io.Writer) error {
 	message := pb.BurndownAnalysisResults{
 	message := pb.BurndownAnalysisResults{
-		Granularity: int32(analyser.Granularity),
-		Sampling:    int32(analyser.Sampling),
+		Granularity: int32(result.granularity),
+		Sampling:    int32(result.sampling),
 	}
 	}
 	if len(result.GlobalHistory) > 0 {
 	if len(result.GlobalHistory) > 0 {
 		message.Project = pb.ToBurndownSparseMatrix(result.GlobalHistory, "project")
 		message.Project = pb.ToBurndownSparseMatrix(result.GlobalHistory, "project")

+ 1 - 1
burndown_test.go

@@ -315,7 +315,7 @@ func TestBurndownConsumeFinalize(t *testing.T) {
 	}
 	}
 }
 }
 
 
-func TestBurndownAnalysisSerialize(t *testing.T) {
+func TestBurndownSerialize(t *testing.T) {
 	burndown := BurndownAnalysis{
 	burndown := BurndownAnalysis{
 		Granularity:  30,
 		Granularity:  30,
 		Sampling:     30,
 		Sampling:     30,

+ 3 - 1
labours.py

@@ -235,7 +235,7 @@ class ProtobufReader(Reader):
         return list(node.index), self._parse_sparse_matrix(node.matrix)
         return list(node.index), self._parse_sparse_matrix(node.matrix)
 
 
     def get_people_coocc(self):
     def get_people_coocc(self):
-        node = self.contents["Couples"].developer_couples
+        node = self.contents["Couples"].people_couples
         return list(node.index), self._parse_sparse_matrix(node.matrix)
         return list(node.index), self._parse_sparse_matrix(node.matrix)
 
 
     def _parse_burndown_matrix(self, matrix):
     def _parse_burndown_matrix(self, matrix):
@@ -281,6 +281,8 @@ def load_burndown(header, name, matrix, resample):
     import pandas
     import pandas
 
 
     start, last, sampling, granularity = header
     start, last, sampling, granularity = header
+    assert sampling > 0
+    assert granularity >= sampling
     start = datetime.fromtimestamp(start)
     start = datetime.fromtimestamp(start)
     last = datetime.fromtimestamp(last)
     last = datetime.fromtimestamp(last)
     print(name, "lifetime index:", calculate_average_lifetime(matrix))
     print(name, "lifetime index:", calculate_average_lifetime(matrix))