
Merge pull request #262 from bobheadxi/master

Add pluggable Logger for pipeline
Vadim Markovtsev, 6 years ago
parent
commit
a4d2b46b68
44 changed files with 554 additions and 102 deletions
  1. Gopkg.lock (+6 -2)
  2. cmd/hercules/plugin.template (+6 -0)
  3. contrib/_plugin_example/churn_analysis.go (+6 -0)
  4. core.go (+8 -0)
  5. doc.go (+9 -0)
  6. internal/core/logger.go (+91 -0)
  7. internal/core/logger_test.go (+66 -0)
  8. internal/core/pipeline.go (+36 -13)
  9. internal/core/pipeline_test.go (+42 -9)
  10. internal/plumbing/blob_cache.go (+18 -11)
  11. internal/plumbing/diff.go (+6 -0)
  12. internal/plumbing/diff_test.go (+5 -4)
  13. internal/plumbing/identity/identity.go (+8 -0)
  14. internal/plumbing/identity/identity_test.go (+5 -0)
  15. internal/plumbing/languages.go (+6 -0)
  16. internal/plumbing/languages_test.go (+7 -2)
  17. internal/plumbing/line_stats.go (+6 -0)
  18. internal/plumbing/line_stats_test.go (+3 -1)
  19. internal/plumbing/renames.go (+9 -5)
  20. internal/plumbing/renames_test.go (+10 -5)
  21. internal/plumbing/ticks.go (+7 -3)
  22. internal/plumbing/ticks_test.go (+8 -10)
  23. internal/plumbing/tree_diff.go (+9 -2)
  24. internal/plumbing/tree_diff_test.go (+5 -0)
  25. internal/plumbing/uast/diff_refiner.go (+6 -0)
  26. internal/plumbing/uast/diff_refiner_test.go (+5 -0)
  27. internal/plumbing/uast/uast.go (+21 -4)
  28. internal/plumbing/uast/uast_test.go (+15 -0)
  29. leaves/burndown.go (+17 -9)
  30. leaves/burndown_test.go (+5 -0)
  31. leaves/comment_sentiment.go (+9 -4)
  32. leaves/comment_sentiment_test.go (+5 -0)
  33. leaves/commits.go (+6 -0)
  34. leaves/commits_test.go (+5 -0)
  35. leaves/couples.go (+15 -4)
  36. leaves/couples_test.go (+5 -0)
  37. leaves/devs.go (+6 -0)
  38. leaves/devs_test.go (+5 -0)
  39. leaves/file_history.go (+10 -3)
  40. leaves/file_history_test.go (+5 -0)
  41. leaves/research/typos.go (+8 -3)
  42. leaves/research/typos_test.go (+5 -0)
  43. leaves/shotness.go (+9 -4)
  44. leaves/shotness_test.go (+10 -4)

+ 6 - 2
Gopkg.lock

@@ -277,9 +277,12 @@
   revision = "f187355171c936ac84a82793659ebb4936bc1c23"
 
 [[projects]]
-  digest = "1:e7e3141ecc5f7eeae9a1a4438044614049ccdd57a302b8793dec0c29202cadce"
+  digest = "1:c5cc4981dbab46f68150ea9772532a7c26f8109112c7302d7cc0a5011eef3276"
   name = "github.com/stretchr/testify"
-  packages = ["assert"]
+  packages = [
+    "assert",
+    "require",
+  ]
   pruneopts = "UT"
   revision = "5b93e2dc01fd8fbf32aa74a198b0ebe78f6f6b6f"
 
@@ -632,6 +635,7 @@
     "github.com/spf13/cobra",
     "github.com/spf13/pflag",
     "github.com/stretchr/testify/assert",
+    "github.com/stretchr/testify/require",
     "golang.org/x/crypto/ssh/terminal",
     "gopkg.in/bblfsh/client-go.v3",
     "gopkg.in/bblfsh/client-go.v3/tools",

+ 6 - 0
cmd/hercules/plugin.template

@@ -34,6 +34,8 @@ type {{.name}} struct {
   hercules.NoopMerger
   // Process each merge commit only once
   hercules.OneShotMergeProcessor
+  // Logger for consistent output
+  l hercules.Logger
 }
 
 // {{.name}}Result is returned by Finalize() and represents the analysis result.
@@ -81,11 +83,15 @@ func ({{.varname}} *{{.name}}) Description() string {
 
 // Configure applies the parameters specified in the command line. Map keys correspond to "Name".
 func ({{.varname}} *{{.name}}) Configure(facts map[string]interface{}) error {
+  if l, exists := facts[hercules.ConfigLogger].(hercules.Logger); exists {
+    {{.varname}}.l = l
+  }
   return nil
 }
 
 // Initialize resets the internal temporary data structures and prepares the object for Consume().
 func ({{.varname}} *{{.name}}) Initialize(repository *git.Repository) error {
+  {{.varname}}.l = hercules.NewLogger()
   {{.varname}}.OneShotMergeProcessor.Initialize()
   return nil
 }
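
With the logger wired up this way, a generated plugin can report through {{.varname}}.l instead of the global log package. A hypothetical Consume body (not part of the template, shown only to illustrate the new field) might read:

  func ({{.varname}} *{{.name}}) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
  	// Use the configured logger for diagnostics instead of the log package.
  	{{.varname}}.l.Infof("%s: received %d dependencies", {{.varname}}.Name(), len(deps))
  	// ... the actual analysis goes here ...
  	return nil, nil
  }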

+ 6 - 0
contrib/_plugin_example/churn_analysis.go

@@ -30,6 +30,8 @@ type ChurnAnalysis struct {
 
 	// references IdentityDetector.ReversedPeopleDict
 	reversedPeopleDict []string
+
+	l hercules.Logger
 }
 
 type editInfo struct {
@@ -105,6 +107,9 @@ func (churn *ChurnAnalysis) Description() string {
 
 // Configure applies the parameters specified in the command line. Map keys correspond to "Name".
 func (churn *ChurnAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[hercules.ConfigLogger].(hercules.Logger); exists {
+		churn.l = l
+	}
 	if val, exists := facts[ConfigChurnTrackPeople].(bool); exists {
 		churn.TrackPeople = val
 	}
@@ -116,6 +121,7 @@ func (churn *ChurnAnalysis) Configure(facts map[string]interface{}) error {
 
 // Initialize resets the internal temporary data structures and prepares the object for Consume().
 func (churn *ChurnAnalysis) Initialize(repository *git.Repository) error {
+	churn.l = hercules.NewLogger()
 	churn.global = []editInfo{}
 	churn.people = map[int][]editInfo{}
 	churn.OneShotMergeProcessor.Initialize()

+ 8 - 0
core.go

@@ -81,6 +81,8 @@ const (
 	ConfigPipelineCommits = core.ConfigPipelineCommits
 	// ConfigTickSize is the number of hours per 'tick'
 	ConfigTickSize = plumbing.ConfigTicksSinceStartTickSize
+	// ConfigLogger is used to set the logger in all pipeline items.
+	ConfigLogger = core.ConfigLogger
 )
 
 // NewPipeline initializes a new instance of Pipeline struct.
@@ -174,3 +176,9 @@ func PathifyFlagValue(flag *pflag.Flag) {
 func EnablePathFlagTypeMasquerade() {
 	core.EnablePathFlagTypeMasquerade()
 }
+
+// Logger is the Hercules logging interface
+type Logger core.Logger
+
+// NewLogger returns an instance of the default Hercules logger
+func NewLogger() core.Logger { return core.NewLogger() }

+ 9 - 0
doc.go

@@ -37,6 +37,15 @@ Finally extract the result:
 
 The actual usage example is cmd/hercules/root.go - the command line tool's code.
 
+You can provide additional options via `facts` on initialization. For example,
+to provide your own logger, enable people-tracking, and set a custom tick size:
+
+  pipe.Initialize(map[string]interface{}{
+    hercules.ConfigLogger:            zap.NewExample().Sugar(),
+    hercules.ConfigTickSize:          12,
+    leaves.ConfigBurndownTrackPeople: true,
+  })
+
 Hercules depends heavily on https://github.com/src-d/go-git and leverages the
 diff algorithm through https://github.com/sergi/go-diff.
 

+ 91 - 0
internal/core/logger.go

@@ -0,0 +1,91 @@
+package core
+
+import (
+	"log"
+	"os"
+	"runtime/debug"
+	"strings"
+)
+
+// ConfigLogger is the key for the pipeline's logger
+const ConfigLogger = "Core.Logger"
+
+// Logger defines the output interface used by Hercules components.
+type Logger interface {
+	Info(...interface{})
+	Infof(string, ...interface{})
+	Warn(...interface{})
+	Warnf(string, ...interface{})
+	Error(...interface{})
+	Errorf(string, ...interface{})
+	Critical(...interface{})
+	Criticalf(string, ...interface{})
+}
+
+// DefaultLogger is the default logger used by a pipeline, and wraps the standard
+// log library.
+type DefaultLogger struct {
+	I *log.Logger
+	W *log.Logger
+	E *log.Logger
+}
+
+// NewLogger returns a configured default logger.
+func NewLogger() *DefaultLogger {
+	return &DefaultLogger{
+		I: log.New(os.Stderr, "[INFO] ", log.LstdFlags),
+		W: log.New(os.Stderr, "[WARN] ", log.LstdFlags),
+		E: log.New(os.Stderr, "[ERROR] ", log.LstdFlags),
+	}
+}
+
+// Info writes to "info" logger.
+func (d *DefaultLogger) Info(v ...interface{}) { d.I.Println(v...) }
+
+// Infof writes to "info" logger with printf-style formatting.
+func (d *DefaultLogger) Infof(f string, v ...interface{}) { d.I.Printf(f, v...) }
+
+// Warn writes to the "warning" logger.
+func (d *DefaultLogger) Warn(v ...interface{}) { d.W.Println(v...) }
+
+// Warnf writes to the "warning" logger with printf-style formatting.
+func (d *DefaultLogger) Warnf(f string, v ...interface{}) { d.W.Printf(f, v...) }
+
+// Error writes to the "error" logger.
+func (d *DefaultLogger) Error(v ...interface{}) { d.E.Println(v...) }
+
+// Errorf writes to the "error" logger with printf-style formatting.
+func (d *DefaultLogger) Errorf(f string, v ...interface{}) { d.E.Printf(f, v...) }
+
+// Critical writes to the "error" logger and logs the current stacktrace.
+func (d *DefaultLogger) Critical(v ...interface{}) {
+	d.E.Println(v...)
+	d.logStacktraceToErr()
+}
+
+// Criticalf writes to the "error" logger with printf-style formatting and logs the
+// current stacktrace.
+func (d *DefaultLogger) Criticalf(f string, v ...interface{}) {
+	d.E.Printf(f, v...)
+	d.logStacktraceToErr()
+}
+
+// logStacktraceToErr prints a stacktrace to the logger's error output.
+// It skips 4 levels that aren't meaningful to a logged stacktrace:
+// * debug.Stack()
+// * core.captureStacktrace()
+// * DefaultLogger::logStacktraceToErr()
+// * DefaultLogger::Critical() or DefaultLogger::Criticalf()
+func (d *DefaultLogger) logStacktraceToErr() {
+	d.E.Println("stacktrace:\n" + strings.Join(captureStacktrace(4), "\n"))
+}
+
+func captureStacktrace(skip int) []string {
+	stack := string(debug.Stack())
+	lines := strings.Split(stack, "\n")
+	linesToSkip := 2*skip + 1
+	if linesToSkip > len(lines) {
+		return lines
+	}
+	return lines[linesToSkip:]
+}
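
Any type that provides these eight methods can be plugged into the pipeline; for instance, a minimal silent logger for tests might look like the following sketch (QuietLogger is illustrative and not part of this change):

  // QuietLogger discards everything logged through it; it satisfies core.Logger.
  type QuietLogger struct{}

  func (QuietLogger) Info(...interface{})              {}
  func (QuietLogger) Infof(string, ...interface{})     {}
  func (QuietLogger) Warn(...interface{})              {}
  func (QuietLogger) Warnf(string, ...interface{})     {}
  func (QuietLogger) Error(...interface{})             {}
  func (QuietLogger) Errorf(string, ...interface{})    {}
  func (QuietLogger) Critical(...interface{})          {}
  func (QuietLogger) Criticalf(string, ...interface{}) {}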

+ 66 - 0
internal/core/logger_test.go

@@ -0,0 +1,66 @@
+package core
+
+import (
+	"bytes"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestLogger(t *testing.T) {
+	var (
+		f = "%s-%s"
+		v = []interface{}{"hello", "world"}
+		l = NewLogger()
+
+		iBuf bytes.Buffer
+		wBuf bytes.Buffer
+		eBuf bytes.Buffer
+	)
+
+	// capture output
+	l.I.SetOutput(&iBuf)
+	l.W.SetOutput(&wBuf)
+	l.E.SetOutput(&eBuf)
+
+	l.Info(v...)
+	assert.Contains(t, iBuf.String(), "[INFO]")
+	iBuf.Reset()
+
+	l.Infof(f, v...)
+	assert.Contains(t, iBuf.String(), "[INFO]")
+	assert.Contains(t, iBuf.String(), "-")
+	iBuf.Reset()
+
+	l.Warn(v...)
+	assert.Contains(t, wBuf.String(), "[WARN]")
+	wBuf.Reset()
+
+	l.Warnf(f, v...)
+	assert.Contains(t, wBuf.String(), "[WARN]")
+	assert.Contains(t, wBuf.String(), "-")
+	wBuf.Reset()
+
+	l.Error(v...)
+	assert.Contains(t, eBuf.String(), "[ERROR]")
+	eBuf.Reset()
+
+	l.Errorf(f, v...)
+	assert.Contains(t, eBuf.String(), "[ERROR]")
+	assert.Contains(t, eBuf.String(), "-")
+	eBuf.Reset()
+
+	l.Critical(v...)
+	assert.Contains(t, eBuf.String(), "[ERROR]")
+	assert.Contains(t, eBuf.String(), "internal/core.TestLogger")
+	assert.Contains(t, eBuf.String(), "internal/core/logger_test.go:53")
+	eBuf.Reset()
+
+	l.Criticalf(f, v...)
+	assert.Contains(t, eBuf.String(), "[ERROR]")
+	assert.Contains(t, eBuf.String(), "-")
+	assert.Contains(t, eBuf.String(), "internal/core.TestLogger")
+	assert.Contains(t, eBuf.String(), "internal/core/logger_test.go:59")
+	println(eBuf.String())
+	eBuf.Reset()
+}

+ 36 - 13
internal/core/pipeline.go

@@ -284,6 +284,9 @@ type Pipeline struct {
 
 	// Feature flags which enable the corresponding items.
 	features map[string]bool
+
+	// The logger for printing output.
+	l Logger
 }
 
 const (
@@ -329,6 +332,7 @@ func NewPipeline(repository *git.Repository) *Pipeline {
 		items:      []PipelineItem{},
 		facts:      map[string]interface{}{},
 		features:   map[string]bool{},
+		l:          NewLogger(),
 	}
 }
 
@@ -548,7 +552,8 @@ func (pipeline *Pipeline) resolve(dumpPath string) {
 						}
 						fmt.Fprintln(os.Stderr, "]")
 					}
-					panic("Failed to resolve pipeline dependencies: ambiguous graph.")
+					pipeline.l.Critical("Failed to resolve pipeline dependencies: ambiguous graph.")
+					return
 				}
 				ambiguousMap[key] = graph.FindParents(key)
 			}
@@ -565,7 +570,8 @@ func (pipeline *Pipeline) resolve(dumpPath string) {
 		for _, key := range item.Requires() {
 			key = "[" + key + "]"
 			if graph.AddEdge(key, name) == 0 {
-				log.Panicf("Unsatisfied dependency: %s -> %s", key, item.Name())
+				pipeline.l.Criticalf("Unsatisfied dependency: %s -> %s", key, item.Name())
+				return
 			}
 		}
 	}
@@ -618,7 +624,8 @@ func (pipeline *Pipeline) resolve(dumpPath string) {
 	}
 	strplan, ok := graph.Toposort()
 	if !ok {
-		panic("Failed to resolve pipeline dependencies: unable to topologically sort the items.")
+		pipeline.l.Critical("Failed to resolve pipeline dependencies: unable to topologically sort the items.")
+		return
 	}
 	pipeline.items = make([]PipelineItem, 0, len(pipeline.items))
 	for _, key := range strplan {
@@ -631,7 +638,7 @@ func (pipeline *Pipeline) resolve(dumpPath string) {
 		// fmt.Fprint(os.Stderr, graphCopy.DebugDump())
 		ioutil.WriteFile(dumpPath, []byte(graphCopy.Serialize(strplan)), 0666)
 		absPath, _ := filepath.Abs(dumpPath)
-		log.Printf("Wrote the DAG to %s\n", absPath)
+		pipeline.l.Infof("Wrote the DAG to %s\n", absPath)
 	}
 }
 
@@ -644,24 +651,36 @@ func (pipeline *Pipeline) Initialize(facts map[string]interface{}) error {
 		if !cleanReturn {
 			remotes, _ := pipeline.repository.Remotes()
 			if len(remotes) > 0 {
-				log.Printf("Failed to initialize the pipeline on %s", remotes[0].Config().URLs)
+				pipeline.l.Errorf("Failed to initialize the pipeline on %s", remotes[0].Config().URLs)
 			}
 		}
 	}()
 	if facts == nil {
 		facts = map[string]interface{}{}
 	}
+
+	// Set the logger from the facts if one was provided; otherwise expose the
+	// pipeline's own logger to all analysis tasks by storing it as a fact.
+	if l, exists := facts[ConfigLogger].(Logger); exists {
+		pipeline.l = l
+	} else {
+		facts[ConfigLogger] = pipeline.l
+	}
+
 	if _, exists := facts[ConfigPipelineCommits]; !exists {
 		var err error
 		facts[ConfigPipelineCommits], err = pipeline.Commits(false)
 		if err != nil {
-			log.Panicf("failed to list the commits: %v", err)
+			pipeline.l.Errorf("failed to list the commits: %v", err)
+			return err
 		}
 	}
 	pipeline.PrintActions, _ = facts[ConfigPipelinePrintActions].(bool)
 	if val, exists := facts[ConfigPipelineHibernationDistance].(int); exists {
 		if val < 0 {
-			log.Panicf("--hibernation-distance cannot be negative (got %d)", val)
+			err := fmt.Errorf("--hibernation-distance cannot be negative (got %d)", val)
+			pipeline.l.Error(err)
+			return err
 		}
 		pipeline.HibernationDistance = val
 	}
@@ -712,7 +731,7 @@ func (pipeline *Pipeline) Run(commits []*object.Commit) (map[LeafPipelineItem]in
 		if !cleanReturn {
 			remotes, _ := pipeline.repository.Remotes()
 			if len(remotes) > 0 {
-				log.Printf("Failed to run the pipeline on %s", remotes[0].Config().URLs)
+				pipeline.l.Errorf("Failed to run the pipeline on %s", remotes[0].Config().URLs)
 			}
 		}
 	}()
@@ -788,14 +807,16 @@ func (pipeline *Pipeline) Run(commits []*object.Commit) (map[LeafPipelineItem]in
 				update, err := item.Consume(state)
 				runTimePerItem[item.Name()] += time.Now().Sub(startTime).Seconds()
 				if err != nil {
-					log.Printf("%s failed on commit #%d (%d) %s\n",
-						item.Name(), commitIndex+1, index+1, step.Commit.Hash.String())
+					pipeline.l.Errorf("%s failed on commit #%d (%d) %s: %v\n",
+						item.Name(), commitIndex+1, index+1, step.Commit.Hash.String(), err)
 					return nil, err
 				}
 				for _, key := range item.Provides() {
 					val, ok := update[key]
 					if !ok {
-						log.Panicf("%s: Consume() did not return %s", item.Name(), key)
+						err := fmt.Errorf("%s: Consume() did not return %s", item.Name(), key)
+						pipeline.l.Critical(err)
+						return nil, err
 					}
 					state[key] = val
 				}
@@ -834,7 +855,8 @@ func (pipeline *Pipeline) Run(commits []*object.Commit) (map[LeafPipelineItem]in
 						startTime := time.Now()
 						err := hi.Hibernate()
 						if err != nil {
-							log.Panicf("Failed to hibernate %s: %v\n", item.Name(), err)
+							pipeline.l.Errorf("Failed to hibernate %s: %v\n", item.Name(), err)
+							return nil, err
 						}
 						runTimePerItem[item.Name()+".Hibernation"] += time.Now().Sub(startTime).Seconds()
 					}
@@ -847,7 +869,8 @@ func (pipeline *Pipeline) Run(commits []*object.Commit) (map[LeafPipelineItem]in
 						startTime := time.Now()
 						err := hi.Boot()
 						if err != nil {
-							log.Panicf("Failed to boot %s: %v\n", item.Name(), err)
+							pipeline.l.Errorf("Failed to boot %s: %v\n", item.Name(), err)
+							return nil, err
 						}
 						runTimePerItem[item.Name()+".Hibernation"] += time.Now().Sub(startTime).Seconds()
 					}
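
The Initialize() change above means callers can either supply their own logger or rely on the default, which is written back into the facts so that every item's Configure() receives the same instance. A minimal sketch using the names re-exported in core.go earlier in this change (repository and myLogger are placeholders):

  pipe := hercules.NewPipeline(repository)

  // Rely on the default logger; it is stored under hercules.ConfigLogger
  // so all pipeline items share it.
  err := pipe.Initialize(map[string]interface{}{})

  // Or pass any value implementing hercules.Logger.
  err = pipe.Initialize(map[string]interface{}{
  	hercules.ConfigLogger: myLogger,
  })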

+ 42 - 9
internal/core/pipeline_test.go

@@ -11,6 +11,7 @@ import (
 	"time"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"gopkg.in/src-d/go-git.v4"
 	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/src-d/go-git.v4/plumbing/object"
@@ -32,6 +33,7 @@ type testPipelineItem struct {
 	InitializeRaises bool
 	InitializePanics bool
 	ConsumePanics    bool
+	Logger           Logger
 }
 
 func (item *testPipelineItem) Name() string {
@@ -51,6 +53,9 @@ func (item *testPipelineItem) Configure(facts map[string]interface{}) error {
 	if item.ConfigureRaises {
 		return errors.New("test1")
 	}
+	if l, ok := facts[ConfigLogger].(Logger); ok {
+		item.Logger = l
+	}
 	return nil
 }
 
@@ -272,13 +277,13 @@ func TestPipelineErrors(t *testing.T) {
 	pipeline.AddItem(item)
 	item.ConfigureRaises = true
 	err := pipeline.Initialize(map[string]interface{}{})
-	assert.NotNil(t, err)
+	assert.Error(t, err)
 	assert.Contains(t, err.Error(), "configure")
 	assert.Contains(t, err.Error(), "test1")
 	item.ConfigureRaises = false
 	item.InitializeRaises = true
 	err = pipeline.Initialize(map[string]interface{}{})
-	assert.NotNil(t, err)
+	assert.Error(t, err)
 	assert.Contains(t, err.Error(), "initialize")
 	assert.Contains(t, err.Error(), "test2")
 	item.InitializeRaises = false
@@ -286,17 +291,42 @@ func TestPipelineErrors(t *testing.T) {
 	assert.Panics(t, func() { pipeline.Initialize(map[string]interface{}{}) })
 }
 
+func TestPipelineInitialize(t *testing.T) {
+	t.Run("without logger fact", func(t *testing.T) {
+		pipeline := NewPipeline(test.Repository)
+		item := &testPipelineItem{}
+		pipeline.AddItem(item)
+		require.NoError(t, pipeline.Initialize(map[string]interface{}{}))
+		// pipeline logger should be initialized, and item logger should be the same
+		require.NotNil(t, pipeline.l)
+		assert.Equal(t, pipeline.l, item.Logger)
+	})
+
+	t.Run("with logger fact", func(t *testing.T) {
+		pipeline := NewPipeline(test.Repository)
+		item := &testPipelineItem{}
+		logger := NewLogger()
+		pipeline.AddItem(item)
+		require.NoError(t, pipeline.Initialize(map[string]interface{}{
+			ConfigLogger: logger,
+		}))
+		// pipeline logger should be set, and the item logger should be the same
+		assert.Equal(t, logger, pipeline.l)
+		assert.Equal(t, logger, item.Logger)
+	})
+}
+
 func TestPipelineRun(t *testing.T) {
 	pipeline := NewPipeline(test.Repository)
 	item := &testPipelineItem{}
 	pipeline.AddItem(item)
-	assert.Nil(t, pipeline.Initialize(map[string]interface{}{}))
+	assert.NoError(t, pipeline.Initialize(map[string]interface{}{}))
 	assert.True(t, item.Initialized)
 	commits := make([]*object.Commit, 1)
 	commits[0], _ = test.Repository.CommitObject(plumbing.NewHash(
 		"af9ddc0db70f09f3f27b4b98e415592a7485171c"))
 	result, err := pipeline.Run(commits)
-	assert.Nil(t, err)
+	assert.NoError(t, err)
 	assert.Equal(t, 2, len(result))
 	assert.Equal(t, item, result[item].(*testPipelineItem))
 	common := result[nil].(*CommonAnalysisResult)
@@ -460,11 +490,12 @@ func TestPipelineDeps(t *testing.T) {
 	commits[0], _ = test.Repository.CommitObject(plumbing.NewHash(
 		"af9ddc0db70f09f3f27b4b98e415592a7485171c"))
 	result, err := pipeline.Run(commits)
-	assert.Nil(t, err)
+	assert.NoError(t, err)
 	assert.True(t, result[item1].(bool))
 	assert.Equal(t, result[item2], item2)
 	item1.TestNilConsumeReturn = true
-	assert.Panics(t, func() { pipeline.Run(commits) })
+	_, err = pipeline.Run(commits)
+	assert.Error(t, err)
 }
 
 func TestPipelineDeployFeatures(t *testing.T) {
@@ -875,13 +906,15 @@ func TestPipelineRunHibernation(t *testing.T) {
 	}
 	pipeline.PrintActions = true
 	_, err := pipeline.Run(commits)
-	assert.Nil(t, err)
+	assert.NoError(t, err)
 	assert.True(t, item.Hibernated)
 	assert.True(t, item.Booted)
 	item.RaiseHibernateError = true
-	assert.Panics(t, func() { pipeline.Run(commits) })
+	_, err = pipeline.Run(commits)
+	assert.Error(t, err)
 	item.RaiseHibernateError = false
 	pipeline.Run(commits)
 	item.RaiseBootError = true
-	assert.Panics(t, func() { pipeline.Run(commits) })
+	_, err = pipeline.Run(commits)
+	assert.Error(t, err)
 }

+ 18 - 11
internal/plumbing/blob_cache.go

@@ -5,7 +5,6 @@ import (
 	"fmt"
 	"io"
 	"io/ioutil"
-	"log"
 
 	"github.com/pkg/errors"
 	"gopkg.in/src-d/go-git.v4"
@@ -87,6 +86,8 @@ type BlobCache struct {
 
 	repository *git.Repository
 	cache      map[plumbing.Hash]*CachedBlob
+
+	l core.Logger
 }
 
 const (
@@ -133,6 +134,11 @@ func (blobCache *BlobCache) ListConfigurationOptions() []core.ConfigurationOptio
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (blobCache *BlobCache) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		blobCache.l = l
+	} else {
+		blobCache.l = core.NewLogger()
+	}
 	if val, exists := facts[ConfigBlobCacheFailOnMissingSubmodules].(bool); exists {
 		blobCache.FailOnMissingSubmodules = val
 	}
@@ -142,6 +148,7 @@ func (blobCache *BlobCache) Configure(facts map[string]interface{}) error {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (blobCache *BlobCache) Initialize(repository *git.Repository) error {
+	blobCache.l = core.NewLogger()
 	blobCache.repository = repository
 	blobCache.cache = map[plumbing.Hash]*CachedBlob{}
 	return nil
@@ -161,7 +168,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 	for _, change := range changes {
 		action, err := change.Action()
 		if err != nil {
-			log.Printf("no action in %s\n", change.To.TreeEntry.Hash)
+			blobCache.l.Errorf("no action in %s\n", change.To.TreeEntry.Hash)
 			return nil, err
 		}
 		var exists bool
@@ -172,7 +179,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 			newCache[change.To.TreeEntry.Hash] = &CachedBlob{}
 			blob, err = blobCache.getBlob(&change.To, commit.File)
 			if err != nil {
-				log.Printf("file to %s %s: %v\n", change.To.Name, change.To.TreeEntry.Hash, err)
+				blobCache.l.Errorf("file to %s %s: %v\n", change.To.Name, change.To.TreeEntry.Hash, err)
 			} else {
 				cb := &CachedBlob{Blob: *blob}
 				err = cb.Cache()
@@ -180,7 +187,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 					cache[change.To.TreeEntry.Hash] = cb
 					newCache[change.To.TreeEntry.Hash] = cb
 				} else {
-					log.Printf("file to %s %s: %v\n", change.To.Name, change.To.TreeEntry.Hash, err)
+					blobCache.l.Errorf("file to %s %s: %v\n", change.To.Name, change.To.TreeEntry.Hash, err)
 				}
 			}
 		case merkletrie.Delete:
@@ -191,7 +198,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 				blob, err = blobCache.getBlob(&change.From, commit.File)
 				if err != nil {
 					if err.Error() != plumbing.ErrObjectNotFound.Error() {
-						log.Printf("file from %s %s: %v\n", change.From.Name,
+						blobCache.l.Errorf("file from %s %s: %v\n", change.From.Name,
 							change.From.TreeEntry.Hash, err)
 					} else {
 						blob, err = internal.CreateDummyBlob(change.From.TreeEntry.Hash)
@@ -203,7 +210,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 					if err == nil {
 						cache[change.From.TreeEntry.Hash] = cb
 					} else {
-						log.Printf("file from %s %s: %v\n", change.From.Name,
+						blobCache.l.Errorf("file from %s %s: %v\n", change.From.Name,
 							change.From.TreeEntry.Hash, err)
 					}
 				}
@@ -213,7 +220,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 			cache[change.To.TreeEntry.Hash] = &CachedBlob{}
 			newCache[change.To.TreeEntry.Hash] = &CachedBlob{}
 			if err != nil {
-				log.Printf("file to %s: %v\n", change.To.Name, err)
+				blobCache.l.Errorf("file to %s: %v\n", change.To.Name, err)
 			} else {
 				cb := &CachedBlob{Blob: *blob}
 				err = cb.Cache()
@@ -221,7 +228,7 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 					cache[change.To.TreeEntry.Hash] = cb
 					newCache[change.To.TreeEntry.Hash] = cb
 				} else {
-					log.Printf("file to %s: %v\n", change.To.Name, err)
+					blobCache.l.Errorf("file to %s: %v\n", change.To.Name, err)
 				}
 			}
 			cache[change.From.TreeEntry.Hash], exists =
@@ -230,14 +237,14 @@ func (blobCache *BlobCache) Consume(deps map[string]interface{}) (map[string]int
 				cache[change.From.TreeEntry.Hash] = &CachedBlob{}
 				blob, err = blobCache.getBlob(&change.From, commit.File)
 				if err != nil {
-					log.Printf("file from %s: %v\n", change.From.Name, err)
+					blobCache.l.Errorf("file from %s: %v\n", change.From.Name, err)
 				} else {
 					cb := &CachedBlob{Blob: *blob}
 					err = cb.Cache()
 					if err == nil {
 						cache[change.From.TreeEntry.Hash] = cb
 					} else {
-						log.Printf("file from %s: %v\n", change.From.Name, err)
+						blobCache.l.Errorf("file from %s: %v\n", change.From.Name, err)
 					}
 				}
 			}
@@ -279,7 +286,7 @@ func (blobCache *BlobCache) getBlob(entry *object.ChangeEntry, fileGetter FileGe
 
 	if err != nil {
 		if err.Error() != plumbing.ErrObjectNotFound.Error() {
-			log.Printf("getBlob(%s)\n", entry.TreeEntry.Hash.String())
+			blobCache.l.Errorf("getBlob(%s)\n", entry.TreeEntry.Hash.String())
 			return nil, err
 		}
 		if entry.TreeEntry.Mode != 0160000 {

+ 6 - 0
internal/plumbing/diff.go

@@ -18,6 +18,8 @@ type FileDiff struct {
 	core.NoopMerger
 	CleanupDisabled  bool
 	WhitespaceIgnore bool
+
+	l core.Logger
 }
 
 const (
@@ -84,6 +86,9 @@ func (diff *FileDiff) ListConfigurationOptions() []core.ConfigurationOption {
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (diff *FileDiff) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		diff.l = l
+	}
 	if val, exists := facts[ConfigFileDiffDisableCleanup].(bool); exists {
 		diff.CleanupDisabled = val
 	}
@@ -96,6 +101,7 @@ func (diff *FileDiff) Configure(facts map[string]interface{}) error {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (diff *FileDiff) Initialize(repository *git.Repository) error {
+	diff.l = core.NewLogger()
 	return nil
 }
 

+ 5 - 4
internal/plumbing/diff_test.go

@@ -27,10 +27,11 @@ func TestFileDiffMeta(t *testing.T) {
 	assert.Len(t, fd.ListConfigurationOptions(), 2)
 	assert.Equal(t, fd.ListConfigurationOptions()[0].Name, items.ConfigFileDiffDisableCleanup)
 	assert.Equal(t, fd.ListConfigurationOptions()[1].Name, items.ConfigFileWhitespaceIgnore)
-	facts := map[string]interface{}{}
-	facts[items.ConfigFileDiffDisableCleanup] = true
-	facts[items.ConfigFileWhitespaceIgnore] = true
-	fd.Configure(facts)
+	assert.NoError(t, fd.Configure(map[string]interface{}{
+		core.ConfigLogger:                  core.NewLogger(),
+		items.ConfigFileDiffDisableCleanup: true,
+		items.ConfigFileWhitespaceIgnore:   true,
+	}))
 	assert.True(t, fd.CleanupDisabled)
 	assert.True(t, fd.WhitespaceIgnore)
 }

+ 8 - 0
internal/plumbing/identity/identity.go

@@ -21,6 +21,8 @@ type Detector struct {
 	PeopleDict map[string]int
 	// ReversedPeopleDict maps developer id -> description
 	ReversedPeopleDict []string
+
+	l core.Logger
 }
 
 const (
@@ -84,6 +86,11 @@ func (detector *Detector) ListConfigurationOptions() []core.ConfigurationOption
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (detector *Detector) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		detector.l = l
+	} else {
+		detector.l = core.NewLogger()
+	}
 	if val, exists := facts[FactIdentityDetectorPeopleDict].(map[string]int); exists {
 		detector.PeopleDict = val
 	}
@@ -116,6 +123,7 @@ func (detector *Detector) Configure(facts map[string]interface{}) error {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (detector *Detector) Initialize(repository *git.Repository) error {
+	detector.l = core.NewLogger()
 	return nil
 }
 

+ 5 - 0
internal/plumbing/identity/identity_test.go

@@ -41,6 +41,11 @@ func TestIdentityDetectorMeta(t *testing.T) {
 	opts := id.ListConfigurationOptions()
 	assert.Len(t, opts, 1)
 	assert.Equal(t, opts[0].Name, ConfigIdentityDetectorPeopleDictPath)
+	logger := core.NewLogger()
+	assert.NoError(t, id.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, id.l)
 }
 
 func TestIdentityDetectorConfigure(t *testing.T) {

+ 6 - 0
internal/plumbing/languages.go

@@ -15,6 +15,8 @@ import (
 // LanguagesDetection run programming language detection over the changed files.
 type LanguagesDetection struct {
 	core.NoopMerger
+
+	l core.Logger
 }
 
 const (
@@ -50,12 +52,16 @@ func (langs *LanguagesDetection) ListConfigurationOptions() []core.Configuration
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (langs *LanguagesDetection) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		langs.l = l
+	}
 	return nil
 }
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (langs *LanguagesDetection) Initialize(repository *git.Repository) error {
+	langs.l = core.NewLogger()
 	return nil
 }
 

+ 7 - 2
internal/plumbing/languages_test.go

@@ -20,8 +20,13 @@ func TestLanguagesDetectionMeta(t *testing.T) {
 	assert.Equal(t, ls.Requires()[1], DependencyBlobCache)
 	opts := ls.ListConfigurationOptions()
 	assert.Len(t, opts, 0)
-	assert.Nil(t, ls.Configure(nil))
-	assert.Nil(t, ls.Initialize(nil))
+	assert.NoError(t, ls.Configure(nil))
+	logger := core.NewLogger()
+	assert.NoError(t, ls.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, ls.l)
+	assert.NoError(t, ls.Initialize(nil))
 }
 
 func TestLanguagesDetectionRegistration(t *testing.T) {

+ 6 - 0
internal/plumbing/line_stats.go

@@ -14,6 +14,8 @@ import (
 // LinesStatsCalculator measures line statistics for each text file in the commit.
 type LinesStatsCalculator struct {
 	core.NoopMerger
+
+	l core.Logger
 }
 
 // LineStats holds the numbers of inserted, deleted and changed lines.
@@ -60,12 +62,16 @@ func (lsc *LinesStatsCalculator) ListConfigurationOptions() []core.Configuration
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (lsc *LinesStatsCalculator) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		lsc.l = l
+	}
 	return nil
 }
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (lsc *LinesStatsCalculator) Initialize(repository *git.Repository) error {
+	lsc.l = core.NewLogger()
 	return nil
 }
 

+ 3 - 1
internal/plumbing/line_stats_test.go

@@ -23,7 +23,9 @@ func TestLinesStatsMeta(t *testing.T) {
 	assert.Equal(t, ra.Requires()[1], items.DependencyBlobCache)
 	assert.Equal(t, ra.Requires()[2], items.DependencyFileDiff)
 	assert.Nil(t, ra.ListConfigurationOptions())
-	assert.Nil(t, ra.Configure(nil))
+	assert.NoError(t, ra.Configure(map[string]interface{}{
+		core.ConfigLogger: core.NewLogger(),
+	}))
 	for _, f := range ra.Fork(10) {
 		assert.Equal(t, f, ra)
 	}

+ 9 - 5
internal/plumbing/renames.go

@@ -1,7 +1,6 @@
 package plumbing
 
 import (
-	"log"
 	"path/filepath"
 	"sort"
 	"strings"
@@ -30,6 +29,8 @@ type RenameAnalysis struct {
 	SimilarityThreshold int
 
 	repository *git.Repository
+
+	l core.Logger
 }
 
 const (
@@ -92,6 +93,9 @@ func (ra *RenameAnalysis) ListConfigurationOptions() []core.ConfigurationOption
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (ra *RenameAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		ra.l = l
+	}
 	if val, exists := facts[ConfigRenameAnalysisSimilarityThreshold].(int); exists {
 		ra.SimilarityThreshold = val
 	}
@@ -101,8 +105,9 @@ func (ra *RenameAnalysis) Configure(facts map[string]interface{}) error {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (ra *RenameAnalysis) Initialize(repository *git.Repository) error {
+	ra.l = core.NewLogger()
 	if ra.SimilarityThreshold < 0 || ra.SimilarityThreshold > 100 {
-		log.Printf("Warning: adjusted the similarity threshold to %d\n",
+		ra.l.Warnf("adjusted the similarity threshold to %d\n",
 			RenameAnalysisDefaultThreshold)
 		ra.SimilarityThreshold = RenameAnalysisDefaultThreshold
 	}
@@ -384,9 +389,8 @@ func (ra *RenameAnalysis) blobsAreClose(blob1 *CachedBlob, blob2 *CachedBlob) (b
 	cleanReturn := false
 	defer func() {
 		if !cleanReturn {
-			log.Println()
-			log.Println(blob1.Hash.String())
-			log.Println(blob2.Hash.String())
+			ra.l.Warnf("\nunclean return detected for blobs '%s' and '%s'\n",
+				blob1.Hash.String(), blob2.Hash.String())
 		}
 	}()
 	_, err1 := blob1.CountLines()

+ 10 - 5
internal/plumbing/renames_test.go

@@ -34,12 +34,17 @@ func TestRenameAnalysisMeta(t *testing.T) {
 	assert.Len(t, opts, 1)
 	assert.Equal(t, opts[0].Name, ConfigRenameAnalysisSimilarityThreshold)
 	ra.SimilarityThreshold = 0
-	facts := map[string]interface{}{}
-	facts[ConfigRenameAnalysisSimilarityThreshold] = 70
-	ra.Configure(facts)
+
+	assert.NoError(t, ra.Configure(map[string]interface{}{
+		ConfigRenameAnalysisSimilarityThreshold: 70,
+	}))
 	assert.Equal(t, ra.SimilarityThreshold, 70)
-	delete(facts, ConfigRenameAnalysisSimilarityThreshold)
-	ra.Configure(facts)
+
+	logger := core.NewLogger()
+	assert.NoError(t, ra.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, ra.l)
 	assert.Equal(t, ra.SimilarityThreshold, 70)
 }
 

+ 7 - 3
internal/plumbing/ticks.go

@@ -1,7 +1,6 @@
 package plumbing
 
 import (
-	"log"
 	"time"
 
 	"gopkg.in/src-d/go-git.v4"
@@ -20,6 +19,8 @@ type TicksSinceStart struct {
 	tick0        *time.Time
 	previousTick int
 	commits      map[int][]plumbing.Hash
+
+	l core.Logger
 }
 
 const (
@@ -73,6 +74,9 @@ func (ticks *TicksSinceStart) ListConfigurationOptions() []core.ConfigurationOpt
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (ticks *TicksSinceStart) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		ticks.l = l
+	}
 	if val, exists := facts[ConfigTicksSinceStartTickSize].(int); exists {
 		ticks.TickSize = time.Duration(val) * time.Hour
 	} else {
@@ -89,6 +93,7 @@ func (ticks *TicksSinceStart) Configure(facts map[string]interface{}) error {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (ticks *TicksSinceStart) Initialize(repository *git.Repository) error {
+	ticks.l = core.NewLogger()
 	if ticks.TickSize == 0 {
 		ticks.TickSize = DefaultTicksSinceStartTickSize * time.Hour
 	}
@@ -120,8 +125,7 @@ func (ticks *TicksSinceStart) Consume(deps map[string]interface{}) (map[string]i
 		// our precision is 1 day
 		*ticks.tick0 = commit.Committer.When
 		if ticks.tick0.Unix() < 631152000 { // 01.01.1990, that was 30 years ago
-			log.Println()
-			log.Printf("Warning: suspicious committer timestamp in %s > %s: %d",
+			ticks.l.Warnf("suspicious committer timestamp in %s > %s: %d",
 				ticks.remote, commit.Hash.String(), ticks.tick0.Unix())
 		}
 	}

+ 8 - 10
internal/plumbing/ticks_test.go

@@ -2,8 +2,6 @@ package plumbing
 
 import (
 	"bytes"
-	"log"
-	"os"
 	"strings"
 	"testing"
 	"time"
@@ -33,7 +31,11 @@ func TestTicksSinceStartMeta(t *testing.T) {
 	assert.Equal(t, tss.Provides()[0], DependencyTick)
 	assert.Equal(t, len(tss.Requires()), 0)
 	assert.Len(t, tss.ListConfigurationOptions(), 1)
-	tss.Configure(map[string]interface{}{})
+	logger := core.NewLogger()
+	assert.NoError(t, tss.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, tss.l)
 }
 
 func TestTicksSinceStartRegistration(t *testing.T) {
@@ -189,15 +191,11 @@ func TestTicksSinceStartConsumeZero(t *testing.T) {
 	deps[core.DependencyCommit] = commit
 	deps[core.DependencyIndex] = 0
 	// print warning to log
-	myOutput := &bytes.Buffer{}
-	log.SetOutput(myOutput)
-	defer func() {
-		log.SetOutput(os.Stderr)
-	}()
+	var capture bytes.Buffer
+	tss.l.(*core.DefaultLogger).W.SetOutput(&capture)
 	res, err := tss.Consume(deps)
 	assert.Nil(t, err)
-	output := myOutput.String()
-	assert.Contains(t, output, "Warning")
+	output := capture.String()
 	assert.Contains(t, output, "cce947b98a050c6d356bc6ba95030254914027b1")
 	assert.Contains(t, output, "hercules")
 	// depending on where the contributor clones this project from, the remote

+ 9 - 2
internal/plumbing/tree_diff.go

@@ -3,7 +3,6 @@ package plumbing
 import (
 	"fmt"
 	"io"
-	"log"
 	"path"
 	"regexp"
 	"strings"
@@ -29,6 +28,8 @@ type TreeDiff struct {
 	previousTree   *object.Tree
 	previousCommit plumbing.Hash
 	repository     *git.Repository
+
+	l core.Logger
 }
 
 const (
@@ -120,6 +121,9 @@ func (treediff *TreeDiff) ListConfigurationOptions() []core.ConfigurationOption
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (treediff *TreeDiff) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		treediff.l = l
+	}
 	if val, exists := facts[ConfigTreeDiffEnableBlacklist].(bool); exists && val {
 		treediff.SkipFiles = facts[ConfigTreeDiffBlacklistedPrefixes].([]string)
 	}
@@ -142,6 +146,7 @@ func (treediff *TreeDiff) Configure(facts map[string]interface{}) error {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (treediff *TreeDiff) Initialize(repository *git.Repository) error {
+	treediff.l = core.NewLogger()
 	treediff.previousTree = nil
 	treediff.repository = repository
 	if treediff.Languages == nil {
@@ -165,7 +170,9 @@ func (treediff *TreeDiff) Consume(deps map[string]interface{}) (map[string]inter
 		}
 	}
 	if !pass && treediff.previousCommit != plumbing.ZeroHash {
-		log.Panicf("%s > %s", treediff.previousCommit.String(), commit.Hash.String())
+		err := fmt.Errorf("%s > %s", treediff.previousCommit.String(), commit.Hash.String())
+		treediff.l.Critical(err)
+		return nil, err
 	}
 	tree, err := commit.Tree()
 	if err != nil {

+ 5 - 0
internal/plumbing/tree_diff_test.go

@@ -26,6 +26,11 @@ func TestTreeDiffMeta(t *testing.T) {
 	assert.Equal(t, td.Provides()[0], DependencyTreeChanges)
 	opts := td.ListConfigurationOptions()
 	assert.Len(t, opts, 4)
+	logger := core.NewLogger()
+	assert.NoError(t, td.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, td.l)
 }
 
 func TestTreeDiffConfigure(t *testing.T) {

+ 6 - 0
internal/plumbing/uast/diff_refiner.go

@@ -18,6 +18,8 @@ import (
 // optimal, choose the one which touches less AST nodes.
 type FileDiffRefiner struct {
 	core.NoopMerger
+
+	l core.Logger
 }
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
@@ -54,12 +56,16 @@ func (ref *FileDiffRefiner) ListConfigurationOptions() []core.ConfigurationOptio
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (ref *FileDiffRefiner) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		ref.l = l
+	}
 	return nil
 }
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (ref *FileDiffRefiner) Initialize(repository *git.Repository) error {
+	ref.l = core.NewLogger()
 	return nil
 }
 

+ 5 - 0
internal/plumbing/uast/diff_refiner_test.go

@@ -37,6 +37,11 @@ func TestFileDiffRefinerMeta(t *testing.T) {
 	features := fd.Features()
 	assert.Len(t, features, 1)
 	assert.Equal(t, features[0], FeatureUast)
+	logger := core.NewLogger()
+	assert.NoError(t, fd.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, fd.l)
 }
 
 func TestFileDiffRefinerRegistration(t *testing.T) {

+ 21 - 4
internal/plumbing/uast/uast.go

@@ -6,7 +6,6 @@ import (
 	"fmt"
 	"io"
 	"io/ioutil"
-	"log"
 	"os"
 	"path"
 	"runtime"
@@ -16,7 +15,7 @@ import (
 
 	"github.com/Jeffail/tunny"
 	"github.com/gogo/protobuf/proto"
-	"gopkg.in/bblfsh/client-go.v3"
+	bblfsh "gopkg.in/bblfsh/client-go.v3"
 	"gopkg.in/bblfsh/sdk.v2/uast/nodes"
 	"gopkg.in/bblfsh/sdk.v2/uast/nodes/nodesproto"
 	"gopkg.in/src-d/go-git.v4"
@@ -41,6 +40,8 @@ type Extractor struct {
 
 	clients []*bblfsh.Client
 	pool    *tunny.Pool
+
+	l core.Logger
 }
 
 const (
@@ -159,6 +160,9 @@ func (exr *Extractor) ListConfigurationOptions() []core.ConfigurationOption {
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (exr *Extractor) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		exr.l = l
+	}
 	if val, exists := facts[ConfigUASTEndpoint].(string); exists {
 		exr.Endpoint = val
 	}
@@ -186,6 +190,7 @@ func (exr *Extractor) Configure(facts map[string]interface{}) error {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (exr *Extractor) Initialize(repository *git.Repository) error {
+	exr.l = core.NewLogger()
 	if exr.Context == nil {
 		exr.Context = func() (context.Context, context.CancelFunc) {
 			return context.WithTimeout(context.Background(),
@@ -207,7 +212,7 @@ func (exr *Extractor) Initialize(repository *git.Repository) error {
 		client, err := bblfsh.NewClient(exr.Endpoint)
 		if err != nil {
 			if err.Error() == "context deadline exceeded" {
-				log.Println("Looks like the Babelfish server is not running. Please refer " +
+				exr.l.Error("Looks like the Babelfish server is not running. Please refer " +
 					"to https://docs.sourced.tech/babelfish/using-babelfish/getting-started#running-with-docker-recommended")
 			}
 			return err
@@ -289,7 +294,7 @@ func (exr *Extractor) Consume(deps map[string]interface{}) (map[string]interface
 		if exr.FailOnErrors {
 			return nil, errors.New(joined)
 		}
-		log.Println(joined)
+		exr.l.Error(joined)
 	}
 	return map[string]interface{}{DependencyUasts: uasts}, nil
 }
@@ -362,6 +367,8 @@ const (
 type Changes struct {
 	core.NoopMerger
 	cache map[plumbing.Hash]nodes.Node
+
+	l core.Logger
 }
 
 // Name of this PipelineItem. Uniquely identifies the type, used for mapping keys, etc.
@@ -392,12 +399,16 @@ func (uc *Changes) ListConfigurationOptions() []core.ConfigurationOption {
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (uc *Changes) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		uc.l = l
+	}
 	return nil
 }
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (uc *Changes) Initialize(repository *git.Repository) error {
+	uc.l = core.NewLogger()
 	uc.cache = map[plumbing.Hash]nodes.Node{}
 	return nil
 }
@@ -463,6 +474,8 @@ type ChangesSaver struct {
 
 	repository *git.Repository
 	result     [][]Change
+
+	l core.Logger
 }
 
 const (
@@ -515,6 +528,9 @@ func (saver *ChangesSaver) Description() string {
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (saver *ChangesSaver) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		saver.l = l
+	}
 	if val, exists := facts[ConfigUASTChangesSaverOutputPath]; exists {
 		saver.OutputPath = val.(string)
 	}
@@ -524,6 +540,7 @@ func (saver *ChangesSaver) Configure(facts map[string]interface{}) error {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (saver *ChangesSaver) Initialize(repository *git.Repository) error {
+	saver.l = core.NewLogger()
 	saver.repository = repository
 	saver.result = [][]Change{}
 	saver.OneShotMergeProcessor.Initialize()

+ 15 - 0
internal/plumbing/uast/uast_test.go

@@ -60,6 +60,11 @@ func TestUASTExtractorMeta(t *testing.T) {
 	feats := exr.Features()
 	assert.Len(t, feats, 1)
 	assert.Equal(t, feats[0], FeatureUast)
+	logger := core.NewLogger()
+	assert.NoError(t, exr.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, exr.l)
 }
 
 func TestUASTExtractorConfiguration(t *testing.T) {
@@ -239,6 +244,11 @@ func TestUASTChangesMeta(t *testing.T) {
 	assert.Equal(t, ch.Requires()[1], items.DependencyTreeChanges)
 	opts := ch.ListConfigurationOptions()
 	assert.Len(t, opts, 0)
+	logger := core.NewLogger()
+	assert.NoError(t, ch.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, ch.l)
 }
 
 func TestUASTChangesRegistration(t *testing.T) {
@@ -369,6 +379,11 @@ func TestUASTChangesSaverMeta(t *testing.T) {
 	assert.Len(t, opts, 1)
 	assert.Equal(t, opts[0].Name, ConfigUASTChangesSaverOutputPath)
 	assert.Equal(t, chs.Flag(), "dump-uast-changes")
+	logger := core.NewLogger()
+	assert.NoError(t, chs.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, chs.l)
 }
 
 func TestUASTChangesSaverConfiguration(t *testing.T) {

+ 17 - 9
leaves/burndown.go

@@ -107,6 +107,8 @@ type BurndownAnalysis struct {
 	tickSize time.Duration
 	// references IdentityDetector.ReversedPeopleDict
 	reversedPeopleDict []string
+
+	l core.Logger
 }
 
 // BurndownResult carries the result of running BurndownAnalysis - it is returned by
@@ -255,6 +257,11 @@ func (analyser *BurndownAnalysis) ListConfigurationOptions() []core.Configuratio
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (analyser *BurndownAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		analyser.l = l
+	} else {
+		analyser.l = core.NewLogger()
+	}
 	if val, exists := facts[ConfigBurndownGranularity].(int); exists {
 		analyser.Granularity = val
 	}
@@ -307,24 +314,25 @@ func (analyser *BurndownAnalysis) Description() string {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (analyser *BurndownAnalysis) Initialize(repository *git.Repository) error {
+	analyser.l = core.NewLogger()
 	if analyser.Granularity <= 0 {
-		log.Printf("Warning: adjusted the granularity to %d ticks\n",
+		analyser.l.Warnf("adjusted the granularity to %d ticks\n",
 			DefaultBurndownGranularity)
 		analyser.Granularity = DefaultBurndownGranularity
 	}
 	if analyser.Sampling <= 0 {
-		log.Printf("Warning: adjusted the sampling to %d ticks\n",
+		analyser.l.Warnf("adjusted the sampling to %d ticks\n",
 			DefaultBurndownGranularity)
 		analyser.Sampling = DefaultBurndownGranularity
 	}
 	if analyser.Sampling > analyser.Granularity {
-		log.Printf("Warning: granularity may not be less than sampling, adjusted to %d\n",
+		analyser.l.Warnf("granularity may not be less than sampling, adjusted to %d\n",
 			analyser.Granularity)
 		analyser.Sampling = analyser.Granularity
 	}
 	if analyser.tickSize == 0 {
 		def := items.DefaultTicksSinceStartTickSize * time.Hour
-		log.Printf("Warning: tick size was not set, adjusted to %v\n", def)
+		analyser.l.Warnf("tick size was not set, adjusted to %v\n", def)
 		analyser.tickSize = items.DefaultTicksSinceStartTickSize * time.Hour
 	}
 	analyser.repository = repository
@@ -1335,7 +1343,7 @@ func (analyser *BurndownAnalysis) handleModification(
 
 	thisDiffs := diffs[change.To.Name]
 	if file.Len() != thisDiffs.OldLinesOfCode {
-		log.Printf("====TREE====\n%s", file.Dump())
+		analyser.l.Infof("====TREE====\n%s", file.Dump())
 		return fmt.Errorf("%s: internal integrity error src %d != %d %s -> %s",
 			change.To.Name, thisDiffs.OldLinesOfCode, file.Len(),
 			change.From.TreeEntry.Hash.String(), change.To.TreeEntry.Hash.String())
@@ -1366,13 +1374,13 @@ func (analyser *BurndownAnalysis) handleModification(
 		}
 		length := utf8.RuneCountInString(edit.Text)
 		debugError := func() {
-			log.Printf("%s: internal diff error\n", change.To.Name)
-			log.Printf("Update(%d, %d, %d (0), %d (0))\n", analyser.tick, position,
+			analyser.l.Errorf("%s: internal diff error\n", change.To.Name)
+			analyser.l.Errorf("Update(%d, %d, %d (0), %d (0))\n", analyser.tick, position,
 				length, utf8.RuneCountInString(pending.Text))
 			if dumpBefore != "" {
-				log.Printf("====TREE BEFORE====\n%s====END====\n", dumpBefore)
+				analyser.l.Errorf("====TREE BEFORE====\n%s====END====\n", dumpBefore)
 			}
-			log.Printf("====TREE AFTER====\n%s====END====\n", file.Dump())
+			analyser.l.Errorf("====TREE AFTER====\n%s====END====\n", file.Dump())
 		}
 		switch edit.Type {
 		case diffmatchpatch.DiffEqual:

+ 5 - 0
leaves/burndown_test.go

@@ -57,6 +57,11 @@ func TestBurndownMeta(t *testing.T) {
 	}
 	assert.Len(t, opts, matches)
 	assert.Equal(t, bd.Flag(), "burndown")
+	logger := core.NewLogger()
+	assert.NoError(t, bd.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, bd.l)
 }
 
 func TestBurndownConfigure(t *testing.T) {

+ 9 - 4
leaves/comment_sentiment.go

@@ -5,7 +5,6 @@ package leaves
 import (
 	"fmt"
 	"io"
-	"log"
 	"os"
 	"regexp"
 	"sort"
@@ -21,7 +20,7 @@ import (
 	"gopkg.in/src-d/hercules.v10/internal/pb"
 	items "gopkg.in/src-d/hercules.v10/internal/plumbing"
 	uast_items "gopkg.in/src-d/hercules.v10/internal/plumbing/uast"
-	"gopkg.in/vmarkovtsev/BiDiSentiment.v1"
+	sentiment "gopkg.in/vmarkovtsev/BiDiSentiment.v1"
 )
 
 // CommentSentimentAnalysis measures comment sentiment through time.
@@ -34,6 +33,8 @@ type CommentSentimentAnalysis struct {
 	commentsByTick map[int][]string
 	commitsByTick  map[int][]plumbing.Hash
 	xpather        *uast_items.ChangesXPather
+
+	l core.Logger
 }
 
 // CommentSentimentResult contains the sentiment values per tick, where 1 means very negative
@@ -116,6 +117,9 @@ func (sent *CommentSentimentAnalysis) Description() string {
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (sent *CommentSentimentAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		sent.l = l
+	}
 	if val, exists := facts[ConfigCommentSentimentGap]; exists {
 		sent.Gap = val.(float32)
 	}
@@ -129,12 +133,12 @@ func (sent *CommentSentimentAnalysis) Configure(facts map[string]interface{}) er
 
 func (sent *CommentSentimentAnalysis) validate() {
 	if sent.Gap < 0 || sent.Gap >= 1 {
-		log.Printf("Sentiment gap is too big: %f => reset to the default %f",
+		sent.l.Warnf("Sentiment gap is too big: %f => reset to the default %f",
 			sent.Gap, DefaultCommentSentimentGap)
 		sent.Gap = DefaultCommentSentimentGap
 	}
 	if sent.MinCommentLength < 10 {
-		log.Printf("Comment minimum length is too small: %d => reset to the default %d",
+		sent.l.Warnf("Comment minimum length is too small: %d => reset to the default %d",
 			sent.MinCommentLength, DefaultCommentSentimentCommentMinLength)
 		sent.MinCommentLength = DefaultCommentSentimentCommentMinLength
 	}
@@ -143,6 +147,7 @@ func (sent *CommentSentimentAnalysis) validate() {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (sent *CommentSentimentAnalysis) Initialize(repository *git.Repository) error {
+	sent.l = core.NewLogger()
 	sent.commentsByTick = map[int][]string{}
 	sent.xpather = &uast_items.ChangesXPather{XPath: "//uast:Comment"}
 	sent.validate()

+ 5 - 0
leaves/comment_sentiment_test.go

@@ -55,6 +55,11 @@ func TestCommentSentimentMeta(t *testing.T) {
 	}
 	assert.Len(t, opts, matches)
 	assert.Equal(t, sent.Flag(), "sentiment")
+	logger := core.NewLogger()
+	assert.NoError(t, sent.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, sent.l)
 }
 
 func TestCommentSentimentConfigure(t *testing.T) {

+ 6 - 0
leaves/commits.go

@@ -23,6 +23,8 @@ type CommitsAnalysis struct {
 	commits []*CommitStat
 	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
 	reversedPeopleDict []string
+
+	l core.Logger
 }
 
 // CommitsResult is returned by CommitsAnalysis.Finalize() and carries the statistics
@@ -77,6 +79,9 @@ func (ca *CommitsAnalysis) ListConfigurationOptions() []core.ConfigurationOption
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (ca *CommitsAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		ca.l = l
+	}
 	if val, exists := facts[identity.FactIdentityDetectorReversedPeopleDict].([]string); exists {
 		ca.reversedPeopleDict = val
 	}
@@ -96,6 +101,7 @@ func (ca *CommitsAnalysis) Description() string {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (ca *CommitsAnalysis) Initialize(repository *git.Repository) error {
+	ca.l = core.NewLogger()
 	return nil
 }
 

+ 5 - 0
leaves/commits_test.go

@@ -28,6 +28,11 @@ func TestCommitsMeta(t *testing.T) {
 	opts := ca.ListConfigurationOptions()
 	assert.Len(t, opts, 0)
 	assert.Equal(t, ca.Flag(), "commits-stat")
+	logger := core.NewLogger()
+	assert.NoError(t, ca.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, ca.l)
 }
 
 func TestCommitsRegistration(t *testing.T) {

+ 15 - 4
leaves/couples.go

@@ -3,7 +3,6 @@ package leaves
 import (
 	"fmt"
 	"io"
-	"log"
 	"sort"
 
 	"github.com/gogo/protobuf/proto"
@@ -38,6 +37,8 @@ type CouplesAnalysis struct {
 	lastCommit *object.Commit
 	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
 	reversedPeopleDict []string
+
+	l core.Logger
 }
 
 // CouplesResult is returned by CouplesAnalysis.Finalize() and carries couples matrices from
@@ -99,6 +100,9 @@ func (couples *CouplesAnalysis) ListConfigurationOptions() []core.ConfigurationO
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (couples *CouplesAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		couples.l = l
+	}
 	if val, exists := facts[identity.FactIdentityDetectorPeopleCount].(int); exists {
 		couples.PeopleNumber = val
 		couples.reversedPeopleDict = facts[identity.FactIdentityDetectorReversedPeopleDict].([]string)
@@ -121,6 +125,7 @@ func (couples *CouplesAnalysis) Description() string {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (couples *CouplesAnalysis) Initialize(repository *git.Repository) error {
+	couples.l = core.NewLogger()
 	couples.people = make([]map[string]int, couples.PeopleNumber+1)
 	for i := range couples.people {
 		couples.people[i] = map[string]int{}
@@ -212,14 +217,18 @@ func (couples *CouplesAnalysis) Finalize() interface{} {
 	for i, name := range filesSequence {
 		file, err := couples.lastCommit.File(name)
 		if err != nil {
-			log.Panicf("cannot find file %s in commit %s: %v",
+			err := fmt.Errorf("cannot find file %s in commit %s: %v",
 				name, couples.lastCommit.Hash.String(), err)
+			couples.l.Critical(err)
+			return err
 		}
 		blob := items.CachedBlob{Blob: file.Blob}
 		err = blob.Cache()
 		if err != nil {
-			log.Panicf("cannot read blob %s of file %s: %v",
+			err := fmt.Errorf("cannot read blob %s of file %s: %v",
 				blob.Hash.String(), name, err)
+			couples.l.Critical(err)
+			return err
 		}
 		filesLines[i], _ = blob.CountLines()
 	}
@@ -301,8 +310,10 @@ func (couples *CouplesAnalysis) Deserialize(pbmessage []byte) (interface{}, erro
 		}
 	}
 	if len(message.FileCouples.Index) != len(message.FilesLines) {
-		log.Panicf("Couples PB message integrity violation: file_couples (%d) != file_lines (%d)",
+		err := fmt.Errorf("Couples PB message integrity violation: file_couples (%d) != file_lines (%d)",
 			len(message.FileCouples.Index), len(message.FilesLines))
+		couples.l.Critical(err)
+		return nil, err
 	}
 	for i, v := range message.FilesLines {
 		result.FilesLines[i] = int(v)

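With log.Panicf gone from Finalize and Deserialize, failures are now logged through couples.l and returned as values; since Finalize returns interface{}, the error travels back as the result itself. A hedged sketch of how a caller might tell the two outcomes apart (assuming it type-checks the returned value):

	res := couples.Finalize()
	if err, failed := res.(error); failed {
		// a missing file or unreadable blob now surfaces here instead of panicking
		return err
	}
	result := res.(CouplesResult)
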
+ 5 - 0
leaves/couples_test.go

@@ -33,6 +33,11 @@ func TestCouplesMeta(t *testing.T) {
 	assert.Equal(t, c.Requires()[1], plumbing.DependencyTreeChanges)
 	assert.Equal(t, c.Flag(), "couples")
 	assert.Len(t, c.ListConfigurationOptions(), 0)
+	logger := core.NewLogger()
+	assert.NoError(t, c.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, c.l)
 }
 
 func TestCouplesRegistration(t *testing.T) {

+ 6 - 0
leaves/devs.go

@@ -31,6 +31,8 @@ type DevsAnalysis struct {
 	ticks map[int]map[int]*DevTick
 	// reversedPeopleDict references IdentityDetector.ReversedPeopleDict
 	reversedPeopleDict []string
+
+	l core.Logger
 }
 
 // DevsResult is returned by DevsAnalysis.Finalize() and carries the daily statistics
@@ -92,6 +94,9 @@ func (devs *DevsAnalysis) ListConfigurationOptions() []core.ConfigurationOption
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (devs *DevsAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		devs.l = l
+	}
 	if val, exists := facts[ConfigDevsConsiderEmptyCommits].(bool); exists {
 		devs.ConsiderEmptyCommits = val
 	}
@@ -114,6 +119,7 @@ func (devs *DevsAnalysis) Description() string {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (devs *DevsAnalysis) Initialize(repository *git.Repository) error {
+	devs.l = core.NewLogger()
 	devs.ticks = map[int]map[int]*DevTick{}
 	devs.OneShotMergeProcessor.Initialize()
 	return nil

+ 5 - 0
leaves/devs_test.go

@@ -41,6 +41,11 @@ func TestDevsMeta(t *testing.T) {
 	assert.Equal(t, d.ListConfigurationOptions()[0].Type, core.BoolConfigurationOption)
 	assert.Equal(t, d.ListConfigurationOptions()[0].Default, false)
 	assert.True(t, len(d.Description()) > 0)
+	logger := core.NewLogger()
+	assert.NoError(t, d.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, d.l)
 }
 
 func TestDevsRegistration(t *testing.T) {

+ 10 - 3
leaves/file_history.go

@@ -3,7 +3,6 @@ package leaves
 import (
 	"fmt"
 	"io"
-	"log"
 	"sort"
 	"strings"
 
@@ -25,6 +24,8 @@ type FileHistoryAnalysis struct {
 	core.OneShotMergeProcessor
 	files      map[string]*FileHistory
 	lastCommit *object.Commit
+
+	l core.Logger
 }
 
 // FileHistoryResult is returned by Finalize() and represents the analysis result.
@@ -79,12 +80,16 @@ func (history *FileHistoryAnalysis) Description() string {
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (history *FileHistoryAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		history.l = l
+	}
 	return nil
 }
 
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (history *FileHistoryAnalysis) Initialize(repository *git.Repository) error {
+	history.l = core.NewLogger()
 	history.files = map[string]*FileHistory{}
 	history.OneShotMergeProcessor.Initialize()
 	return nil
@@ -158,7 +163,8 @@ func (history *FileHistoryAnalysis) Finalize() interface{} {
 	files := map[string]FileHistory{}
 	fileIter, err := history.lastCommit.Files()
 	if err != nil {
-		log.Panicf("Failed to iterate files of %s", history.lastCommit.Hash.String())
+		history.l.Errorf("Failed to iterate files of %s", history.lastCommit.Hash.String())
+		return err
 	}
 	err = fileIter.ForEach(func(file *object.File) error {
 		if fh := history.files[file.Name]; fh != nil {
@@ -167,7 +173,8 @@ func (history *FileHistoryAnalysis) Finalize() interface{} {
 		return nil
 	})
 	if err != nil {
-		log.Panicf("Failed to iterate files of %s", history.lastCommit.Hash.String())
+		history.l.Errorf("Failed to iterate files of %s", history.lastCommit.Hash.String())
+		return err
 	}
 	return FileHistoryResult{Files: files}
 }

+ 5 - 0
leaves/file_history_test.go

@@ -32,6 +32,11 @@ func TestFileHistoryMeta(t *testing.T) {
 	assert.Equal(t, fh.Requires()[2], identity.DependencyAuthor)
 	assert.Len(t, fh.ListConfigurationOptions(), 0)
 	assert.Nil(t, fh.Configure(nil))
+	logger := core.NewLogger()
+	assert.NoError(t, fh.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, fh.l)
 }
 
 func TestFileHistoryRegistration(t *testing.T) {

+ 8 - 3
leaves/research/typos.go

@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"fmt"
 	"io"
-	"log"
 	"unicode/utf8"
 
 	"github.com/gogo/protobuf/proto"
@@ -38,6 +37,8 @@ type TyposDatasetBuilder struct {
 	xpather uast_items.ChangesXPather
 	// remote carries the repository remote URL (for debugging)
 	remote string
+
+	l core.Logger
 }
 
 // TyposResult is returned by TyposDatasetBuilder.Finalize() and carries the found typo-fix
@@ -101,6 +102,9 @@ func (tdb *TyposDatasetBuilder) ListConfigurationOptions() []core.ConfigurationO
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (tdb *TyposDatasetBuilder) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		tdb.l = l
+	}
 	if val, exists := facts[ConfigTyposDatasetMaximumAllowedDistance].(int); exists {
 		tdb.MaximumAllowedDistance = val
 	}
@@ -120,6 +124,7 @@ func (tdb *TyposDatasetBuilder) Description() string {
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (tdb *TyposDatasetBuilder) Initialize(repository *git.Repository) error {
+	tdb.l = core.NewLogger()
 	if tdb.MaximumAllowedDistance <= 0 {
 		tdb.MaximumAllowedDistance = DefaultMaximumAllowedTypoDistance
 	}
@@ -198,7 +203,7 @@ func (tdb *TyposDatasetBuilder) Consume(deps map[string]interface{}) (map[string
 		for _, n := range nodesAdded {
 			pos := uast.PositionsOf(n.(nodes.Object))
 			if pos.Start() == nil {
-				log.Printf("repo %s commit %s file %s adds identifier %s with no position",
+				tdb.l.Warnf("repo %s commit %s file %s adds identifier %s with no position",
 					tdb.remote, commit.String(), change.Change.To.Name,
 					n.(nodes.Object)["Name"].(nodes.String))
 				continue
@@ -211,7 +216,7 @@ func (tdb *TyposDatasetBuilder) Consume(deps map[string]interface{}) (map[string
 		for _, n := range nodesRemoved {
 			pos := uast.PositionsOf(n.(nodes.Object))
 			if pos.Start() == nil {
-				log.Printf("repo %s commit %s file %s removes identifier %s with no position",
+				tdb.l.Warnf("repo %s commit %s file %s removes identifier %s with no position",
 					tdb.remote, commit.String(), change.Change.To.Name,
 					n.(nodes.Object)["Name"].(nodes.String))
 				continue

+ 5 - 0
leaves/research/typos_test.go

@@ -34,6 +34,11 @@ func TestTyposDatasetMeta(t *testing.T) {
 	assert.Equal(t, opts[0].Name, ConfigTyposDatasetMaximumAllowedDistance)
 	assert.Equal(t, opts[0].Type, core.IntConfigurationOption)
 	assert.Equal(t, tdb.Flag(), "typos-dataset")
+	logger := core.NewLogger()
+	assert.NoError(t, tdb.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, tdb.l)
 }
 
 func TestTyposDatasetRegistration(t *testing.T) {

+ 9 - 4
leaves/shotness.go

@@ -3,7 +3,6 @@ package leaves
 import (
 	"fmt"
 	"io"
-	"log"
 	"sort"
 	"unicode/utf8"
 
@@ -31,6 +30,8 @@ type ShotnessAnalysis struct {
 
 	nodes map[string]*nodeShotness
 	files map[string]map[string]*nodeShotness
+
+	l core.Logger
 }
 
 const (
@@ -129,6 +130,9 @@ func (shotness *ShotnessAnalysis) Description() string {
 
 // Configure sets the properties previously published by ListConfigurationOptions().
 func (shotness *ShotnessAnalysis) Configure(facts map[string]interface{}) error {
+	if l, exists := facts[core.ConfigLogger].(core.Logger); exists {
+		shotness.l = l
+	}
 	if val, exists := facts[ConfigShotnessXpathStruct]; exists {
 		shotness.XpathStruct = val.(string)
 	} else {
@@ -145,6 +149,7 @@ func (shotness *ShotnessAnalysis) Configure(facts map[string]interface{}) error
 // Initialize resets the temporary caches and prepares this PipelineItem for a series of Consume()
 // calls. The repository which is going to be analysed is supplied as an argument.
 func (shotness *ShotnessAnalysis) Initialize(repository *git.Repository) error {
+	shotness.l = core.NewLogger()
 	shotness.nodes = map[string]*nodeShotness{}
 	shotness.files = map[string]map[string]*nodeShotness{}
 	shotness.OneShotMergeProcessor.Initialize()
@@ -209,7 +214,7 @@ func (shotness *ShotnessAnalysis) Consume(deps map[string]interface{}) (map[stri
 		if change.Before == nil {
 			nodes, err := shotness.extractNodes(change.After)
 			if err != nil {
-				log.Printf("Shotness: commit %s file %s failed to filter UAST: %s\n",
+				shotness.l.Warnf("Shotness: commit %s file %s failed to filter UAST: %s\n",
 					commit.Hash.String(), toName, err.Error())
 				continue
 			}
@@ -245,14 +250,14 @@ func (shotness *ShotnessAnalysis) Consume(deps map[string]interface{}) (map[stri
 		// pass through new UAST
 		nodesBefore, err := shotness.extractNodes(change.Before)
 		if err != nil {
-			log.Printf("Shotness: commit ^%s file %s failed to filter UAST: %s\n",
+			shotness.l.Warnf("Shotness: commit ^%s file %s failed to filter UAST: %s\n",
 				commit.Hash.String(), change.Change.From.Name, err.Error())
 			continue
 		}
 		reversedNodesBefore := reverseNodeMap(nodesBefore)
 		nodesAfter, err := shotness.extractNodes(change.After)
 		if err != nil {
-			log.Printf("Shotness: commit %s file %s failed to filter UAST: %s\n",
+			shotness.l.Warnf("Shotness: commit %s file %s failed to filter UAST: %s\n",
 				commit.Hash.String(), toName, err.Error())
 			continue
 		}

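Because every message now goes through the injected core.Logger, callers can filter or redirect output without touching the analyses. A small sketch of a logger that silences the non-fatal Warnf calls above while delegating everything else to the default implementation; it assumes core.Logger is an interface (as the type assertions in Configure suggest), so the default logger can be embedded:

	type quietLogger struct {
		core.Logger // delegates every method except Warnf
	}

	func (quietLogger) Warnf(format string, args ...interface{}) {
		// warnings intentionally dropped
	}

	// passed to the pipeline as facts[core.ConfigLogger] = quietLogger{core.NewLogger()}
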
+ 10 - 4
leaves/shotness_test.go

@@ -45,12 +45,18 @@ func TestShotnessMeta(t *testing.T) {
 	assert.Nil(t, sh.Configure(nil))
 	assert.Equal(t, sh.XpathStruct, DefaultShotnessXpathStruct)
 	assert.Equal(t, sh.XpathName, DefaultShotnessXpathName)
-	facts := map[string]interface{}{}
-	facts[ConfigShotnessXpathStruct] = "xpath!"
-	facts[ConfigShotnessXpathName] = "another!"
-	assert.Nil(t, sh.Configure(facts))
+	assert.NoError(t, sh.Configure(map[string]interface{}{
+		ConfigShotnessXpathStruct: "xpath!",
+		ConfigShotnessXpathName:   "another!",
+	}))
 	assert.Equal(t, sh.XpathStruct, "xpath!")
 	assert.Equal(t, sh.XpathName, "another!")
+
+	logger := core.NewLogger()
+	assert.NoError(t, sh.Configure(map[string]interface{}{
+		core.ConfigLogger: logger,
+	}))
+	assert.Equal(t, logger, sh.l)
 }
 
 func TestShotnessRegistration(t *testing.T) {