
Merge pull request #34 from vmarkovtsev/master

Shotness
Vadim Markovtsev 7 years ago
parent
commit
7f3259d252
6 changed files with 755 additions and 2 deletions
  1. diff_refiner.go  + 2 - 2
  2. pb/pb.proto  + 12 - 0
  3. pipeline.go  + 6 - 0
  4. pipeline_test.go  + 7 - 0
  5. shotness.go  + 425 - 0
  6. shotness_test.go  + 303 - 0

+ 2 - 2
diff_refiner.go

@@ -78,7 +78,7 @@ func (ref *FileDiffRefiner) Consume(deps map[string]interface{}) (map[string]int
 		}
 		uastChange := changes[fileName]
 		line2node := make([][]*uast.Node, oldDiff.NewLinesOfCode)
-		visitEachNode(uastChange.After, func(node *uast.Node) {
+		VisitEachNode(uastChange.After, func(node *uast.Node) {
 			if node.StartPosition != nil && node.EndPosition != nil {
 				for l := node.StartPosition.Line; l <= node.EndPosition.Line; l++ {
 					nodes := line2node[l-1] // line starts with 1
@@ -135,7 +135,7 @@ func (ref *FileDiffRefiner) Consume(deps map[string]interface{}) (map[string]int
 }
 
 // Depth first tree traversal.
-func visitEachNode(root *uast.Node, payload func(*uast.Node)) {
+func VisitEachNode(root *uast.Node, payload func(*uast.Node)) {
 	queue := []*uast.Node{}
 	queue = append(queue, root)
 	for len(queue) > 0 {
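
Since the traversal helper is now exported as VisitEachNode, code outside the package can reuse it. Below is a minimal sketch of a hypothetical external caller; the tiny hand-built tree exists only to exercise the function, and the position check mirrors the one in FileDiffRefiner above.

    package main

    import (
        "fmt"

        "gopkg.in/bblfsh/sdk.v1/uast"
        hercules "gopkg.in/src-d/hercules.v3"
    )

    // countPositionedNodes walks the whole UAST and counts the nodes that
    // carry both a start and an end position.
    func countPositionedNodes(root *uast.Node) int {
        total := 0
        hercules.VisitEachNode(root, func(node *uast.Node) {
            if node.StartPosition != nil && node.EndPosition != nil {
                total++
            }
        })
        return total
    }

    func main() {
        root := &uast.Node{InternalType: "Root", Children: []*uast.Node{
            {InternalType: "Leaf",
                StartPosition: &uast.Position{Line: 1},
                EndPosition:   &uast.Position{Line: 2}},
        }}
        fmt.Println(countPositionedNodes(root)) // 1: only the leaf has positions
    }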

+ 12 - 0
pb/pb.proto

@@ -85,6 +85,18 @@ message UASTChangesSaverResults {
     repeated UASTChange changes = 1;
 }
 
+message ShotnessRecord {
+    string internal_role = 1;
+    repeated int32 roles = 2;
+    string name = 3;
+    string file = 4;
+    map<int32, int32> counters = 5;
+}
+
+message ShotnessAnalysisResultMessage {
+    repeated ShotnessRecord records = 1;
+}
+
 message AnalysisResults {
     Metadata header = 1;
     // the mapped values are dynamic messages which require the second parsing pass.
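
These messages are plain protobuf, so once the Go bindings under pb/ are regenerated from this schema they can be decoded with the regular gogo/protobuf API. A minimal sketch, assuming a file that holds a raw ShotnessAnalysisResultMessage exactly as serializeBinary() writes it (the standard hercules output wraps such payloads inside AnalysisResults, whose values need a second parsing pass, as noted in the comment above); the "shotness.pb" path is a placeholder.

    package main

    import (
        "fmt"
        "io/ioutil"
        "log"

        "github.com/gogo/protobuf/proto"
        "gopkg.in/src-d/hercules.v3/pb"
    )

    func main() {
        data, err := ioutil.ReadFile("shotness.pb") // placeholder path
        if err != nil {
            log.Fatal(err)
        }
        message := pb.ShotnessAnalysisResultMessage{}
        if err := proto.Unmarshal(data, &message); err != nil {
            log.Fatal(err)
        }
        for _, record := range message.Records {
            // Counters maps record indices to co-change counts; the entry for
            // the record's own index holds its total modification count.
            fmt.Printf("%s %s: %d counter entries\n",
                record.File, record.Name, len(record.Counters))
        }
    }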

+ 6 - 0
pipeline.go

@@ -341,6 +341,12 @@ func (pipeline *Pipeline) SetFeaturesFromFlags() {
 }
 
 func (pipeline *Pipeline) DeployItem(item PipelineItem) PipelineItem {
+	fpi, ok := item.(FeaturedPipelineItem)
+	if ok {
+		for _, f := range fpi.Features() {
+			pipeline.SetFeature(f)
+		}
+	}
 	queue := []PipelineItem{}
 	queue = append(queue, item)
 	added := map[string]PipelineItem{}
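
DeployItem() now checks whether the deployed item also implements FeaturedPipelineItem and, if it does, enables every feature reported by Features() before resolving dependencies. A minimal sketch of the effect from a hypothetical caller, assuming a local clone opened with go-git (the "." path is a placeholder); ShotnessAnalysis, added below in this pull request, declares FeatureUast.

    package main

    import (
        "fmt"
        "log"

        "gopkg.in/src-d/go-git.v4"
        hercules "gopkg.in/src-d/hercules.v3"
    )

    func main() {
        repository, err := git.PlainOpen(".") // placeholder: any local clone
        if err != nil {
            log.Fatal(err)
        }
        pipeline := hercules.NewPipeline(repository)
        // ShotnessAnalysis.Features() returns []string{FeatureUast}, so deploying
        // it is enough to switch the feature on; no explicit SetFeature() call.
        pipeline.DeployItem(&hercules.ShotnessAnalysis{})
        enabled, _ := pipeline.GetFeature(hercules.FeatureUast)
        fmt.Println("uast feature enabled:", enabled)
    }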

+ 7 - 0
pipeline_test.go

@@ -320,6 +320,13 @@ func TestPipelineDeps(t *testing.T) {
 	assert.Panics(t, func() { pipeline.Run(commits) })
 }
 
+func TestPipelineDeployFeatures(t *testing.T) {
+	pipeline := NewPipeline(testRepository)
+	pipeline.DeployItem(&testPipelineItem{})
+	f, _ := pipeline.GetFeature("power")
+	assert.True(t, f)
+}
+
 func TestPipelineError(t *testing.T) {
 	pipeline := NewPipeline(testRepository)
 	item := &testPipelineItem{}

+ 425 - 0
shotness.go

@@ -0,0 +1,425 @@
+package hercules
+
+import (
+	"fmt"
+	"io"
+	"os"
+	"sort"
+	"unicode/utf8"
+
+	"github.com/gogo/protobuf/proto"
+	"github.com/sergi/go-diff/diffmatchpatch"
+	"gopkg.in/bblfsh/client-go.v2/tools"
+	"gopkg.in/bblfsh/sdk.v1/uast"
+	"gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v3/pb"
+)
+
+// ShotnessAnalysis contains the intermediate state which is mutated by Consume(). It should implement
+// LeafPipelineItem.
+type ShotnessAnalysis struct {
+	XpathStruct string
+	XpathName   string
+
+	nodes map[string]*nodeShotness
+	files map[string]map[string]*nodeShotness
+}
+
+const (
+	ConfigShotnessXpathStruct = "Shotness.XpathStruct"
+	ConfigShotnessXpathName   = "Shotness.XpathName"
+
+	DefaultShotnessXpathStruct = "//*[@roleFunction and @roleDeclaration]"
+	DefaultShotnessXpathName   = "/*[@roleFunction and @roleIdentifier and @roleName] | /*/*[@roleFunction and @roleIdentifier and @roleName]"
+)
+
+type nodeShotness struct {
+	Count   int
+	Summary NodeSummary
+	Couples map[string]int
+}
+
+type NodeSummary struct {
+	InternalRole string
+	Roles        []uast.Role
+	Name         string
+	File         string
+}
+
+// ShotnessAnalysisResult is returned by Finalize() and represents the analysis result.
+type ShotnessAnalysisResult struct {
+	Nodes    []NodeSummary
+	Counters []map[int]int
+}
+
+func (node NodeSummary) String() string {
+	return node.InternalRole + "_" + node.Name + "_" + node.File
+}
+
+func (shotness *ShotnessAnalysis) Name() string {
+	return "Shotness"
+}
+
+func (shotness *ShotnessAnalysis) Provides() []string {
+	return []string{}
+}
+
+func (shotness *ShotnessAnalysis) Features() []string {
+	arr := [...]string{FeatureUast}
+	return arr[:]
+}
+
+func (shotness *ShotnessAnalysis) Requires() []string {
+	arr := [...]string{DependencyFileDiff, DependencyUastChanges}
+	return arr[:]
+}
+
+// ListConfigurationOptions tells the engine which parameters can be changed through the command
+// line.
+func (shotness *ShotnessAnalysis) ListConfigurationOptions() []ConfigurationOption {
+	opts := [...]ConfigurationOption{{
+		Name:        ConfigShotnessXpathStruct,
+		Description: "UAST XPath query to use for filtering the nodes.",
+		Flag:        "shotness-xpath-struct",
+		Type:        StringConfigurationOption,
+		Default:     DefaultShotnessXpathStruct}, {
+		Name:        ConfigShotnessXpathName,
+		Description: "UAST XPath query to determine the names of the filtered nodes.",
+		Flag:        "shotness-xpath-name",
+		Type:        StringConfigurationOption,
+		Default:     DefaultShotnessXpathName},
+	}
+	return opts[:]
+}
+
+// Flag returns the command line switch which activates the analysis.
+func (shotness *ShotnessAnalysis) Flag() string {
+	return "shotness"
+}
+
+// Configure applies the parameters specified in the command line.
+func (shotness *ShotnessAnalysis) Configure(facts map[string]interface{}) {
+	if val, exists := facts[ConfigShotnessXpathStruct]; exists {
+		shotness.XpathStruct = val.(string)
+	} else {
+		shotness.XpathStruct = DefaultShotnessXpathStruct
+	}
+	if val, exists := facts[ConfigShotnessXpathName]; exists {
+		shotness.XpathName = val.(string)
+	} else {
+		shotness.XpathName = DefaultShotnessXpathName
+	}
+}
+
+// Initialize resets the internal temporary data structures and prepares the object for Consume().
+func (shotness *ShotnessAnalysis) Initialize(repository *git.Repository) {
+	shotness.nodes = map[string]*nodeShotness{}
+	shotness.files = map[string]map[string]*nodeShotness{}
+}
+
+// Consume is called for every commit in the sequence.
+func (shotness *ShotnessAnalysis) Consume(deps map[string]interface{}) (map[string]interface{}, error) {
+	commit := deps["commit"].(*object.Commit)
+	changesList := deps[DependencyUastChanges].([]UASTChange)
+	diffs := deps[DependencyFileDiff].(map[string]FileDiffData)
+	allNodes := map[string]bool{}
+
+	addNode := func(name string, node *uast.Node, fileName string) {
+		nodeSummary := NodeSummary{
+			InternalRole: node.InternalType,
+			Roles:        node.Roles,
+			Name:         name,
+			File:         fileName,
+		}
+		key := nodeSummary.String()
+		exists := allNodes[key]
+		allNodes[key] = true
+		var count int
+		if ns := shotness.nodes[key]; ns != nil {
+			count = ns.Count
+		}
+		if count == 0 {
+			shotness.nodes[key] = &nodeShotness{
+				Summary: nodeSummary, Count: 1, Couples: map[string]int{}}
+			fmap := shotness.files[nodeSummary.File]
+			if fmap == nil {
+				fmap = map[string]*nodeShotness{}
+			}
+			fmap[key] = shotness.nodes[key]
+			shotness.files[nodeSummary.File] = fmap
+		} else if !exists { // in case there are removals and additions in the same node
+			shotness.nodes[key].Count = count + 1
+		}
+	}
+
+	for _, change := range changesList {
+		if change.After == nil {
+			for key, summary := range shotness.files[change.Change.From.Name] {
+				for subkey := range summary.Couples {
+					delete(shotness.nodes[subkey].Couples, key)
+				}
+			}
+			for key := range shotness.files[change.Change.From.Name] {
+				delete(shotness.nodes, key)
+			}
+			delete(shotness.files, change.Change.From.Name)
+			continue
+		}
+		toName := change.Change.To.Name
+		if change.Before == nil {
+			nodes, err := shotness.extractNodes(change.After)
+			if err != nil {
+				fmt.Fprintf(os.Stderr, "Shotness: commit %s file %s failed to filter UAST: %s\n",
+					commit.Hash.String(), toName, err.Error())
+				continue
+			}
+			for name, node := range nodes {
+				addNode(name, node, toName)
+			}
+			continue
+		}
+		// Before -> After
+		if change.Change.From.Name != toName {
+			// renamed
+			oldFile := shotness.files[change.Change.From.Name]
+			newFile := map[string]*nodeShotness{}
+			shotness.files[toName] = newFile
+			for oldKey, ns := range oldFile {
+				ns.Summary.File = toName
+				newKey := ns.Summary.String()
+				newFile[newKey] = ns
+				shotness.nodes[newKey] = ns
+				for coupleKey, count := range ns.Couples {
+					coupleCouples := shotness.nodes[coupleKey].Couples
+					delete(coupleCouples, oldKey)
+					coupleCouples[newKey] = count
+				}
+			}
+			// deferred cleanup is needed
+			for key := range oldFile {
+				delete(shotness.nodes, key)
+			}
+			delete(shotness.files, change.Change.From.Name)
+		}
+		// pass through old UAST
+		nodesBefore, err := shotness.extractNodes(change.Before)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Shotness: commit ^%s file %s failed to filter UAST: %s\n",
+				commit.Hash.String(), change.Change.From.Name, err.Error())
+			continue
+		}
+		reversedNodesBefore := reverseNodeMap(nodesBefore)
+		// pass through new UAST
+		nodesAfter, err := shotness.extractNodes(change.After)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Shotness: commit %s file %s failed to filter UAST: %s\n",
+				commit.Hash.String(), toName, err.Error())
+			continue
+		}
+		reversedNodesAfter := reverseNodeMap(nodesAfter)
+		genLine2Node := func(nodes map[string]*uast.Node, linesNum int) [][]*uast.Node {
+			res := make([][]*uast.Node, linesNum)
+			for _, node := range nodes {
+				if node.StartPosition != nil && node.EndPosition != nil {
+					for l := node.StartPosition.Line; l <= node.EndPosition.Line; l++ {
+						lineNodes := res[l-1]
+						if lineNodes == nil {
+							lineNodes = []*uast.Node{}
+						}
+						lineNodes = append(lineNodes, node)
+						res[l-1] = lineNodes
+					}
+				}
+			}
+			return res
+		}
+		diff := diffs[toName]
+		line2nodeBefore := genLine2Node(nodesBefore, diff.OldLinesOfCode)
+		line2nodeAfter := genLine2Node(nodesAfter, diff.NewLinesOfCode)
+		// Scan through all the edits. Given the line numbers, get the list of active nodes
+		// and add them.
+		var lineNumBefore, lineNumAfter int
+		for _, edit := range diff.Diffs {
+			size := utf8.RuneCountInString(edit.Text)
+			switch edit.Type {
+			case diffmatchpatch.DiffDelete:
+				for l := lineNumBefore; l < lineNumBefore+size; l++ {
+					nodes := line2nodeBefore[l]
+					for _, node := range nodes {
+						// toName because we handled a possible rename before
+						addNode(reversedNodesBefore[node], node, toName)
+					}
+				}
+				lineNumBefore += size
+			case diffmatchpatch.DiffInsert:
+				for l := lineNumAfter; l < lineNumAfter+size; l++ {
+					nodes := line2nodeAfter[l]
+					for _, node := range nodes {
+						addNode(reversedNodesAfter[node], node, toName)
+					}
+				}
+				lineNumAfter += size
+			case diffmatchpatch.DiffEqual:
+				lineNumBefore += size
+				lineNumAfter += size
+			}
+		}
+	}
+	for keyi := range allNodes {
+		for keyj := range allNodes {
+			if keyi == keyj {
+				continue
+			}
+			shotness.nodes[keyi].Couples[keyj]++
+		}
+	}
+	return nil, nil
+}
+
+// Finalize produces the result of the analysis. No more Consume() calls are expected afterwards.
+func (shotness *ShotnessAnalysis) Finalize() interface{} {
+	result := ShotnessAnalysisResult{
+		Nodes:    make([]NodeSummary, len(shotness.nodes)),
+		Counters: make([]map[int]int, len(shotness.nodes)),
+	}
+	keys := make([]string, len(shotness.nodes))
+	i := 0
+	for key := range shotness.nodes {
+		keys[i] = key
+		i++
+	}
+	sort.Strings(keys)
+	reverseKeys := map[string]int{}
+	for i, key := range keys {
+		reverseKeys[key] = i
+	}
+	for i, key := range keys {
+		node := shotness.nodes[key]
+		result.Nodes[i] = node.Summary
+		counter := map[int]int{}
+		result.Counters[i] = counter
+		counter[i] = node.Count
+		for ck, val := range node.Couples {
+			counter[reverseKeys[ck]] = val
+		}
+	}
+	return result
+}
+
+// Serialize converts the result from Finalize() to either Protocol Buffers or YAML.
+func (shotness *ShotnessAnalysis) Serialize(result interface{}, binary bool, writer io.Writer) error {
+	shotnessResult := result.(ShotnessAnalysisResult)
+	if binary {
+		return shotness.serializeBinary(&shotnessResult, writer)
+	}
+	shotness.serializeText(&shotnessResult, writer)
+	return nil
+}
+
+func (shotness *ShotnessAnalysis) serializeText(result *ShotnessAnalysisResult, writer io.Writer) {
+	for i, summary := range result.Nodes {
+		fmt.Fprintf(writer, "  - name: %s\n    file: %s\n    ir: %s\n    roles: [",
+			summary.Name, summary.File, summary.InternalRole)
+		for j, r := range summary.Roles {
+			if j < len(summary.Roles)-1 {
+				fmt.Fprintf(writer, "%d,", r)
+			} else {
+				fmt.Fprintf(writer, "%d]\n    counters: {", r)
+			}
+		}
+		keys := make([]int, len(result.Counters[i]))
+		j := 0
+		for key := range result.Counters[i] {
+			keys[j] = key
+			j++
+		}
+		sort.Ints(keys)
+		j = 0
+		for _, key := range keys {
+			val := result.Counters[i][key]
+			if j < len(result.Counters[i])-1 {
+				fmt.Fprintf(writer, "%d:%d,", key, val)
+			} else {
+				fmt.Fprintf(writer, "%d:%d}\n", key, val)
+			}
+			j++
+		}
+	}
+}
+
+func (shotness *ShotnessAnalysis) serializeBinary(result *ShotnessAnalysisResult, writer io.Writer) error {
+	message := pb.ShotnessAnalysisResultMessage{
+		Records: make([]*pb.ShotnessRecord, len(result.Nodes)),
+	}
+	for i, summary := range result.Nodes {
+		record := &pb.ShotnessRecord{
+			Name:         summary.Name,
+			File:         summary.File,
+			InternalRole: summary.InternalRole,
+			Roles:        make([]int32, len(summary.Roles)),
+			Counters:     map[int32]int32{},
+		}
+		for j, r := range summary.Roles {
+			record.Roles[j] = int32(r)
+		}
+		for key, val := range result.Counters[i] {
+			record.Counters[int32(key)] = int32(val)
+		}
+		message.Records[i] = record
+	}
+	serialized, err := proto.Marshal(&message)
+	if err != nil {
+		return err
+	}
+	_, err = writer.Write(serialized)
+	return err
+}
+
+func (shotness *ShotnessAnalysis) extractNodes(root *uast.Node) (map[string]*uast.Node, error) {
+	structs, err := tools.Filter(root, shotness.XpathStruct)
+	if err != nil {
+		return nil, err
+	}
+	// some structs may be inside other structs; we pick the outermost
+	// otherwise due to UAST quirks there may be false positives
+	internal := map[*uast.Node]bool{}
+	for _, mainNode := range structs {
+		subs, err := tools.Filter(mainNode, shotness.XpathStruct)
+		if err != nil {
+			return nil, err
+		}
+		for _, sub := range subs {
+			if sub != mainNode {
+				internal[sub] = true
+			}
+		}
+	}
+	res := map[string]*uast.Node{}
+	for _, node := range structs {
+		if internal[node] {
+			continue
+		}
+		nodeNames, err := tools.Filter(node, shotness.XpathName)
+		if err != nil {
+			return nil, err
+		}
+		if len(nodeNames) == 0 {
+			continue
+		}
+		res[nodeNames[0].Token] = node
+	}
+	return res, nil
+}
+
+func reverseNodeMap(nodes map[string]*uast.Node) map[*uast.Node]string {
+	res := map[*uast.Node]string{}
+	for key, node := range nodes {
+		res[node] = key
+	}
+	return res
+}
+
+func init() {
+	Registry.Register(&ShotnessAnalysis{})
+}
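
For orientation, the layout produced by Finalize(): Nodes is sorted by the textual node key, Counters[i][i] holds the number of commits that touched Nodes[i], and Counters[i][j] holds how many commits touched Nodes[i] and Nodes[j] together. A minimal sketch of a hypothetical consumer of ShotnessAnalysisResult, fed with toy data of the same shape.

    package main

    import (
        "fmt"

        hercules "gopkg.in/src-d/hercules.v3"
    )

    // printCouples dumps the co-change structure encoded in the result.
    func printCouples(result hercules.ShotnessAnalysisResult) {
        for i, node := range result.Nodes {
            fmt.Printf("%s: modified in %d commit(s)\n", node.String(), result.Counters[i][i])
            for j, shared := range result.Counters[i] { // map order is random
                if j != i {
                    fmt.Printf("  together with %s: %d\n", result.Nodes[j].String(), shared)
                }
            }
        }
    }

    func main() {
        result := hercules.ShotnessAnalysisResult{
            Nodes: []hercules.NodeSummary{
                {InternalRole: "MethodDeclaration", Name: "foo", File: "a.java"},
                {InternalRole: "MethodDeclaration", Name: "bar", File: "a.java"},
            },
            Counters: []map[int]int{{0: 2, 1: 1}, {1: 1, 0: 1}},
        }
        printCouples(result)
    }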

+ 303 - 0
shotness_test.go

@@ -0,0 +1,303 @@
+package hercules
+
+import (
+	"bytes"
+	"io/ioutil"
+	"path"
+	"testing"
+
+	"github.com/gogo/protobuf/proto"
+	"github.com/sergi/go-diff/diffmatchpatch"
+	"github.com/stretchr/testify/assert"
+	"gopkg.in/bblfsh/sdk.v1/uast"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/hercules.v3/pb"
+)
+
+func fixtureShotness() *ShotnessAnalysis {
+	sh := &ShotnessAnalysis{}
+	sh.Initialize(testRepository)
+	sh.Configure(nil)
+	return sh
+}
+
+func TestShotnessMeta(t *testing.T) {
+	sh := &ShotnessAnalysis{}
+	sh.Initialize(testRepository)
+	assert.NotNil(t, sh.nodes)
+	assert.NotNil(t, sh.files)
+	assert.Equal(t, sh.Name(), "Shotness")
+	assert.Len(t, sh.Provides(), 0)
+	assert.Equal(t, len(sh.Requires()), 2)
+	assert.Equal(t, sh.Requires()[0], DependencyFileDiff)
+	assert.Equal(t, sh.Requires()[1], DependencyUastChanges)
+	assert.Len(t, sh.ListConfigurationOptions(), 2)
+	assert.Equal(t, sh.ListConfigurationOptions()[0].Name, ConfigShotnessXpathStruct)
+	assert.Equal(t, sh.ListConfigurationOptions()[1].Name, ConfigShotnessXpathName)
+	sh.Configure(nil)
+	assert.Equal(t, sh.XpathStruct, DefaultShotnessXpathStruct)
+	assert.Equal(t, sh.XpathName, DefaultShotnessXpathName)
+	facts := map[string]interface{}{}
+	facts[ConfigShotnessXpathStruct] = "xpath!"
+	facts[ConfigShotnessXpathName] = "another!"
+	sh.Configure(facts)
+	assert.Equal(t, sh.XpathStruct, "xpath!")
+	assert.Equal(t, sh.XpathName, "another!")
+	features := sh.Features()
+	assert.Len(t, features, 1)
+	assert.Equal(t, features[0], FeatureUast)
+}
+
+func TestShotnessRegistration(t *testing.T) {
+	tp, exists := Registry.registered[(&ShotnessAnalysis{}).Name()]
+	assert.True(t, exists)
+	assert.Equal(t, tp.Elem().Name(), "ShotnessAnalysis")
+	tp, exists = Registry.flags[(&ShotnessAnalysis{}).Flag()]
+	assert.True(t, exists)
+	assert.Equal(t, tp.Elem().Name(), "ShotnessAnalysis")
+}
+
+func bakeShotness(t *testing.T) (*ShotnessAnalysis, ShotnessAnalysisResult) {
+	sh := fixtureShotness()
+	bytes1, err := ioutil.ReadFile(path.Join("test_data", "1.java"))
+	assert.Nil(t, err)
+	bytes2, err := ioutil.ReadFile(path.Join("test_data", "2.java"))
+	assert.Nil(t, err)
+	dmp := diffmatchpatch.New()
+	src, dst, _ := dmp.DiffLinesToRunes(string(bytes1), string(bytes2))
+	state := map[string]interface{}{}
+	state["commit"] = &object.Commit{}
+	fileDiffs := map[string]FileDiffData{}
+	const fileName = "test.java"
+	fileDiffs[fileName] = FileDiffData{
+		OldLinesOfCode: len(src),
+		NewLinesOfCode: len(dst),
+		Diffs:          dmp.DiffMainRunes(src, dst, false),
+	}
+	state[DependencyFileDiff] = fileDiffs
+	uastChanges := make([]UASTChange, 1)
+	loadUast := func(name string) *uast.Node {
+		bytes, err := ioutil.ReadFile(path.Join("test_data", name))
+		assert.Nil(t, err)
+		node := uast.Node{}
+		proto.Unmarshal(bytes, &node)
+		return &node
+	}
+	state[DependencyUastChanges] = uastChanges
+	uastChanges[0] = UASTChange{
+		Change: &object.Change{
+			From: object.ChangeEntry{},
+			To:   object.ChangeEntry{Name: fileName}},
+		Before: nil, After: loadUast("uast1.pb"),
+	}
+	iresult, err := sh.Consume(state)
+	assert.Nil(t, err)
+	assert.Nil(t, iresult)
+	uastChanges[0] = UASTChange{
+		Change: &object.Change{
+			From: object.ChangeEntry{Name: fileName},
+			To:   object.ChangeEntry{Name: fileName}},
+		Before: loadUast("uast1.pb"), After: loadUast("uast2.pb"),
+	}
+	iresult, err = sh.Consume(state)
+	assert.Nil(t, err)
+	assert.Nil(t, iresult)
+	return sh, sh.Finalize().(ShotnessAnalysisResult)
+}
+
+func TestShotnessConsume(t *testing.T) {
+	sh := fixtureShotness()
+	bytes1, err := ioutil.ReadFile(path.Join("test_data", "1.java"))
+	assert.Nil(t, err)
+	bytes2, err := ioutil.ReadFile(path.Join("test_data", "2.java"))
+	assert.Nil(t, err)
+	dmp := diffmatchpatch.New()
+	src, dst, _ := dmp.DiffLinesToRunes(string(bytes1), string(bytes2))
+	state := map[string]interface{}{}
+	state["commit"] = &object.Commit{}
+	fileDiffs := map[string]FileDiffData{}
+	const fileName = "test.java"
+	const newfileName = "new.java"
+	fileDiffs[fileName] = FileDiffData{
+		OldLinesOfCode: len(src),
+		NewLinesOfCode: len(dst),
+		Diffs:          dmp.DiffMainRunes(src, dst, false),
+	}
+	state[DependencyFileDiff] = fileDiffs
+	uastChanges := make([]UASTChange, 1)
+	loadUast := func(name string) *uast.Node {
+		bytes, err := ioutil.ReadFile(path.Join("test_data", name))
+		assert.Nil(t, err)
+		node := uast.Node{}
+		proto.Unmarshal(bytes, &node)
+		return &node
+	}
+	state[DependencyUastChanges] = uastChanges
+	uastChanges[0] = UASTChange{
+		Change: &object.Change{
+			From: object.ChangeEntry{},
+			To:   object.ChangeEntry{Name: fileName}},
+		Before: nil, After: loadUast("uast1.pb"),
+	}
+	iresult, err := sh.Consume(state)
+	assert.Nil(t, err)
+	assert.Nil(t, iresult)
+	uastChanges[0] = UASTChange{
+		Change: &object.Change{
+			From: object.ChangeEntry{Name: fileName},
+			To:   object.ChangeEntry{Name: newfileName}},
+		Before: loadUast("uast1.pb"), After: loadUast("uast2.pb"),
+	}
+	fileDiffs[newfileName] = fileDiffs[fileName]
+	delete(fileDiffs, fileName)
+	iresult, err = sh.Consume(state)
+	assert.Nil(t, err)
+	assert.Nil(t, iresult)
+	assert.Len(t, sh.nodes, 18)
+	assert.Len(t, sh.files, 1)
+	assert.Len(t, sh.files["new.java"], 18)
+	for _, node := range sh.nodes {
+		assert.Equal(t, node.Summary.InternalRole, "MethodDeclaration")
+		if node.Summary.Name != "testUnpackEntryFromFile" {
+			assert.Equal(t, node.Count, 1)
+			if node.Summary.Name != "testUnpackEntryFromStreamToFile" {
+				assert.Len(t, node.Couples, 16)
+			} else {
+				assert.Len(t, node.Couples, 1)
+			}
+		} else {
+			assert.Equal(t, node.Count, 2)
+			assert.Len(t, node.Couples, 17)
+		}
+	}
+	result := sh.Finalize().(ShotnessAnalysisResult)
+	assert.Len(t, result.Nodes, 18)
+	assert.Len(t, result.Counters, 18)
+	assert.Equal(t, result.Nodes[14].String(),
+		"MethodDeclaration_testUnpackEntryFromStreamToFile_"+newfileName)
+	assert.Equal(t, result.Counters[14], map[int]int{14: 1, 13: 1})
+	assert.Equal(t, result.Nodes[15].String(),
+		"MethodDeclaration_testUnpackEntryFromStream_"+newfileName)
+	assert.Equal(t, result.Counters[15], map[int]int{
+		8: 1, 0: 1, 5: 1, 6: 1, 11: 1, 1: 1, 13: 1, 17: 1, 3: 1, 15: 1, 9: 1, 4: 1, 7: 1, 16: 1, 2: 1, 12: 1, 10: 1})
+	uastChanges[0] = UASTChange{
+		Change: &object.Change{
+			From: object.ChangeEntry{Name: newfileName},
+			To:   object.ChangeEntry{}},
+		Before: loadUast("uast2.pb"), After: nil,
+	}
+	iresult, err = sh.Consume(state)
+	assert.Nil(t, err)
+	assert.Nil(t, iresult)
+	assert.Len(t, sh.nodes, 0)
+	assert.Len(t, sh.files, 0)
+}
+
+func TestShotnessSerializeText(t *testing.T) {
+	sh, result := bakeShotness(t)
+	buffer := &bytes.Buffer{}
+	sh.Serialize(result, false, buffer)
+	assert.Equal(t, buffer.String(), `  - name: testAddEntry
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testArchiveEquals
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testContainsAnyEntry
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testDuplicateEntryAtAddOrReplace
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testDuplicateEntryAtAdd
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testDuplicateEntryAtReplace
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testPackEntries
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testPackEntry
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testPreserveRoot
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testRemoveDirs
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testRemoveEntry
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testRepackArchive
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testUnexplode
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testUnpackEntryFromFile
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:2,14:1,15:1,16:1,17:1}
+  - name: testUnpackEntryFromStreamToFile
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {13:1,14:1}
+  - name: testUnpackEntryFromStream
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: testZipException
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,59,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+  - name: unexplodeWithException
+    file: test.java
+    ir: MethodDeclaration
+    roles: [111,100,41,45]
+    counters: {0:1,1:1,2:1,3:1,4:1,5:1,6:1,7:1,8:1,9:1,10:1,11:1,12:1,13:1,15:1,16:1,17:1}
+`)
+}
+
+func TestShotnessSerializeBinary(t *testing.T) {
+	sh, result := bakeShotness(t)
+	buffer := &bytes.Buffer{}
+	sh.Serialize(result, true, buffer)
+	message := pb.ShotnessAnalysisResultMessage{}
+	err := proto.Unmarshal(buffer.Bytes(), &message)
+	assert.Nil(t, err)
+	assert.Len(t, message.Records, 18)
+	assert.Equal(t, message.Records[14].Name, "testUnpackEntryFromStreamToFile")
+	assert.Equal(t, message.Records[14].Counters, map[int32]int32{14: 1, 13: 1})
+}