comment_sentiment_test.go

// +build tensorflow

package leaves

import (
	"bytes"
	"log"
	"strings"
	"testing"

	"github.com/gogo/protobuf/proto"
	"github.com/stretchr/testify/assert"
	bblfsh "gopkg.in/bblfsh/client-go.v3"
	"gopkg.in/bblfsh/client-go.v3/tools"
	"gopkg.in/bblfsh/sdk.v2/uast"
	"gopkg.in/bblfsh/sdk.v2/uast/nodes"
	"gopkg.in/bblfsh/sdk.v2/uast/query"
	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/hercules.v10/internal/core"
	"gopkg.in/src-d/hercules.v10/internal/pb"
	items "gopkg.in/src-d/hercules.v10/internal/plumbing"
	uast_items "gopkg.in/src-d/hercules.v10/internal/plumbing/uast"
	uast_test "gopkg.in/src-d/hercules.v10/internal/plumbing/uast/test"
	"gopkg.in/src-d/hercules.v10/internal/test"
)
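
// fixtureCommentSentiment returns a CommentSentimentAnalysis configured with
// the default gap and minimum comment length, initialized against the shared
// test repository.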
func fixtureCommentSentiment() *CommentSentimentAnalysis {
	sent := &CommentSentimentAnalysis{
		Gap:              DefaultCommentSentimentGap,
		MinCommentLength: DefaultCommentSentimentCommentMinLength,
	}
	facts := map[string]interface{}{
		items.FactCommitsByTick: map[int][]plumbing.Hash{},
	}
	sent.Configure(facts)
	sent.Initialize(test.Repository)
	return sent
}
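
// TestCommentSentimentMeta covers the pipeline item metadata: the name, the
// provided and required entities, the configuration options, the command
// line flag, and logger propagation through Configure.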
func TestCommentSentimentMeta(t *testing.T) {
	sent := CommentSentimentAnalysis{}
	assert.Equal(t, sent.Name(), "Sentiment")
	assert.Equal(t, len(sent.Provides()), 0)
	required := [...]string{uast_items.DependencyUastChanges, items.DependencyTick}
	for _, name := range required {
		assert.Contains(t, sent.Requires(), name)
	}
	opts := sent.ListConfigurationOptions()
	matches := 0
	for _, opt := range opts {
		switch opt.Name {
		case ConfigCommentSentimentMinLength, ConfigCommentSentimentGap:
			matches++
		}
	}
	assert.Len(t, opts, matches)
	assert.Equal(t, sent.Flag(), "sentiment")
	logger := core.NewLogger()
	assert.NoError(t, sent.Configure(map[string]interface{}{
		core.ConfigLogger: logger,
	}))
	assert.Equal(t, logger, sent.l)
}
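
// TestCommentSentimentConfigure checks that valid facts are applied and that
// out-of-range values (a negative minimum length, a gap above 1) reset the
// corresponding fields to their defaults.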
func TestCommentSentimentConfigure(t *testing.T) {
	sent := CommentSentimentAnalysis{}
	facts := map[string]interface{}{}
	facts[ConfigCommentSentimentMinLength] = 77
	facts[ConfigCommentSentimentGap] = float32(0.77)
	facts[items.FactCommitsByTick] = map[int][]plumbing.Hash{}
	sent.Configure(facts)
	assert.Equal(t, sent.Gap, float32(0.77))
	assert.Equal(t, sent.MinCommentLength, 77)
	facts[ConfigCommentSentimentMinLength] = -10
	facts[ConfigCommentSentimentGap] = float32(2)
	sent.Configure(facts)
	assert.Equal(t, sent.Gap, DefaultCommentSentimentGap)
	assert.Equal(t, sent.MinCommentLength, DefaultCommentSentimentCommentMinLength)
}
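
// TestCommentSentimentRegistration makes sure the item is registered in
// core.Registry and is listed among the leaves under its flag.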
func TestCommentSentimentRegistration(t *testing.T) {
	summoned := core.Registry.Summon((&CommentSentimentAnalysis{}).Name())
	assert.Len(t, summoned, 1)
	assert.Equal(t, summoned[0].Name(), "Sentiment")
	leaves := core.Registry.GetLeaves()
	matched := false
	for _, tp := range leaves {
		if tp.Flag() == (&CommentSentimentAnalysis{}).Flag() {
			matched = true
			break
		}
	}
	assert.True(t, matched)
}
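
// TestCommentSentimentFork checks that Fork hands out the same shared
// instance to every clone and that Merge accepts the clones back.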
func TestCommentSentimentFork(t *testing.T) {
	sent1 := fixtureCommentSentiment()
	clones := sent1.Fork(1)
	assert.Len(t, clones, 1)
	sent2 := clones[0].(*CommentSentimentAnalysis)
	assert.True(t, sent1 == sent2)
	sent1.Merge([]core.PipelineItem{sent2})
}
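
// TestCommentSentimentSerializeText checks the plain-text serialization of a
// single-tick result.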
func TestCommentSentimentSerializeText(t *testing.T) {
	sent := fixtureCommentSentiment()
	result := CommentSentimentResult{
		EmotionsByTick: map[int]float32{},
		CommentsByTick: map[int][]string{},
		commitsByTick:  map[int][]plumbing.Hash{},
	}
	result.EmotionsByTick[9] = 0.5
	result.CommentsByTick[9] = []string{"test", "hello"}
	result.commitsByTick[9] = []plumbing.Hash{plumbing.NewHash("4f7c7a154638a0f2468276c56188d90c9cef0dfc")}
	buffer := &bytes.Buffer{}
	sent.Serialize(result, false, buffer)
	assert.Equal(t, buffer.String(), " 9: [0.5000, [4f7c7a154638a0f2468276c56188d90c9cef0dfc], \"test|hello\"]\n")
}
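
// TestCommentSentimentSerializeBinary round-trips a single-tick result
// through the protobuf encoding and inspects the decoded message.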
func TestCommentSentimentSerializeBinary(t *testing.T) {
	sent := fixtureCommentSentiment()
	result := CommentSentimentResult{
		EmotionsByTick: map[int]float32{},
		CommentsByTick: map[int][]string{},
		commitsByTick:  map[int][]plumbing.Hash{},
	}
	result.EmotionsByTick[9] = 0.5
	result.CommentsByTick[9] = []string{"test", "hello"}
	result.commitsByTick[9] = []plumbing.Hash{plumbing.NewHash("4f7c7a154638a0f2468276c56188d90c9cef0dfc")}
	buffer := &bytes.Buffer{}
	sent.Serialize(result, true, buffer)
	msg := pb.CommentSentimentResults{}
	assert.NoError(t, proto.Unmarshal(buffer.Bytes(), &msg))
	assert.Len(t, msg.SentimentByTick, 1)
	assert.Equal(t, msg.SentimentByTick[int32(9)].Commits, []string{"4f7c7a154638a0f2468276c56188d90c9cef0dfc"})
	assert.Equal(t, msg.SentimentByTick[int32(9)].Comments, []string{"test", "hello"})
	assert.Equal(t, msg.SentimentByTick[int32(9)].Value, float32(0.5))
}
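
// TestCommentSentimentFinalize runs Finalize over the fixture data below and
// checks that every tick keeps its comments and gets a sentiment value
// within [0, 1].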
func TestCommentSentimentFinalize(t *testing.T) {
	sent := fixtureCommentSentiment()
	sent.commitsByTick = testSentimentCommits
	sent.commentsByTick = testSentimentComments
	result := sent.Finalize().(CommentSentimentResult)
	for key, vals := range testSentimentComments {
		assert.Equal(t, vals, result.CommentsByTick[key])
		assert.True(t, result.EmotionsByTick[key] >= 0)
		assert.True(t, result.EmotionsByTick[key] <= 1)
	}
}
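
// TestCommentSentimentConsume needs a live Babelfish server at 0.0.0.0:9432.
// It parses two revisions of labours.py, rewrites one comment into
// license-like text and strips the position of another, then checks that
// exactly four comments are recorded for tick 0.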
func TestCommentSentimentConsume(t *testing.T) {
	sent := fixtureCommentSentiment()
	client, err := bblfsh.NewClient("0.0.0.0:9432")
	if err != nil {
		log.Panicf("Failed to connect to the Babelfish server at 0.0.0.0:9432: %v", err)
	}
	hash1 := "4f7c7a154638a0f2468276c56188d90c9cef0dfc"
	hash2 := "2a7392320b332494a08d5113aabe6d056fef7e9d"
	root1 := uast_test.ParseBlobFromTestRepo(hash1, "labours.py", client)
	root2 := uast_test.ParseBlobFromTestRepo(hash2, "labours.py", client)
	comments, _ := tools.Filter(root2, "//uast:Comment")
	for _, c := range query.AllNodes(comments) {
		obj := c.(nodes.Object)
		text := strings.TrimSpace(string(obj["Text"].(nodes.String)))
		if text == "we need to adjust the peak, it may not be less than the decayed value" {
			obj["Text"] = nodes.String("license copyright boring")
		} else if text == "Tensorflow 1.5 parses sys.argv unconditionally *applause*" {
			obj[uast.KeyPos].(nodes.Object)[uast.KeyStart] = nil
		}
	}
	gitChange := test.FakeChangeForName("labours.py", hash1, hash2)
	deps := map[string]interface{}{
		items.DependencyTick: 0,
		uast_items.DependencyUastChanges: []uast_items.Change{
			{Before: root1, After: root2, Change: gitChange},
		},
	}
	deps[core.DependencyCommit], _ = test.Repository.CommitObject(plumbing.NewHash(
		"d48d50d0b72be95bb8054e3839b8dd79fa970b1c"))
	result, err := sent.Consume(deps)
	assert.Nil(t, err)
	assert.Nil(t, result)
	assert.Len(t, sent.commentsByTick, 1)
	assert.Len(t, sent.commentsByTick[0], 4)
}
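
// testSentimentCommits maps tick indices to the commit hashes observed at
// each tick; TestCommentSentimentFinalize pairs it with the companion
// testSentimentComments fixture.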
var (
	testSentimentCommits = map[int][]plumbing.Hash{
		0: {plumbing.NewHash("1f0bd8cac393d7da2e5845b5ffce5f4a8568006f"), plumbing.NewHash("37a1db225420851cc668600c49697d9a2057f098"), plumbing.NewHash("9b17b682922a762531a7673238b3485323c07a48"), plumbing.NewHash("238d16974fe0856b2e7e5ae13da09f129f6baf0b"), plumbing.NewHash("ea69c1a3bb214d23f96eb77443366dd20554e7a0"), plumbing.NewHash("021d11e6cc00a794a6e6f75d22b5704117c9af48")},
		1: {plumbing.NewHash("b365fe450d9fb86e8c6b8799c1b706312d7b9900"), plumbing.NewHash("2379f276817019564a48bf4a1b13dab304209e50"), plumbing.NewHash("e5844deadccf77aef650b6af263453a2e2d83e09"), plumbing.NewHash("824ceb93354b8d6968173b1f6e15db30930f5707"), plumbing.NewHash("6b8dce4779e3acdaac56c045d8cf9bc062fdd200"), plumbing.NewHash("c589c524d73acb4f2b46a7dabd0e4529d4696048"), plumbing.NewHash("f0deb9093d7edba8ca57b0b4d6bb3241fe9f8b8a")},
		2: {plumbing.NewHash("610cebd2f159502378ccd458093ee7c2fe62bdb3"), plumbing.NewHash("2219704aaa0662dd9a276d6b21e0352999be54df"), plumbing.NewHash("abd01d116b3570aa06999d6bc19cf5fcdfbd33bc")},
		3: {plumbing.NewHash("33d82a418ab7cf0f650481cd8a9347dbc5d96191"), plumbing.NewHash("33530ce7de699fefadeaebd531603cd36e357ce5"), plumbing.NewHash("31c39350e442b0f8298fca8855bfbd40918bfee8"), plumbing.NewHash("3a986bfd79c0dd83c6b0f55ce940821d78cc6363"), plumbing.NewHash("5f7fbc85ead063d36e4ac28ec9806d300124cd17")},
		4: {plumbing.NewHash("a0152cc8e314bb8e1757acbf962e5d669acff20e"), plumbing.NewHash("c57d5cce7903511edd4048f8bfed2ad0dc6f6b6b")},
		6: {plumbing.NewHash("bf43f611a04448d15a9174cbd79804cd5605e4f0"), plumbing.NewHash("6819d38e2980e55d05d6f8cdae76fdd799fe7834"), plumbing.NewHash("4ef999ff2654ff2b981bd7ae78d1a1673a5a6d89")},
		13: {plumbing.NewHash("f35d529f42198f30c81011e5d0c60a37fe5ab7ba"), plumbing.NewHash("25ad4000f9eadde485a293e29f0c081b736251db"), plumbing.NewHash("8b2849bd314fc8f8f42f65fda95916b5f1cddafc"), plumbing.NewHash("f953508bd994b13bc64c1f58df08ed7b6d20c3cf"), plumbing.NewHash("6cd076a14fa163cae38ff7768daf810d851c4885"), plumbing.NewHash("cf1b6ffe7b6ef01e05710504c71b595f5b2b13f3"), plumbing.NewHash("9c5f08442a7e89f435077b16b04994fb9f9322ff"), plumbing.NewHash("4bb07c1afb5048b7bfbe11ed9e9043db0f52ebdc"), plumbing.NewHash("0b5848e2bf44fd04027e019a56bbc27b67229ee5"), plumbing.NewHash("0af6b6c7f5cbad394673bc962dd248f50fd821ff"), plumbing.NewHash("6b3e44484ba10f2a4a3020dd223447cd66a5519f"), plumbing.NewHash("0b96fc2855a835e86b36c5255daab44f57af8624")},
		15: {plumbing.NewHash("d7904013acbe6b9a37d51bf691fa4e1e69c8bbd9"), plumbing.NewHash("8a7119d1ebe81cdaafefe9139a1757572f081b22"), plumbing.NewHash("81bdb74ef34f2cb43997e5c1489cd5cb30d82fba"), plumbing.NewHash("b7a5d4de702b863eaa86d609da5b2c77a8c96240")},
		17: {plumbing.NewHash("e16891e5e1044cabbf058409c0b75d26428afd81"), plumbing.NewHash("9f595fe7f7856ae9ea8a1e8439e1342757f0f3a4")},
		22: {plumbing.NewHash("c224482190b8d4a057f9546753b8331e18a6d953"), plumbing.NewHash("4bb90a5bfd762269adeee772e233a733a6d318a9"), plumbing.NewHash("df4d37054e000cb247f54c852993e6a4757745c1"), plumbing.NewHash("de27afbbe9882c2b84987821ac2810adc7244f8a"), plumbing.NewHash("8f6cb7c93a641cbbef0288072210f72107cf73ba"), plumbing.NewHash("e074a049205829feb52c7d90f2011519327e56f3")},
		37: {plumbing.NewHash("361fd41f12b6f4982db91dc117074a8f5d9a2294"), plumbing.NewHash("426ea15427054705cc6c318f627e271f53c2d10b")},
		39: {plumbing.NewHash("33b7be70180b55cb47308dc735de6601ea52694e"), plumbing.NewHash("6fc8660a5148c1693e2c02874ffdb6a7d8f1a2c8"), plumbing.NewHash("ea92046a19cc4e1446d141a34e260c4a878bd98b"), plumbing.NewHash("938f96b647c538567bf0344e61e05a9a6f53e9ab"), plumbing.NewHash("31ea01865609427c8e940e19a61095a50a44da36"), plumbing.NewHash("122baad7fa39329308d330b3ce0e653bd6d46985"), plumbing.NewHash("e97b873d9ed330710ae3e81efa2015bd1600c809")},
		49: {plumbing.NewHash("e82ab0ca50ec5230718e97d0affb2a8573ca52c9"), plumbing.NewHash("482bf236b32ee650f910e98c528ebd7212a7ba2f")},
		64: {plumbing.NewHash("3d51788efd8de128cbca1f624ada0e66a1a7b203"), plumbing.NewHash("5e49367ee623cb6dc2f9207cb6c24235473f5040"), plumbing.NewHash("b5c1ccab5a98864c73910513d07ad4adaf25dfa7"), plumbing.NewHash("28113b2010a86e9e1fac7f53cb115df3b6883a31"), plumbing.NewHash("39a69b3f528a1a83bb1767135d6e57ba81e6e3be"), plumbing.NewHash("4f9958452c84967eae7d0f2c833bb1b396f3ba48"), plumbing.NewHash("1d61d18b9e7f995ae37081e58711013177ad8406")},
		68: {plumbing.NewHash("188ea6d8e2b06dba5d9223ab69484d4c45d72800"), plumbing.NewHash("8c1fe9193f55fd758af74d3270e5630db630fa2c")},
		72: {plumbing.NewHash("a32b7ef00471333a1496b36ce4fc8d6e75d5957e"), plumbing.NewHash("a96ac71a1034abc709be039c17d09637aeb58c14"), plumbing.NewHash("6892a20a9290c6a19e05ece4a428ed2c11461451"), plumbing.NewHash("a44d5a7dfb3a4de36b113c0d2d059000bf084482"), plumbing.NewHash("9736c77671f402fed95d805572dcc1976443cd20")},
		73: {plumbing.NewHash("ee179f7da155c3cb8129b954db828906886ad4e4"), plumbing.NewHash("cd3a0359fa344653363784b02fc1a375922c3b46")},
		80: {plumbing.NewHash("d2b229df2ea0bab712379c418115bc44508bc6f9"), plumbing.NewHash("872a9faf180e1257e63c577c5f0273d12cd6ee9e")},
		81: {plumbing.NewHash("1857a6af5ed6592552559b15c190708e912be139"), plumbing.NewHash("702e9d3beca8b514a2ec10992e863dcd0b7cd9e5"), plumbing.NewHash("4830b4be274e389a891b20b99b2196c3b5c70fa9")},
		83: {plumbing.NewHash("06f22db69a01bcbacc3425954ab2a38d495fdada"), plumbing.NewHash("e23e86ae7a7f3ed021e5b9f013b66203b41a0931"), plumbing.NewHash("1f224de9b1c23e18995b6e651adde6ca06033cac"), plumbing.NewHash("ccbe381dcdf2d43b2d88d33ba3dcf4680fe116a8"), plumbing.NewHash("0e6fd3d306c6a8dffdbc333433006e0b6b483873")},
		84: {plumbing.NewHash("ac0a9db039850550cd542dc609b00b990f5c60eb"), plumbing.NewHash("fd5b68dbe3e80aec440f578f6a30ca314615b806")},
		91: {plumbing.NewHash("cc750adf60aab3e339269e552e0e6559546c7b7d"), plumbing.NewHash("0d6575c7f9152a7d00b72445db97a5647ec9895e"), plumbing.NewHash("f04fdec9e61eabe9750af00f56c18bdde8972045"), plumbing.NewHash("8ef90a03b37b03a3d4b88b958462fa200be99d7b")},
		93: {plumbing.NewHash("cc9edcf472766257979b28148727ccef1bcf9b41"), plumbing.NewHash("877d44740c6c28657ce72903865cc491e2297008"), plumbing.NewHash("e3d3d622189aa19d077ea2fec46bb9da67d85e9e"), plumbing.NewHash("e8a6ae298d7fdbd5ad4e5fe25c2d5ff82d0178a3"), plumbing.NewHash("ce659e568be435116981663d8743b406340c8e77"), plumbing.NewHash("c7aac3ce39d9cae52fe864a8aeb722b87c8f0dff")},
		95: {plumbing.NewHash("2ab9f0ef616c6d05124bdf9c81eb542d73f8e5b6"), plumbing.NewHash("560cb94519ad5a5eed3adc323a9e1ad3cf0690e9")},
		98: {plumbing.NewHash("f30223096eb2d16a65e0e99cb5c31b1f8dfe0980"), plumbing.NewHash("f1cd436574b28063f6490ab669e1b357b124e6a8"), plumbing.NewHash("66b8f3717569558e69e18a1ea127204885a623d7"), plumbing.NewHash("553a7c0265e241976bfabac929dfd971add9266b"), plumbing.NewHash("be75548ca354fc9113bba16b8c83ae8fd1b031ae"), plumbing.NewHash("dab55518bacc70c2f44ca11787c346b821343316"), plumbing.NewHash("53a05b6e4c1e7ee2c6d13eda9826f5bc9a321391"), plumbing.NewHash("f2c97d817b6f8e886e9bae5975e36f9e77f7ee69"), plumbing.NewHash("b22e547e9882471337bb482c717ea7b63fe46ecc")},
		109: {plumbing.NewHash("22bd7cfab6ef749b033a79c364cd720755d6fbd3"), plumbing.NewHash("2d0e84a85709ef6d59cb7940a930241e6dba8dac"), plumbing.NewHash("94c930e99e8908d2188213672bed050b54ebdb5a")},
		110: {plumbing.NewHash("08b1964b773b35739673bdbe14ee25b9a7520fa0"), plumbing.NewHash("fd1d5908c6b050a64f0eee3571f73a1983bd26d4"), plumbing.NewHash("510068b83ea175ca80cd15a26035d704f3807858"), plumbing.NewHash("036d968ad628ce1047c90b31a41906ac28e1e96c"), plumbing.NewHash("08abc317f2d02ef98689f1f94197baa05b4c0e91"), plumbing.NewHash("43d84368c62aebd3976177de6be486b417e462ad"), plumbing.NewHash("8824f1b469cf48d5d8fa96be5368a4cd82d783ca"), plumbing.NewHash("46e19b95d8346a553183c6a734576bca4b55d950"), plumbing.NewHash("6a4aab453f42bda2368e51bc707c22c40c384b34"), plumbing.NewHash("0e87f4070eda7742da3e17d113ad2488d2e14401")},
		111: {plumbing.NewHash("efbf7a27f18c42a5444f8b96aba20cc226c9249e")},
		116: {plumbing.NewHash("ec8f7f00173599016e0fea01f081f465d91b1813"), plumbing.NewHash("3037183c1b0d850659c68571ea8256108b8fd2eb"), plumbing.NewHash("ed9834c62ab87bddaa50d8be423ef12b63c051f7"), plumbing.NewHash("1ebec1e515bac4c4c4564a410c2de3ee33bb8a1e"), plumbing.NewHash("2e204479addbbeed63302a638fae379c994c420a")},
		118: {plumbing.NewHash("e975f8a691f76a87e3f00dacfa8235cac7282660"), plumbing.NewHash("32fd202805ad31d7ea33a429ed2c79c2c4ee7717"), plumbing.NewHash("3a28da9e54ba8bb5b2aee2266c9bca3b0a975ca7"), plumbing.NewHash("d1387c1e871e726e6b75050f39809fd54fdacb81"), plumbing.NewHash("b6aaeb35ee6b27b026e0495f1c04e830fe8c5db9"), plumbing.NewHash("e06f9df8782b6859cca4aeccb63ef3ca661a49df"), plumbing.NewHash("7c3bf9d02fb58a65394185ba65c192e5bbfa12ef"), plumbing.NewHash("0e295dbda19bde82792054849acce69ed7a9b742"), plumbing.NewHash("4f6b1a4dae7701cc79a523e96fe812efaa54745b"), plumbing.NewHash("06610325092f8be6d1d8810289343f5be5327a61"), plumbing.NewHash("f1d60121ed6c300dc20d214ec4b810ee1cfdd584"), plumbing.NewHash("a4def20848c511e309e84062236646df12073a72")},
		121: {plumbing.NewHash("6a1ede1aa368e8c8f72fa32d47c519999e3fa138"), plumbing.NewHash("3212cd4ccdf5526a9572e3a19f64c3e761434227"), plumbing.NewHash("48381f8af59d568ddd1ccaf69439329dcf4bad19")},
		129: {plumbing.NewHash("de78ddff9c6c14e606fce8d88d04de0eff77319b"), plumbing.NewHash("37965cae6b6746f22a00d04e0b963a297c51f940")},
		134: {plumbing.NewHash("616fcbaa20cac012d2febd75517d3c3aa77318a0"), plumbing.NewHash("e1d8b1ba094a0766e703f89d4cf7c8b1d3e7cd6c"), plumbing.NewHash("0eea5055c9c4025d44263d0ae64181bed3d3087c"), plumbing.NewHash("c81d6ec93f6350eabec347acc4420456dc07312b"), plumbing.NewHash("9e7f67b6f9cd6f3646aa3c767abbf4a932d4f930")},
		135: {plumbing.NewHash("b057624707be574d78a741516be2f98c03f3e193"), plumbing.NewHash("1b66e36e25f4d947a3bccf6e5ba9222d322dc06b"), plumbing.NewHash("425f29038ad13e1f627f75aadb16e44d71a25d1b"), plumbing.NewHash("eac3bf8b587936782952539bafc6dfb1c9d49fa2"), plumbing.NewHash("b3cb7f4ef724a5d2ae58100c8cbcda51567d118d"), plumbing.NewHash("ae4219e6b13fbe666d4ad5c34ac3cfa86355d8df")},
		143: {plumbing.NewHash("23b1d7929f0d3a4affbc7b627a5f20427ec0b68d"), plumbing.NewHash("818f5d7dc4035692bcd31d37658388ebeb542e5c"), plumbing.NewHash("5e9579aeac5c57d821c5a330a82e2592975fad96"), plumbing.NewHash("78feed7fa98f2f7fc03e3e21b69b7b2997f0cef2")},
		149: {plumbing.NewHash("6cd8d3c37a2fcaf861e49bfebad49f43cfaa7ab3"), plumbing.NewHash("f66b58bb6c9f6d1ef22abd84f63b67a12ed7e301"), plumbing.NewHash("34999c865896642e25d46d6334200775008e1562"), plumbing.NewHash("bad60eedda8d84fd532802c741797bbc7950a1dc"), plumbing.NewHash("a7014350496dcb80b8866e42f4a42500a8eb07e3")},
		151: {plumbing.NewHash("9773e810a527d88b97239e0117be0967a3b4214f"), plumbing.NewHash("3c4f0ac609b5a7b1f55db0f3dd5d25d366416f70")},
		152: {plumbing.NewHash("1eb2e6e3f267e6d63da23b43a7b842d912dba2ed"), plumbing.NewHash("7a86ff7f5bcbf8bb56cdab90fd873c2f2b561b3e"), plumbing.NewHash("2c30d503eada5cb5429b6f6d8ced0e996760e40e")},
		161: {plumbing.NewHash("c368b86d11b439f061a72697a8e6b64a0eb1c871"), plumbing.NewHash("2e60c999240a5ec01365cbdc0ca35e2e9f207a92")},
		173: {plumbing.NewHash("393642df55e867302d2f0d3d5a4c29b349688038")},
		190: {plumbing.NewHash("b9bf954f244434603e447634364b26bb099c75fb"), plumbing.NewHash("5bab11eec7977f1a4d30006fe15287ac2e109ba0"), plumbing.NewHash("83544cdb413e3be4096ea3cdd96c9edd0de17b8b"), plumbing.NewHash("876bca046fd9acf5e39b17dc2a9c38b94ca7f4e1")},
		193: {plumbing.NewHash("775719983f66a79b55877a97cd4271f9b5d3084a")},
		209: {plumbing.NewHash("e7c6d598a95d9654c7080fca4260cce8abd71269")},
		210: {plumbing.NewHash("52fd68f4c5bd616928f482d3d79e230919714d67"), plumbing.NewHash("01bb88513d5a693d1d7bdd4f9555490d607b73dc")},
		226: {plumbing.NewHash("f5819a0d4e88d32b24686a12f3209d7157fc0046"), plumbing.NewHash("227c300a0a2edf934d89c4e972fbbec1826d06da"), plumbing.NewHash("b1bbedfb46b7b486efbeb85b6999044b3672e099"), plumbing.NewHash("3d25fae014a1fab4ef75dc16747aa93d0d0bb58d")},
		227: {plumbing.NewHash("1dc1d25a580f888f02c9449535c16b2d8dbb1549")},
		232: {plumbing.NewHash("cda6a998ef55121223489364fb340e7ea74ffc4c"), plumbing.NewHash("15fea0488abef42d59498d9a02e3b33fb9c7d222"), plumbing.NewHash("24b5e80667c8998d7e5e9689085fecc92a9506d3"), plumbing.NewHash("368df8ef04dfc289e3c6d5a177336f97bb233ae7"), plumbing.NewHash("33ed943ad59fc99f469c0d9d52bfd8462542bf74"), plumbing.NewHash("6ffa18e3905ac8dac016111063db4bbacbc788c7"), plumbing.NewHash("5ed913da1108b63c69d48d30b395ae35e576dc9f")},
		235: {plumbing.NewHash("fd05964135bfd8c4182a42f915d51e6e45c2a216"), plumbing.NewHash("4e519f7aa701dc435ce42ac5f6bcacf19f8efa09")},
		236: {plumbing.NewHash("52dac5e4b3e0e3a913f8c709ea4983f52f11d6d7"), plumbing.NewHash("a744b600e94ae00fbec71ef493afdff48bc3816b"), plumbing.NewHash("8ad18ce8f5f88d2244ceec3075c5de0a0880e1aa")},
		237: {plumbing.NewHash("34838cd3692a9af3d6d61a42de8e2f40d1e8a2d6"), plumbing.NewHash("8f2b5f0458cce4e5588238b86e3e2738952ea849"), plumbing.NewHash("bc5e993ae30a59d7bcdfa03513ecc3d43b58a5f7"), plumbing.NewHash("8b75182a177ce4772082d1fcb8376a9e535a12c5")},
		243: {plumbing.NewHash("47ed18a3af8be20dce91286a331d4671074ee0ca"), plumbing.NewHash("25e85b616fad4166407c6460f757fbc885c4fd22")},
		246: {plumbing.NewHash("4b39b5f36b997868f9105d8bc76188f8837f50db"), plumbing.NewHash("634aedca1a059a86f35b5fb8147d19d49e4d5954"), plumbing.NewHash("71c6c83e301e5bc3339df67bb36b973ea3c82e87"), plumbing.NewHash("69a8acc05a40a523aa4e51b2e281b94a2e8bdc43"), plumbing.NewHash("6c458ff28103580bbc3ba3d1acb38dc8e7eb12b2"), plumbing.NewHash("2bc900c3d24560bcf7745dcef953be9b1bc77870"), plumbing.NewHash("e32436144be933486182db6baee76c8746810488"), plumbing.NewHash("b6ea543a4616be87936cd443be4c5839e1a4ef37"), plumbing.NewHash("5c3aea2202b198ba94f83335f90427034585ccbd"), plumbing.NewHash("ef3ef71ee6836a69201f908c9ea907f08151bfef"), plumbing.NewHash("26b040effe8e7ebc19118ce9abd888230b6ad754"), plumbing.NewHash("ea2fd6526bfd75a99038c26fb619c43a3ab0a8d3"), plumbing.NewHash("677e15cd02c3741514cd3d6cedf8273e923c71e8"), plumbing.NewHash("e600b0d947a5c27971cea0beea5a1a0d652428ca"), plumbing.NewHash("36392c75d74ab575bd986fcd9ee71078eff13b1e"), plumbing.NewHash("060ef32ce0a2fbbed836abbe0a6bef421b177ab8"), plumbing.NewHash("7ecd6c3c5f4d3008b197a40e89681455bf3d796c"), plumbing.NewHash("cbee000b66b747e09ea6521ff08fcdfdaecead6c"), plumbing.NewHash("8f2d6d2714aa1b60950a2fc355d39297b7f2cdfb"), plumbing.NewHash("361a7cfe41f7663fd9f0c12345e4ca52b0a8a2a5"), plumbing.NewHash("df860fdb94c63cf7898315277fe951d1c0ba16a9")},
		250: {plumbing.NewHash("7bb897dff13db7bb23d07370cf2e6cba38949025"), plumbing.NewHash("39a457cccd8b6f122b667a99c0d916c409ffce14"), plumbing.NewHash("3e9f5c204a80a0e081295a1a245ea45692d3b84d"), plumbing.NewHash("bb17fc7af16b0369d24084f0fade3ad102adb10c"), plumbing.NewHash("2161910a53604bdc48027c5c4e71f9af4228cbaa"), plumbing.NewHash("f295ecb302ea3298ab61dda892f323c7c7807d02")},
		252: {plumbing.NewHash("ada2f2fa0dd9dca2972e374ff937b59461cfacb5"), plumbing.NewHash("e8818c841c4d3708b8123c8033055f892acaf010"), plumbing.NewHash("55e62e587fb93f720cfd83d23a4282710207018a"), plumbing.NewHash("05a82e957c0fb75e4a58057b133dbcddd816112e"), plumbing.NewHash("74b37bf87a8b76e9d4af3a7b29f570aa9c96f572"), plumbing.NewHash("897942783a32c7c828e504a27b8a0e48f4fa95ad"), plumbing.NewHash("c11bdd850a298b5787583a4ab09882908bd9a27e")},
		255: {plumbing.NewHash("b126b6328a44fa3332d2d7fd011da3ff196a669a"), plumbing.NewHash("31cf6b16f48d1da338c7af26d64f5104534fe0ab"), plumbing.NewHash("e7859bf1884bb1455451e95d579a0db00f0fbfbd"), plumbing.NewHash("36eff96a5de6cb425d7e12cc187472eaca307243"), plumbing.NewHash("258688408042b9ba757e75e6fb1da3097afb4e47"), plumbing.NewHash("6d6481fedd18acc9a2de548f8516fa10ed7d2269"), plumbing.NewHash("d400fc45129fbd4d3fe83345a69ec5973bb29a37"), plumbing.NewHash("07ffc76b9318ab1197ce61838992960e9abb3c22"), plumbing.NewHash("3620f02258a84851e5141b26509d69c480054c85")},
		256: {plumbing.NewHash("96f3404a574faa97b1895753fed7a602f853baa7"), plumbing.NewHash("81787dd2bb6b0f0afd3e4f55ead8fa6c854bbc05"), plumbing.NewHash("be46766622db892c0a21b733716f95f09f39b35b"), plumbing.NewHash("82353da4dc66bc702a74c6c233f3e16b7682f9e6"), plumbing.NewHash("c2534964b76015eb3d76261b025c7556b354764c")},
		257: {plumbing.NewHash("1ef35e90fc285680f368dc395fb44c680d47421d"), plumbing.NewHash("20ca5befdcdb8759aa7fdb4c452215deaa4f7cbf"), plumbing.NewHash("7da1523053f2e5f4fa15c87e019b3244c8653a53"), plumbing.NewHash("59406a7148bac8c88ebb59a8c404d12016d926d5"), plumbing.NewHash("a4fff5aba3679a7d98297c9cc74764080a268c18"), plumbing.NewHash("45c5f36399c08e71b8d5e64997b5a40cec50b92b"), plumbing.NewHash("7401f47da4d438927b982c3dcecf07313c05ffd7")},
		259: {plumbing.NewHash("40864d89974a2b0a2f97aaf7bfdd66659c8ceae2"), plumbing.NewHash("12e0052f43eface40882f2d4086daf8a2ba9a599"), plumbing.NewHash("25d76eb1241cf8e7592125737eb653e94b1695db"), plumbing.NewHash("656681e9d8a2a10a7d9666270a3b2e5157eabbc2"), plumbing.NewHash("06f5f43079ac8206ed7a4533814cc92f7d3b58f7"), plumbing.NewHash("9a93fc51cf50bb557c30678ba1c75826860a1425"), plumbing.NewHash("d229c4784505346e6a039cf2bdd5cbca3ce9e648"), plumbing.NewHash("ee3af2b8d0c8ca8cbd9a243cb64de4162c823fff"), plumbing.NewHash("4135797250a2ea7fc6886fdf9569370b752da1b7")},
		260: {plumbing.NewHash("0f86b45918bc68821905f6ac0894874eef564fed"), plumbing.NewHash("fddcf3acd393553bfe230ec87ddb5f836659000d")},
		262: {plumbing.NewHash("792b75559484629340d55603f453b4af1d278dab"), plumbing.NewHash("1e418e02004afbd33d181ad58406a3bd6867a12e"), plumbing.NewHash("d68f12bbdff3fa2ca0dbda654e3770b95ee3a0ec")},
		263: {plumbing.NewHash("151a6fbab9e20b586ca435fc051405ddee7c95a8"), plumbing.NewHash("42b3d37a54545882699283b5764cf3c997f8d9cd"), plumbing.NewHash("d04cac6526171af608baf38f68f9767af62b0555"), plumbing.NewHash("063dd7d6c556269dc2712b941fc7471225538bfd")},
		265: {plumbing.NewHash("9d0cf9fbfa420cc72e1f06879a350915f8ffc791"), plumbing.NewHash("2f48f056ecbd3750d7bc52d59363a12974c5e8f8"), plumbing.NewHash("332f5c661f3eeb370be7fd599c5e62bea4dfb576"), plumbing.NewHash("1fd55f69e5543bdc739763aebb4f0276a45995d4"), plumbing.NewHash("dac6b2f6a5faf9705bcbaffa927d12258f804fd5"), plumbing.NewHash("80096798fc3ecbd51e8e7a9109d6344d808d03a7"), plumbing.NewHash("dd58103a3cdee9cf57c3def74de22032c7765348")},
		268: {plumbing.NewHash("27ee3a6bbc5b7a1b440f54ad18ffdbe98c271693"), plumbing.NewHash("49c343f83648cea3432cf18f3e7f8e3134d2328d"), plumbing.NewHash("df612a881f41e72fa7129ac5e1b5318088635f0d"), plumbing.NewHash("fd632b70c57d7e94f587105c829c2f316093d85f"), plumbing.NewHash("13df0bf32accd57a7288a29fa09e4badf1d0b780")},
		270: {plumbing.NewHash("eda1a9e0a4eb786128a117409f26c5bf072ea172"), plumbing.NewHash("29e60ab372e1a123be2f2884a6818f9a2508bf68"), plumbing.NewHash("2a4680ec3e0a358c093573fb0e03b07609a38003"), plumbing.NewHash("534f68ec7752c615ac0092af2af64796a6d001fb"), plumbing.NewHash("7a61cc20b94714d964d4eca557da2aa35859e4bb"), plumbing.NewHash("16675b98c0d47c1f58f42ada534ff52721aa085e")},
		279: {plumbing.NewHash("3a9ffc8ffd447b66e426c6befeaa163d6ec3e8da")},
		281: {plumbing.NewHash("ad9d41f1b0aff443b0e1ec571b3d537f7c711ff5"), plumbing.NewHash("69e19b1e03492c54179bf98ea3cab7c7d032cf2b"), plumbing.NewHash("3f623df0209ef591a3c49626bdd83e5043484e31"), plumbing.NewHash("f447644900fd1c7653ce4c9a3728ce20ba20f610"), plumbing.NewHash("6445d385eeee56e77728ea4a5ee6c810e5ff6dc6")},
		286: {plumbing.NewHash("037e592f2ba7c18b71bc9b39f84de11af0252863"), plumbing.NewHash("998efc04eefa0c14057c1fa87cab71df5b24bf7e"), plumbing.NewHash("5d1789c8055f51672e758ddb9f21868f16f19705")},
		289: {plumbing.NewHash("a5c07d796aa63733cfc79ccfa8c37cd604aef3a0"), plumbing.NewHash("ada6dd29435066a19782ad25dc79827a000a90d3"), plumbing.NewHash("94c9183179dee147d0c9676b714dc145456f87e4"), plumbing.NewHash("1fdcc370b6d9a71483bc968d5a0f3d259e07a78e"), plumbing.NewHash("cb13a33a3191d703d439444b9aaa5a5089dae0d9")},
		291: {plumbing.NewHash("3d3b8c52e97ab8cbb66612b74e5564f70748b7ed"), plumbing.NewHash("58a94a9b058cded717583be4bf99b9365bdb7f2d")},
		294: {plumbing.NewHash("70f0fe515a6b9300f3de5e82f1e7d4c01ec40b0e"), plumbing.NewHash("a18932cb658c012808dd78bbd7f5bf1fe712e6c9"), plumbing.NewHash("83aaadaa9d69214880d20b1e2bd9715a6c37fbe6"), plumbing.NewHash("3bfe4eace955f8b6b46b389f37375b9cc4ae94a0"), plumbing.NewHash("5c3839a950c59ae80a999a2f6bf5a317b0ca54ab")},
		297: {plumbing.NewHash("852fe9cc7b02a7fd7edbd47efd0d66f3a960d99f"), plumbing.NewHash("45714e343f36292cec938dc01ebb8a2dec10cea4"), plumbing.NewHash("262c8ce1a0bd46ed9095fbb43d4de46af51ae3b1"), plumbing.NewHash("fb99e04f869fd6acec3590d090bb31ca2aecef96")},
		306: {plumbing.NewHash("4922a67f09d5a54bcebc28cfe5baced9981bbeee"), plumbing.NewHash("45955be120b261f3f5b43c0c31190c6bacfcf4e3"), plumbing.NewHash("aceded7bfbdb94436e198788dc7892a958c3379a"), plumbing.NewHash("ecdce975d375c25a1738e86ce727eacf83b4090a"), plumbing.NewHash("59dcd7ba7add890feb484db0feea42f0f0138e1d"), plumbing.NewHash("b2681804f8aacc07dc345673364fd787ae4d66f5"), plumbing.NewHash("282e634f3a916ec1c4dc31543762af1e7f82b503")},
		307: {plumbing.NewHash("4341c623ff651e847d0d9af2335e8310aa359288"), plumbing.NewHash("027a0182100a7391c5194c23ab281b27480e25f5"), plumbing.NewHash("14b109072ac60b8cb8fc976b2e1adc29c36ca5bc"), plumbing.NewHash("bb2b3ada3d502c740f2704e44362c9bc32be4353"), plumbing.NewHash("001f29cf54fd75020160c5cecd4438e5a5f6b90a"), plumbing.NewHash("5d3c267398d208a4ce817808ae0907f4acaf0808")},
		309: {plumbing.NewHash("c5d11f1da3c3ecf7c9f29cb30a4515f9b8403b61"), plumbing.NewHash("9720db95669abdca9f57438be708b3edf37c5485"), plumbing.NewHash("3d85402ca548957030685c451c96b05342454906"), plumbing.NewHash("0ca4a0fbae6836e02073487b202260a67cc0c80a"), plumbing.NewHash("a07efd4b5c3acb2a31dad59f842da3b1484f70b4")},
		317: {plumbing.NewHash("523e24e8acf8ecc6d6b31e78cdc6e4c3967bbbdb"), plumbing.NewHash("f27c5b05003713db6a5691570a4539acfce6d885"), plumbing.NewHash("cae797b8039d632944feff73ca0f2dda8f2cde65"), plumbing.NewHash("657b9fb48e93b59083d2e0b8a5e4daf237179dbc")},
		324: {plumbing.NewHash("3df101cc779abd11913b5244e4c8b86b746d260e"), plumbing.NewHash("87cc39d99f80255bdade7c847ab3e584db81df07"), plumbing.NewHash("44e0a7bbf9316c39afd5b49e09b668ef2dbb706e"), plumbing.NewHash("2b26389188f5df1b273bbe6b2a0be80db7312709"), plumbing.NewHash("5b23dd8a2fcfdaf8e2860933a2b49e8b4a723a90")},
		325: {plumbing.NewHash("c9642571c23004a10e8bca1a5b1446af399f80db"), plumbing.NewHash("9048b5cbba3dcfe2149aaf473fe95a4174799116"), plumbing.NewHash("1a6cb71732fe2a1297a6a3d92644f45df3211d7e"), plumbing.NewHash("1019e50e7fa105f96ee4e276894876c5ccfa2670")},
		329: {plumbing.NewHash("d03f7768b8bf83d2485e7c165b38775c81c2f0e9"), plumbing.NewHash("d1a3842b3d476aaf8479ec4dc3a9eff6ad35e8b6")},
		331: {plumbing.NewHash("ea5cb74414286de3bdeb8b752a161e006b9286fa"), plumbing.NewHash("f10c430731904a11558632a0a4936eb3f154647e"), plumbing.NewHash("0c75006d12dbad2a3ef364b416dd61744b0ebe3d"), plumbing.NewHash("7896ef71436ce5caf1aa0157b2179b02094ae852"), plumbing.NewHash("654404c2ed8db47a5361a3bff9126a16507c9c4c"), plumbing.NewHash("58ca064f9321ad4e6db09d9e8b22db4a3d8fc7dc"), plumbing.NewHash("82d3489764b29609ff1f993183222627995b5e93"), plumbing.NewHash("47bd0af7025bd52dcb95dcfe46e217cb9158d75d"), plumbing.NewHash("784d81d2c88630b8cba8236f3354afa3326cd3c1"), plumbing.NewHash("2c49115cd32a0f1fd14f727abfe683d66a6c1854")},
		333: {plumbing.NewHash("db8f43128be9c83c5832bfb61a4454ed077f9d36"), plumbing.NewHash("461573a8d998065bc205dfc460c3269ead4afa08"), plumbing.NewHash("06a1545645d974350d13425246eec53a08cb6ab8")},
		334: {plumbing.NewHash("8a6cf4c13eb0a3b4a0d9ede0ee0530be4580c0a8"), plumbing.NewHash("3f905e4a357c05b216c29e34d26c12a4cd866f8e"), plumbing.NewHash("ff676f10f66d409552c652da1f1e126615cf7fc8"), plumbing.NewHash("ababd952100de2d4f5f4a3bbb4925406443b98c4")},
		336: {plumbing.NewHash("379966050454151db016f427dd7dae24a35e45e0"), plumbing.NewHash("f3f19146f9c248b9d600c5767d1ad60dc9aa20da"), plumbing.NewHash("80927fa9589e587437c4e590cd782aeca538c991"), plumbing.NewHash("c4361d2246f703524932830c40e4c0a8ecb6b95b")},
		338: {plumbing.NewHash("9efe17aeeafc6d8c1406a48f82fc63731d4b2b6c"), plumbing.NewHash("a3c9d2d7c9eb386f7612b7d1d08df0bcec7e5103")},
		339: {plumbing.NewHash("48ce23086b18c07aba6daec86ffd91e919eadf0f"), plumbing.NewHash("becc5f3a2cd0599be17ac9f62e9ac78bde42702f"), plumbing.NewHash("990ef92a60ce6a9e6296235bef7f4ee920ced9c0"), plumbing.NewHash("d9ca798c60641a0ed5eea22e45be95054157af0d"), plumbing.NewHash("0daec53acbf4c3df6c054b36ece5c1ae2db55d86"), plumbing.NewHash("ca05efc76f01e371c9d8915dfa59fe72f390c0de")},
		344: {plumbing.NewHash("6596cc79d60cfa08bdac4e9807da0501cdfe8534"), plumbing.NewHash("ef93e2cffd5c8d231ea3bb7efd79d98035e7e743"), plumbing.NewHash("e5ccf535312626a2727cbfd15455c592357f7568"), plumbing.NewHash("be4a86f6dc61e7cffe3887666b8152fec81fded6")},
		346: {plumbing.NewHash("cfcb1e87038e33d22fa4d8f38ec96c949767ebb7"), plumbing.NewHash("fc476840fa835e916ca046f7df5220773bfb8cfe"), plumbing.NewHash("ce7276bc5565afdf0b5d5d47b6b0d753f812023b"), plumbing.NewHash("bde45eff87ef40ce02da98c56a2ad59ae9170d4d")},
		350: {plumbing.NewHash("0c1af0901d2f7386a1ddb25c61e84794d4ca641e"), plumbing.NewHash("37fd456a5cd44db6ca23975de6fe151c9713c58f"), plumbing.NewHash("80a831de1a954d23b5b9f7b9526398e10a1512e1"), plumbing.NewHash("ecd414d7164613177725c81d023e6a66701de470")},
		357: {plumbing.NewHash("1145fec39ff7f665f7bd352261549f8738de815e"), plumbing.NewHash("295bfe4e3ae7e98655b3630a9f83b2df4a82234f")},
		369: {plumbing.NewHash("61ade48343bbc9a030f60e4398b1f9007e984517"), plumbing.NewHash("ca96737b20642356c7c6a575458d8938ea2e735b"), plumbing.NewHash("cf3b3dff329cb2b50724ef44977abfd0a841ad77"), plumbing.NewHash("7448dcea6591f632026e56f3246593782867923d"), plumbing.NewHash("a066cf8680372f107f4da35b1518998a851e02b7")},
		370: {plumbing.NewHash("bf4dab3501c62836f94ea17d2f0e198348f5293d"), plumbing.NewHash("fcb6ae8eed5058d7759d2db8bdfbf59e1033b1d9"), plumbing.NewHash("e754581ecb8ad61182c06b70b6b4c329c76bd1b9"), plumbing.NewHash("6ddb5a0452d91cc82ad7104277adc93bb0a5826b"), plumbing.NewHash("8740791d5cef5e88a588bdda4b5d3ef6006808f9"), plumbing.NewHash("f57128bd3d347194d77bfe43ff6a794054679d69"), plumbing.NewHash("64449c196e3ad7f5a9e85b9c01e8f0ac83335232"), plumbing.NewHash("efe5916109e220a429a2cff110edb952d747466f"), plumbing.NewHash("10deb8f267e1d982415a23a8f5c8cc44da4f4671"), plumbing.NewHash("f8e2df16f1169c5b6198419d44244d2cbc163c92"), plumbing.NewHash("337c0c66cf8d077d3c2ec9cee552cd7911f2e62b"), plumbing.NewHash("75bef59016a8a230823a04836e1ab6e5bf0079dc"), plumbing.NewHash("0836e47dfce8ecde847f3f5b9ccd559a9e0ca745"), plumbing.NewHash("52dbeb1f26bac4272e35a8bb578f16cc584c3721"), plumbing.NewHash("91a819fb343d27e05a5cdf44ac84b43892bdaa56"), plumbing.NewHash("2a4f6b942db5da321c627ebc1f0ca448bf491556"), plumbing.NewHash("8ad686595202c6aabc44af3d16c0440e016e3fae"), plumbing.NewHash("4d7ff76cfbaf5fb8a57d8f3232c989e8e2c2f5b5")},
		372: {plumbing.NewHash("3ca775144526712656725c3ebfb2d0072b34c328"), plumbing.NewHash("a6fe2ae341f9b02dd5067c964332982d1e10adf6"), plumbing.NewHash("8b3543fca9d811c638bb72d78601c8564f5465fd"), plumbing.NewHash("fe00f5ff6436a94a1212676916a9edd824d44338"), plumbing.NewHash("836fb03aa09730e65365e1bf23c286980b6b06fa"), plumbing.NewHash("3f128b9838e16e7efa7cc2f9e2300a2dc02147d8"), plumbing.NewHash("73817b8b77fa925ffa77d98449de17cf27f69d84")},
		373: {plumbing.NewHash("56ba6b9c7ef8705c1d70b12061b2875f4f544e43"), plumbing.NewHash("d09e2a67bb9ce53f9318930296a9825dd82f7197"), plumbing.NewHash("3860e078a5854282aac5e7329f6a8dd6b20b49f4"), plumbing.NewHash("f981bdb5514c3b1489fd7def6acf91433dd11bda"), plumbing.NewHash("43187187692bdfed0bb5c27c730aa8506e83dcab"), plumbing.NewHash("c064963ef8553a96f6f487ad4c00ed5832cc1cf7"), plumbing.NewHash("0242ca59ac0412b44419de2b6b4d07bebf975610"), plumbing.NewHash("6ad6b19bd60cb343498b1ff5f7e746fbd187dbc9"), plumbing.NewHash("af88e051fa6b20cece17c610b72f9c8f3557aa6a"), plumbing.NewHash("81233b3cd3189bcd62845bd2f56006a2e0417354"), plumbing.NewHash("f7cbdff79cdd9600c8650ecea5acdadcf2da3c19"), plumbing.NewHash("76da13dff6860e4cf8d48e27883926439e12ad54"), plumbing.NewHash("eb8b40cccdfff5e9782c33192ac021f5e7c9bc78"), plumbing.NewHash("2902149f776e20623ec49cec485455c057bd26ba"), plumbing.NewHash("30118bbab0a90f77acd7d366d04c49375fa85aae"), plumbing.NewHash("dbe7662e72986657cc4ac16f15ef0d5a0d9473d9"), plumbing.NewHash("35da9d6ef247941473dad9954a2fd4b07e2f731d"), plumbing.NewHash("d3615e682e1c65e56427bffbf7527d8c2a91609a")},
		376: {plumbing.NewHash("8c73c6f218abc2b7345bab8928e5c5bcf192f4d1"), plumbing.NewHash("201352784088145cb1b320fc7762c81f493df19e"), plumbing.NewHash("2157aa6172c7cc397231e8c22e0388ad62c975dc"), plumbing.NewHash("d6f94c0bc97ace3b82fb1abcb3ec7a4e5cb41b63"), plumbing.NewHash("88a86f7e45f17cf11ddf03dc042d088dbc174df5"), plumbing.NewHash("444cd56740485a8d51c07431035dc0c9ba432b00"), plumbing.NewHash("36cc50803021b21deefb053ad53e39e8442184e5"), plumbing.NewHash("cc985c3a9ca2e01a51ce461acef2b6a61901d44f"), plumbing.NewHash("04c998a742f58acedf65a248b6149497f58904d0"), plumbing.NewHash("599e070824a5d2c6bcb40027cda5357390fc2b8c")},
		381: {plumbing.NewHash("1db555a530ce433f39c9d8af0592e6e81468d6eb"), plumbing.NewHash("6ea3188971a29405710563be48956da94ae26fa8"), plumbing.NewHash("345413fb8c9a8f4367081783fa6c4b99eb9a3608"), plumbing.NewHash("fa1d6b478eaf2e457f42b09b41ffd557fcaf8b71"), plumbing.NewHash("80ebe801384ba9e6bf788df5aec5bb6b45ed9a5e"), plumbing.NewHash("1206120d1084cbe45dc2876f002cb572a97e3844")},
		382: {plumbing.NewHash("cb65139aa8857421e70920422844db8a2d6b0a87"), plumbing.NewHash("3e3c210f1d2a3f86dc6b95800ed8e5ae1d852185"), plumbing.NewHash("3cbca7bdba93df45c6ac2a00683d8f7724301d5f"), plumbing.NewHash("05e1d8e5f4bc3eab549fe386f4db24641acffa7b")},
		383: {plumbing.NewHash("c1c2b330a104ceb7d641613a70cc3f2d70e547f7"), plumbing.NewHash("4f5f88b9bab58e363b95ddc1931f2036d13d14e6"), plumbing.NewHash("57ea065db795d8451ad5fbdf84e8b262276d8690"), plumbing.NewHash("cba3ea9d9069458c40a1cb588d3cd361ad702c12"), plumbing.NewHash("3b961a6b7bb1422197f2204170f03848f2e93865"), plumbing.NewHash("cb5d69c76954544abe094f565272839aea7fdbca"), plumbing.NewHash("2cc9ebf28bef640043fc9abc2c61a421ffded792"), plumbing.NewHash("85f80714c29d6bd8c8cde9138f0369f5df1b9a33"), plumbing.NewHash("b8059aeaba0c65ee57276e3949015909cb205af5"), plumbing.NewHash("fe9b797a46810b6f8d327756fdf286c17239ad41"), plumbing.NewHash("ca7437502bc3a282cb9051e8eb5d92199c97d3df")},
		384: {plumbing.NewHash("b32248d615abfd7835886a0f50962856f7d7c986"), plumbing.NewHash("c4ed82cdf62cce0b18c601cef872925d67e0c53a"), plumbing.NewHash("090a46763ea5768a063e69bebb9349dd42915f1d"), plumbing.NewHash("2eea3a4c5d0fc4104454b0b81712915e400e3bfe")},
		388: {plumbing.NewHash("7ce144881a1dad0bde8f5ff481155f4408de70c4"), plumbing.NewHash("1904194c7a39640b65ab7877d52bccc035a7e2bf"), plumbing.NewHash("36eef0dd9ac921d8b9e995f3de6833e8d6213717"), plumbing.NewHash("0ed00e38f095ec7fe14e55c8e3fd7ce242a79df4"), plumbing.NewHash("b61235b77f87288d62ddd8ce4aae88b76babf887"), plumbing.NewHash("ba29cd8e4682bd0265960ea484c23e8e36df8015"), plumbing.NewHash("5ab48ac5d4157eb23540c9a78268757ff5e764c2"), plumbing.NewHash("c525e634dc1ad6ea4815035b059c13d901209a6a"), plumbing.NewHash("106c0b753a0a1e5a36a2822a308d3e2d502bb594")},
		389: {plumbing.NewHash("85f0448feec3843b587c8674a660462d8b25e1e2"), plumbing.NewHash("9f929999d164a780aaf6671c5f457b26dacf5b32"), plumbing.NewHash("66c8d7baf2544e540b9b15f478d0c60516c50d8f"), plumbing.NewHash("ee7f056779480578dff1cb528e4b8ed1c74092df"), plumbing.NewHash("4680d70a785e07e555a7cba6b48e2aed47ad4aa7")},
		390: {plumbing.NewHash("ad49962ba984536599770277aaf55211faa95014"), plumbing.NewHash("256d4ef71b5a311b07d9df878aa9151b3fc4ce53"), plumbing.NewHash("423a633b5b3663c8e54ef66208dbdf6348d29612"), plumbing.NewHash("61dd53e2621f6a44b90bbe4682eccb1bbf7574e7")},
		391: {plumbing.NewHash("d6ab850f455ad64973c617cbd51ac2ac41dba3f5"), plumbing.NewHash("63c1757df519bc5756c0d7d79dabd5ec0420f3c8")},
		394: {plumbing.NewHash("6fb4e0e441859c1e69cdc2a7e322272a7d816e7d"), plumbing.NewHash("18841fa58de4f45eb64311ee513d16247ae96077"), plumbing.NewHash("5910278ca8583a1e5fb4104c2419212595d5e69e"), plumbing.NewHash("344437c491a1823e8b9deab9f8d90002f3285021")},
		404: {plumbing.NewHash("078b20169b1e7bfe86e5ce5f691b75f2b9de127a"), plumbing.NewHash("d8864bfe48d64c15dc70f13e46c7e08772811fd9"), plumbing.NewHash("18504bcc86c670f41d6ca89a3a5f6e5fed839b20"), plumbing.NewHash("3c57aff85b7d4b1c60c94bea0e06025de2eace5b")},
		411: {plumbing.NewHash("5308033936aa05bec65f11a6769ffbc904d0cac3"), plumbing.NewHash("d5ae6f32dd1136226e7bd7a63a2120c317668d1e"), plumbing.NewHash("610ccba9f5c030e6106520359c27dc9c226670d7"), plumbing.NewHash("b581eb3f27a49c57f861ae64a3dfdd6f8fba53b3"), plumbing.NewHash("aea00258e7c8548bc0b9b91731fe606ce79509f0"), plumbing.NewHash("a08995a90d0b5f94300ecd6c27b5e32f9b08de2c")},
		413: {plumbing.NewHash("15d0b0ea08833b8f2db19e2da1ac428bad69d4cc"), plumbing.NewHash("d7ff7cde925f803919d954f5cb73a685b8f14b0a")},
		415: {plumbing.NewHash("2091347a712a35a820669b623ca22eac90aefc0f"), plumbing.NewHash("aa91994166422b9cd3d07721f4efd25549c7e7ba"), plumbing.NewHash("8a717f5b6c709cfd521548fc525f2c20c2b6b78f"), plumbing.NewHash("8fc5b90e9a8c6d6fcedf35bad1c6692e40c92f74")},
		418: {plumbing.NewHash("e2fb8b2786817b4014c077c13e99efb551fe35c1"), plumbing.NewHash("2f8acfe4bf340ccae58ade4cbfddb0513fa1d4dc")},
		419: {plumbing.NewHash("f0bfc24adc41840d937f6f485bb69591532cc538")},
		428: {plumbing.NewHash("c5e2116ead1792a9e23d094b6793fad80a979746"), plumbing.NewHash("7b5bab83f4836a23a8a27a5e11762c999bcdfb6d")},
		435: {plumbing.NewHash("e5b99c7512fe6ba5067481ec6136e2abefda1be0"), plumbing.NewHash("0e18e345b0168ff31d08a062714f0a3e556afaca"), plumbing.NewHash("7869cdccec0f5b4009d86640396b2818dab21bde"), plumbing.NewHash("3bba409d9e26ec7f7a964362667a6e8c0e08fc89"), plumbing.NewHash("34b8b57c2fdf10e3490753bae1f358f81dba966f"), plumbing.NewHash("dfc128b89a7b7f3fbebd0ad7b8a6a843e8dc2f26")},
		436: {plumbing.NewHash("b71f6ba8644681b51ce4962ad2e0543adbfd4d28"), plumbing.NewHash("fd3cfb196b2be22c274cc7b7f0abbd1c8e87a2c2"), plumbing.NewHash("604ea8d68abccd1ed762b364007179cae3dc2960"), plumbing.NewHash("489c07e74821a86e71c0aff502acc5e765d1450d"), plumbing.NewHash("62053e68e2895f990295bae4bda3e671767849fb"), plumbing.NewHash("97e31b6090b809cc1987f7d16a1dc55379edaf42"), plumbing.NewHash("ce51e199701e71cf68a7187075d2a0115f5baf22"), plumbing.NewHash("af5c5b6a55528a255500b733439a66d25b5647ec")},
		437: {plumbing.NewHash("b0d52d930aa00b7d0876abffe38e33d8a307b117"), plumbing.NewHash("ded23f14c79dddacd47d3fc5aeadef74aa028b0a"), plumbing.NewHash("25c10af59694c6a61778f9111e3ca97dfd8971b4")},
		438: {plumbing.NewHash("d7e39347b99dd5489b089ffac8a525db0ff5cc03"), plumbing.NewHash("f6ecab58cb9bcd4da5836d540e2f83617239c352"), plumbing.NewHash("c8d605db55a1b7b64d9e10b961ac5cc0674cdaa9"), plumbing.NewHash("825beb42c4dbf55ecebfb3021f3b59133e52bb5b"), plumbing.NewHash("e4b3a052a4f49f6ba5cdd93e983de45df595bc12"), plumbing.NewHash("4e0c8cf25bad948bb5886ff25b1d4c7781ab51c0")},
		444: {plumbing.NewHash("53aaa842ed4cb3b8619032eb6c5deedef32fdbc0"), plumbing.NewHash("6501b587c06b525eeda92b5f318bee8d74b9cc5c"), plumbing.NewHash("6b122ba25f79ab0a43ff8d78f65cfba36a7bcee5")},
		453: {plumbing.NewHash("cdb5b09cd7f1f02d9eced2ad75b55cd56c679d34"), plumbing.NewHash("258cf3b0f73642116b395656fe498df8d827efcd"), plumbing.NewHash("5bb5eb1657ea7517179b35f03949f4673b5c32d5"), plumbing.NewHash("d266b75423780880ad222598c66d08efbccaa0b0"), plumbing.NewHash("76406dd0c286da809b63f628c38cd37ecaae20eb")},
		461: {plumbing.NewHash("439f2f3b2b7fc4b35f4fbea32fefbc15e136c6ba"), plumbing.NewHash("3bc80d3db4365674003bd30af1e0cc7859b6184b"), plumbing.NewHash("dc122c31ef31382c82ea5cc9c081d7f6ee3b0f5b"), plumbing.NewHash("885e6e621bea9cf9224853a974d0e2d83168a6fa"), plumbing.NewHash("aa45dee5a474e1b0713eda858e55a2bf74a754fc"), plumbing.NewHash("8d3f39852aa6596c41f6a2a901ee76501bc1741e")},
		464: {plumbing.NewHash("ee8ff00a2a8a307c952fb8e7bef241188c7fb12b"), plumbing.NewHash("742ac532620a009d3d5f8c0751ea8804c0668629"), plumbing.NewHash("ede79f818e491a43ada3705314024021aed23d7c"), plumbing.NewHash("3927505d1a2bd6f14544bc623da8c725630a0d80"), plumbing.NewHash("b4adce34dcf026679ca7b19ccede9104719e544a"), plumbing.NewHash("28819d36a45ffa8222d27d0ecd0c4f2c8b632a76"), plumbing.NewHash("eaff5bdfd709dbc9f3fd73e89dcd424caec505b3"), plumbing.NewHash("6e5f97fca58df658bb93d663e2b54ce0781da8d9")},
		473: {plumbing.NewHash("1ea3f44f06694aad184644f9b73135abc3eb5e29"), plumbing.NewHash("b6a776b24278bebaec721c3cfead33bfc01e32cf"), plumbing.NewHash("98974efa5f51d6f55afbf2bc125d6fd090bcf782"), plumbing.NewHash("b3a26a5b3048c6fc0911e219b6fa61b5ae2b86f5"), plumbing.NewHash("52ea31b65c09ec3370522956419f42e7507495c2"), plumbing.NewHash("55e5680535ab7bed75757d9109b58323f8aa72ee"), plumbing.NewHash("642eaca6186e9e5c4e23a2db057959ecdd1a6c74")},
		474: {plumbing.NewHash("51f7cf03679f282bad7cae365c2b5cf5ee0f3905"), plumbing.NewHash("ca467cc50ee49776140830e297dd52b3f26c8f76"), plumbing.NewHash("b35b9433642ddb65081adaf4f08e06f667ee7115"), plumbing.NewHash("47c09d9557a9c4614270912cb39d55d7e7c01268"), plumbing.NewHash("e836c10c6f6a17004e6b385304b7836103ea3398"), plumbing.NewHash("5a1e63990a92f643d4fd45796d371a3565b37058"), plumbing.NewHash("d90d47310489b8816129a4a0c992db729453567f")},
		477: {plumbing.NewHash("01d5e7bc4782daafcfa99e035c1bdbe13a985145"), plumbing.NewHash("0d5289141eb183df209391c8f63a7d8fd9281b3c"), plumbing.NewHash("8fa82ae5cb3bd25869b47c225218c88d8759bd66")},
		479: {plumbing.NewHash("1b158ff4ed9d4ca88d659d50f89a1b248da50ead"), plumbing.NewHash("7f7300b8cbd9511277732f912aeffe18033333d4"), plumbing.NewHash("40195c2fa276cf9c7d2125e525133dd83b25e23d"), plumbing.NewHash("ff3f00d845addec5cc5189471978a6393211b589"), plumbing.NewHash("cc0e60c1012b7c72eeb5ea0c41b8a2045177ae5e"), plumbing.NewHash("8e1ebbfc11382a0f1e12592c15cdb861d7d89537"), plumbing.NewHash("946ccd32285ed762743fafce486e7f13d6d4f012"), plumbing.NewHash("726c9fc8a6344a6a0ebc90d2d64b2bd0f044c2ff"), plumbing.NewHash("381a108e6db2a9c8e728d67805c34596b65b6f95"), plumbing.NewHash("0a108b3fb2b53a95cba0fc359171594ae1d43f61"), plumbing.NewHash("7a56925176a2532e729d425a8f5e2c9a24dc5f86"), plumbing.NewHash("3533912016c64f1f03516657c1d93f02a11e5c67"), plumbing.NewHash("576cf8978bec5a779316c882d1b218aece981407"), plumbing.NewHash("4302d8060d06b8a19f2d16b5565ffdabf82277dd"), plumbing.NewHash("74c51f213c7a187b76098331ca10fee10c9e5e59"), plumbing.NewHash("2ac6811362aafdbf275c18d634ae6ba78cea406d"), plumbing.NewHash("2da66ed00943d0c5682b694828cd6f5469e10529"), plumbing.NewHash("efa5b04797ae0e881eef2d4c72f618fae066dd12"), plumbing.NewHash("3c8f91ee3d32222633c125dd97b669dbf740a1eb"), plumbing.NewHash("0a3939883ad60db9a9ab85d7602cc3b471612799")},
		480: {plumbing.NewHash("23e1ad2df7c81bcf89b20225ccf90f815d5e978a"), plumbing.NewHash("b7edcf6eeaaccea61cec23c10f3b99f81c0f8bc1"), plumbing.NewHash("c034262b7857a0738339b1747b052e51ae60dce0")},
		485: {plumbing.NewHash("c689b52dd1ecb4c4b260714e826353b4e6b2003d"), plumbing.NewHash("f6bcaffe4a67012b4b067038bdbc5d43b807540b")},
		487: {plumbing.NewHash("df84c696763163882560f01f26d723bd87782ffd"), plumbing.NewHash("ea561ba6d87879b9a67a7454e1d29777fb59132d"), plumbing.NewHash("05abe814acd329d426645ecb4ca8c1e39defdac0"), plumbing.NewHash("b91854ea9d9764ab670b7cccdcec40ae964195be"), plumbing.NewHash("facc82361234ec9ad721daf4501d817208450ee4"), plumbing.NewHash("8778add0d66aed64a8970c34576bf5800bc19170")},
		488: {plumbing.NewHash("e0179bad2f16792eef30210e206792abd0272bbb")},
		492: {plumbing.NewHash("cce65ce34d62aee957d51fcdd50e23a974ec15e9"), plumbing.NewHash("1855c49d1f1045e5819100ac25aa3c38ce46f5a0"), plumbing.NewHash("a9fc2bed49c7988ccd6d365c1542818172b64ffe"), plumbing.NewHash("6c42da2abfe2e4d5eef9bf7ba3b318b8fe4154e8")},
		494: {plumbing.NewHash("257ace722c04c1448f2e178330d51b9a11925b9f"), plumbing.NewHash("c725f8d354ef7f6c535aa33581a9928e66f0a6a9"), plumbing.NewHash("99f564e9724d3456fc5b48b9dcd6254c83611242"), plumbing.NewHash("1deaf71388ef7e831411ccbd9ec77ab795b286a1"), plumbing.NewHash("5367a44acb16cc496eb784d7a0518d70a94f221c"), plumbing.NewHash("97d2a73dd36e90a3502c575a8ea93125e894c84a"), plumbing.NewHash("05883934f1b9a13e09a46ec6845927d9397f3540")},
		500: {plumbing.NewHash("89f1e051472cd6fd5d0c5d002ca353b50524a0db"), plumbing.NewHash("69d5139b8ccc860cd3ed8b63be83a104ceeedafb"), plumbing.NewHash("55447cbb3d33c2c209fdad8a1d53a166bc39544e"), plumbing.NewHash("4805e5856ba3f00d5953ee54a16abc7c4e96cc77")},
		504: {plumbing.NewHash("dc98019d4965c728a7b2326cacd1b921b61dba70"), plumbing.NewHash("5d2acf4897456873d2dfc8b0f77a52a2ee1f90e0")},
		505: {plumbing.NewHash("91310971b9b8a52e650ccbab79717d261418f104"), plumbing.NewHash("28d9c0c5116c0a61dadda65139af15c2ccc1562a"), plumbing.NewHash("04d785f4bf0e726bf75dbc5357a158449813e3b2")},
		507: {plumbing.NewHash("458edeed9a9010f1ba6184ae40b474181a606502"), plumbing.NewHash("4e155139ca939d54736ff3dd65591107cd9aa91d"), plumbing.NewHash("e8190a8d8d4a59359f93bc9b366d04b9c72cc2ed"), plumbing.NewHash("b525f5f4d733d464657f158000457c0b9f45c1d2"), plumbing.NewHash("dad54ec2114f486a68ea3b1ad5c462fcf7af8097")},
		508: {plumbing.NewHash("a4d191d4f97a8100a561b87802fdd6ab5db0ed26"), plumbing.NewHash("3d5bf9753f6a770bf02b3eecd8b85fb2195cbc85"), plumbing.NewHash("1941eaabe0e71d850ea7fb2fdcca4c987b8e363d"), plumbing.NewHash("b2392413fabc193727b1e2c5b12e694068c93405"), plumbing.NewHash("52c1a7456fe8fc8ecb91ed7a5122d566aa2cb4af"), plumbing.NewHash("f25e894558d9ef11d89645049e01142ef971dcc7"), plumbing.NewHash("33ff9dbce230068e65dedb2f41a00c39c70bef93"), plumbing.NewHash("3bf76379868f963b37fa2c015ba26b8f99f738a8")},
		509: {plumbing.NewHash("007d2c2e25791b600f4a5e4ccee17e1a5929d8e9"), plumbing.NewHash("02ff1d4462197a4d419ed407b0b6fa45fba14632"), plumbing.NewHash("8d9cb782fb73e5d08dd75f29a1158e68df0e17fc"), plumbing.NewHash("d4f5dff8eeaa3f13e5ab3cba3e8babaea66fc90e")},
		514: {plumbing.NewHash("6a6d939dea195ab94dfad2050cd2a0b3555a2568"), plumbing.NewHash("7d2f0b1ba8c1ebd4213f748878953d576626fe84"), plumbing.NewHash("d1870595966f50730dd2bc1317b85c7470ea5654")},
		516: {plumbing.NewHash("e15eb40317ce7a37b2c461d6822a50be0c001405"), plumbing.NewHash("ad3db301f29b2e56cd24ebb9898009046cc09873")},
		519: {plumbing.NewHash("d0659327bd2d70e47d8608ea9a48a9b9f4cd4a3d"), plumbing.NewHash("f23f2ff2c96537274beb99aca8417f054f817501"), plumbing.NewHash("fbc4f37037a29bfda9e07fcc265a431a7fbc84b3"), plumbing.NewHash("534f6b7975dcd334f6d68eef14598d75c79e7921"), plumbing.NewHash("f605769af927396415b11930761cc90a882b3947"), plumbing.NewHash("4984c5fc7cb28efccfdcde4d14470924ea62792c"), plumbing.NewHash("fe0347dbf095afbafa5a9bc159b755bbabf8dc09"), plumbing.NewHash("e02554412f36b1eb8ea59a58c41a9d2e8ddac13a"), plumbing.NewHash("756153899a5c3b532b6a52ebce943ef9d7e81709"), plumbing.NewHash("a0a0d4263073b5027688d8f56e82926c60c68bb7"), plumbing.NewHash("065fb2a74ccde5f411483df9cfa1e95c64800c73"), plumbing.NewHash("b184c76205c6811fe8e8aad59abba6c4aff89c42")},
		523: {plumbing.NewHash("6f54b233f101323c55fc1d34696938713c2679b2"), plumbing.NewHash("48ae7217e482a1a3624d6e5380c972a653cacfaf"), plumbing.NewHash("2321fbbc1d1168a281b59612d35419d15a6cb485"), plumbing.NewHash("799bec66a2a812bb60f32f49bd8744dfa498fada"), plumbing.NewHash("870d7f7f936bab348589d8d0bcc4d252c6ed832f")},
		528: {plumbing.NewHash("0df0177437ce672d654db6d7edfdc653aaf67533"), plumbing.NewHash("b8fddc862e3c5dc8b00fba7ba63498a3adb5750f"), plumbing.NewHash("607635d2ce421d77eea96dc3eb9c9a592ac312bc"), plumbing.NewHash("4325843ef07b00eb3f234d5d3dd5c92205effe9f"), plumbing.NewHash("f05cd95fadfd9a2084f27eb88ff62d1bdfe5d5e5"), plumbing.NewHash("cc92025fdc862e00cf787cc309c741e8944ed0a7")},
		529: {plumbing.NewHash("f5ad1c57532c34c91931a24ad228d7bfe8ca03e2"), plumbing.NewHash("685ce7573d7282f6279882c5f624ef00b43fc3f4"), plumbing.NewHash("9bc2e60fd587389701c077f5bbff69250d6fb0b1"), plumbing.NewHash("4b2093ef67ab9a267dc371e3a193749ad764f78e")},
		530: {plumbing.NewHash("25874ceab26bd9e08e532c442dc845bccf0590d5"), plumbing.NewHash("40685c3b2a8c535f6c68fa3917ce5f1507dce186")},
		531: {plumbing.NewHash("667577664051e07fceea25a73fef9e1119366435"), plumbing.NewHash("79edae58d5892c5a7eb19b68f9e79dfae4682e20")},
		533: {plumbing.NewHash("13bd33e73f0ba8b072584c807eea1ace9c00fcf5")},
		536: {plumbing.NewHash("672890b1c843206c6c55afd3f2304f895c131820"), plumbing.NewHash("9f6acd960c0a0c699c79ca1d571783e1692568fb"), plumbing.NewHash("305b3bed747bb8dd358cf82d11bcb1aee5b6e517"), plumbing.NewHash("ee2d08ff798ba8aa02e14db0aeec0cca613f428a"), plumbing.NewHash("1dc5d43d3232c15247a5ddd1ed97e930e8ba1505"), plumbing.NewHash("8b42fff90e14503d5612106d8d17aebc58a9c061")},
		539: {plumbing.NewHash("4fb3f1b3f384c3a05306b37ea9a736144ed6394a")},
		541: {plumbing.NewHash("0b04ac3117e44e9fcc0222b479a34048171462a7"), plumbing.NewHash("b2e3780e8cf864508d29df554d1cd26aa44824cd")},
		542: {plumbing.NewHash("54fc6465377da71da506827df086374b3f95cc42"), plumbing.NewHash("06f18fa1b9aaf9796445aa14a809a59281186a19")},
		543: {plumbing.NewHash("875c5214137401f2e29ed7995940d8cb6613a350"), plumbing.NewHash("25ed701dbd34978a62a1a7cb0a5f7d0e3c08e23f"), plumbing.NewHash("82a22b20fcbc4db596713fc9b0766e6bca167417")},
		544: {plumbing.NewHash("99bd066f38ac9603a5c00b2eab57f6d15412ddc2"), plumbing.NewHash("414d5f09781161dcc367226acb71e90d41b01013"), plumbing.NewHash("de66211afbc994a0687fb387ab0ba68c20f34be0"), plumbing.NewHash("cfc9b4d41d4a1701120a5d81df08a9a6f088d856"), plumbing.NewHash("f0d9867d09be9aad4d42aba9d2ec5a9a020037f7")},
		547: {plumbing.NewHash("43060d8c7d76a11f9807e0dc918228fccc5a5701"), plumbing.NewHash("ed131973ef5748915ab3a03c9f60c9051d34881c"), plumbing.NewHash("fb6a2941b90494c18fdb995e11315c15b5b7738e")},
		553: {plumbing.NewHash("5f58a6d2cae6e6e0b50e6e9495dcc31dab054e80"), plumbing.NewHash("04df170bea435e33ede59ad3f6ee94689a56fd52"), plumbing.NewHash("6ee5d61c9145dd46a80ee1429a19b88bd9eec10a"), plumbing.NewHash("d864512631668fbfb75fe7a454c85577cfbe3278"), plumbing.NewHash("c455a19f8e303b5ab1530fe98808808a30c77647"), plumbing.NewHash("e0d871b7dcc67d2f1ca88059cc21b1a503ef1ee2")},
		558: {plumbing.NewHash("0ce7e4976a98e57ac7ff918860bca6576cbef37e"), plumbing.NewHash("6689189819dd37b860a137dd59bcfd292361ede4"), plumbing.NewHash("4ed71386855d62f1c1934826d23e6ff929491bec"), plumbing.NewHash("8281988842e40c48b711fbf7cfc7c2c56f9dc788"), plumbing.NewHash("4de7eaa6a80fd4257b866a6b695450c40b72dd28")},
		563: {plumbing.NewHash("999f4028295217324be0fe12487444fc510ca6f2"), plumbing.NewHash("ef7911310daeb810f25d3b85001e5f5efef2ebfc"), plumbing.NewHash("6e42b0e4a77fb171295b541a6ae9a3a4a79f9c87"), plumbing.NewHash("9d7a2338b45d646772934da0bef30567811479b7")},
		564: {plumbing.NewHash("d7d1db5d79fa8dee72335c82334ddab6c6919a6e"), plumbing.NewHash("6074a18ec4f23eaf06ac0f3e9be33361e05e192a"), plumbing.NewHash("9a411f367dc6106a69814bb60f2f112bd2e1d418"), plumbing.NewHash("1bc0468ada74dcc3e2da45f5e373dbd7d0a901b2")},
		569: {plumbing.NewHash("b89a93faae8237561181f3d22b164de5b6dc728c"), plumbing.NewHash("d745d9ee96e5d39393ac740e5b84229beca00f1d")},
		571: {plumbing.NewHash("70ebb15a333ee543e72770c1613767a64ffb1ca6"), plumbing.NewHash("3e95633b1fb920e7821737522f23725512df8d1c"), plumbing.NewHash("94ee8e15704d76fb3ef06a91c2c9c72aa07678e9"), plumbing.NewHash("6ffa6f39e6222c5417f70eea84ebd92e2d6113f5")},
		575: {plumbing.NewHash("41741c38e5f29ebf69fe9bd82a604eba3c0b97e5"), plumbing.NewHash("028aae19bf5ae6efe0b32d25d1c700224eebfcf9")},
		583: {plumbing.NewHash("cdab739471ee3c1d3497f8c73fad5ad05324b22f"), plumbing.NewHash("c6d2ccd453bc71144ba891abc6876772144985c4")},
		584: {plumbing.NewHash("bc6880fa348eda885dfad305088b395510d74300"), plumbing.NewHash("7d143370d8ee6ceb4b72790c6b859ce9ce6880d5"), plumbing.NewHash("114b82a212ed2b2590d45550c41c3384ffa0d53e"), plumbing.NewHash("9bf55395f14e5ba02a937825dff08484ee5b5006"), plumbing.NewHash("32be731194a2ff3f82a0689bb758fbdd44e10a69")},
		586: {plumbing.NewHash("650c2c8cf9d711d35ab0ca7d1653ef53cbedaab3")},
		588: {plumbing.NewHash("6610880fd434dbc9ad16c7d2041b6145339afb56"), plumbing.NewHash("18d7e5e6e4ef7f1f6b0a2884745a63b4f36a08ba"), plumbing.NewHash("7f42253f46032dba423a2f63ed7cfc16304585c0"), plumbing.NewHash("f127b2f81d5d71fa9ab938ba6f42866d31864259"), plumbing.NewHash("d5030b1f8cf345195bd90d9777e787946c57af67")},
		590: {plumbing.NewHash("105dd031dd4b068e37457d1d2a2fd52029f1b574"), plumbing.NewHash("b7b7c2ea9494c9c0b60c084fe20a5736654cd536"), plumbing.NewHash("02fe3718395dbb99ce8a20bbbfb2d83fe6500069"), plumbing.NewHash("c95c32e473f35917b2365558cb6d794c144860f1")},
		593: {plumbing.NewHash("7db6de848ac683b2e25bc7b6c92b2184da4d3cda"), plumbing.NewHash("e8e63e307e8bc03483a7117451633421d3eaa068"), plumbing.NewHash("00e8d20eae00ca487e887d79b2da3cf3e8914d3d"), plumbing.NewHash("b74c5953f007da42095af06ec31b27f4ff2743b8"), plumbing.NewHash("fa3b17cd9622cad0f34da4ed6c3d69515033ee86"), plumbing.NewHash("08a090de43fe66b4f93a739762344aeb92290e53"), plumbing.NewHash("49a5cdf76dafce69834db2737b84c1e10011877d"), plumbing.NewHash("cb3de665d18c579fa36fe8b6b2e2c084ace0bd12"), plumbing.NewHash("92e8a20761bedbde8fd56a02a165884e8132f045"), plumbing.NewHash("e916f748db761b53ec2e6cb301e7b0fd006ccca3"), plumbing.NewHash("94fba3d8f05c65799e21198f845051f82b30416d"), plumbing.NewHash("ae7ef37c1b05393b7be5edcb59fea3e1f5deefc2")},
		600: {plumbing.NewHash("8d20bac7fa534383fde1054334110691a11735c9"), plumbing.NewHash("8df3effa5f7b71558f6f2fc2a11a6968ae179d34"), plumbing.NewHash("8653060ae68b285a532097a878ce08a5e70ceb41")},
		606: {plumbing.NewHash("709bc5e15ab015c874adcaa362aee04a39c0667c"), plumbing.NewHash("dad7790ec363979a1ac989a41564d87498156dcd"), plumbing.NewHash("50fdb8788898a5c4636e1d6c08212f4bad990028"), plumbing.NewHash("2878f606342d1ee8167e316ae9b0d3e2870d3caf"), plumbing.NewHash("7bd5c862a271f125a76fa1ada7f0d9ae27159549"), plumbing.NewHash("509d6d823565e094fd19a45003895f75ca4b8a3e"), plumbing.NewHash("773d4ce8cb31f220b9fd07812dd98d6581198d71"), plumbing.NewHash("88f3b3f75e2988a044dc603858754515e09a6d7c")},
  341. 608: {plumbing.NewHash("628bc6e03e1024f5d9aecd75db9465cb51cc9209"), plumbing.NewHash("ce814302acee2c474a88b2f4bfcdc92ff866f94f"), plumbing.NewHash("de256cb5d5f5a939f916b2827d2a974bc9eec60b"), plumbing.NewHash("c5cc96a4f4973c75e2eb533c3c88a234fbc333e4"), plumbing.NewHash("703d5a1298befcdcc2f87cbe3080f18fb866b63d"), plumbing.NewHash("c0b32a9a04c761e03e082d4ff955702c6aa17003"), plumbing.NewHash("0a9c0ca461f280c988e252baaf5b13a12f6204f6"), plumbing.NewHash("b2aebb30bfc113f82da4236bf0f3712b1d8c3429")},
  342. 615: {plumbing.NewHash("68af216772980a7c8811bc1ad5421970fb6a978c")},
  343. 617: {plumbing.NewHash("78988b5cd67d066bb4c9c6c29b636331e4f78c4d"), plumbing.NewHash("cb4f93913eb871a5e234db0c31f885daff87ecdf"), plumbing.NewHash("9dd06082e7fe033d17f24b0684ded49cc9727c33"), plumbing.NewHash("5430844453f5153f16dbd9d762d6a5a4106ba23f"), plumbing.NewHash("6dfa8b1d60f53f3d39e8286a5e3d6ba21dcbbb02"), plumbing.NewHash("fbd12f7d4436e26aa9fb78a9acdfcb5862e3486e"), plumbing.NewHash("9256b7622698e8728a7c8a035854f96ebaee14c4"), plumbing.NewHash("93b7dd99158ddbe99a299606d55f495629556c90")},
  344. 622: {plumbing.NewHash("52e2f3ed640aadafa0df8ec6cfd783584fa69ada"), plumbing.NewHash("4491212da4430181cd4d35cee5d341de87b3ac0d")},
  345. 623: {plumbing.NewHash("ea7b37a42a7644bc32f804eb3f4759b0bab53db4"), plumbing.NewHash("08566f22c725cc5b4435a534caefc1476e930552"), plumbing.NewHash("4871208f0294682de886d06ce26ec50bd792be60"), plumbing.NewHash("4c1353c188b3412b22d9f65042973e56a05433fe")},
  346. 625: {plumbing.NewHash("e8939f43a6df0df76dc6e0e0e97aaeca664a2d19"), plumbing.NewHash("d9657b70c0ddc8636fae9e680d58da0646d0473e"), plumbing.NewHash("d244d380474d815a1ebd7829d2ffd1542bd54f50"), plumbing.NewHash("2b27ab1c9e4c6ceb14da81af9efc0e206084cc5e"), plumbing.NewHash("909fbd19eaee0080c12bdc0f069121ee1d006907")},
  347. 626: {plumbing.NewHash("7e2e7a5e5a43443122df0b497f88dd77fd3bfc7c"), plumbing.NewHash("7c4f033c6a7a3a6d157045bb69b0dd1f22ff4ab3"), plumbing.NewHash("bed17efae8988288b38aad4284fa255db10b410c"), plumbing.NewHash("bd404b1c882ea649c17b9e26eadcc1978df21fdc"), plumbing.NewHash("4aa8aa100b00756c862c93ec4d0f8f44f091f48c")},
  348. 627: {plumbing.NewHash("22d3c8810cc2c80bc5778e8101ec193199ae7069"), plumbing.NewHash("0272587c2903d6a0c9bb0b1466db2a01aee36a96"), plumbing.NewHash("577735597212a111028c2c39db2f621823346ab5"), plumbing.NewHash("74329d0c1d26301be82955fbf8e524cb6508c953"), plumbing.NewHash("a0a0308061bcedd2913a8b755297f12958a8f4bd"), plumbing.NewHash("edae1785327dd7a418ac06c2fe85a8c1f6ea05b7"), plumbing.NewHash("c0ee5b859c5e082671c65801b38c9e5cd9b3387b"), plumbing.NewHash("3f3e0aa90e9547e749fadcbf4d07bc7554f9e564"), plumbing.NewHash("0f0d8be884a5383f29c945d7fe3a1baf4d3b11e2"), plumbing.NewHash("2b336756b661fe6d96856723f3d804c4db954c97"), plumbing.NewHash("ff62eb251b04b8301e71aee970bdb157f2649fa9"), plumbing.NewHash("c4579a9c43a5d9f0401eed4da6ca03315685ac2b"), plumbing.NewHash("3f12d7ae44436f5cef1e8475a7b5100b63d27c59"), plumbing.NewHash("ef1da479ecfdad853d0cae965459fe2cdd3936a2"), plumbing.NewHash("518fa3aa44ad3ffaa3ecd8a11a91530b019cfe2b"), plumbing.NewHash("5e1a5d07c468c37dadac7d19bcb9ff622b11ad82"), plumbing.NewHash("0fd0218ef0b513131970b2fe6b6e33f5b90f32a6")},
  349. 628: {plumbing.NewHash("c6eea03c8db29b1d8aee492246b9a4135ddee7c5"), plumbing.NewHash("0ba2626bd283bc481035f8f27077b8c8dfe8865e"), plumbing.NewHash("d811048887134013d3a0bcd9898f02ac3b4688b8"), plumbing.NewHash("b40b8a00e40db8765a06ad4cd5f1274bab93543e"), plumbing.NewHash("bc9f341165f13cb19e61cb0d78ad25abaf04bf5c")},
  350. 630: {plumbing.NewHash("914d976801c5d2323cdc87b902a5342637e86dc7"), plumbing.NewHash("30fa61d4576d103872a28ea8ae2281100a470388"), plumbing.NewHash("79406f111bfe2041b9d288a0b152daf6447235ab"), plumbing.NewHash("5f0e0d6c385d75090350eabbe27c124c767c00f1"), plumbing.NewHash("04107252f235552e44d5085b17408bdd8f349866")},
  351. 632: {plumbing.NewHash("1de4d7cfba0914f59f21ac7c91c6710eef8f9a00"), plumbing.NewHash("766572b5b8c0617e1261809f1916d29bb770a769"), plumbing.NewHash("18e5b75f67ed640ff207ae52b425e9e3c0c293be"), plumbing.NewHash("1278bf9cfa5b137c428d9d93bc66785f83dded67"), plumbing.NewHash("0c842391d34d444393ab0f55382d11a4c5105994"), plumbing.NewHash("9333179ad96fad2760221f2b3e2dec31f7c77f40"), plumbing.NewHash("d137d001829f9f911f8546401922c4b0241ba77c"), plumbing.NewHash("2a3d4722c21d99d882b2cbc2da451108147fe1c4"), plumbing.NewHash("6b1bf7d917992c66fe4d920982f561cb279ced11"), plumbing.NewHash("070609cbac6f97ef30f8752c4f4e4bd657d198a4"), plumbing.NewHash("12d068f67554f49707e34cca62981d1196c76aff")},
  352. 635: {plumbing.NewHash("c4f3155d192935c7cb659cac6d38c76b15ec971e"), plumbing.NewHash("d8b226f26b35348d934edb1213061993e7e5a1fa"), plumbing.NewHash("883f74ca410e822fba266c4c344a09e364693951"), plumbing.NewHash("c2e36f369b411ad1d0a40ac096fe35f73b9dffd3")},
  353. 641: {plumbing.NewHash("737bea8f39b06b6a19fee34b82c535309512637c"), plumbing.NewHash("00cbeecf6c345b5e4964c4d212ed9463126a0492"), plumbing.NewHash("ffe013033e7cb20b03129d346561e85b69e878a4"), plumbing.NewHash("3d4a48b12011717034e985a59e695c3d04a843dd"), plumbing.NewHash("50f7f03f6bc373b81ae9407f7857112e062c526f")},
  354. 649: {plumbing.NewHash("a6c9227372c607fc356b14e17c230cb9c1d5f589")},
  355. 653: {plumbing.NewHash("5863fc74b1d8ba0f89af4795c6896ce0c0c6a5f8")},
  356. 654: {plumbing.NewHash("6b05aebc0ce0717297d65305801e27f62a38cff5"), plumbing.NewHash("50b4f7fad50176c93c9b7764a5fc4e6d1e5e068d"), plumbing.NewHash("b57b9d3f8ee4c8a809e4fbaa62eade89d3bea456"), plumbing.NewHash("fe72033b2ebc8f7397edc99190018a41e6e0f787"), plumbing.NewHash("3071e0de2f498f1df86f91f9729e20aacc8025df"), plumbing.NewHash("97b0f9f6e41a16c37276bcc2ad06f1ee0c139c6a"), plumbing.NewHash("7b261704cfccedaaef0845808f4a1028e53e175b"), plumbing.NewHash("a04d968422861e086e69f9dbeb80ca0cca5394bc"), plumbing.NewHash("833c0b23f5b6607700f58de71c8e90f20e37cbd5"), plumbing.NewHash("a6525be4fc839ce6d561b3239c6514f02b2e445e"), plumbing.NewHash("ea47e6de279397cb1adf953ae971cf884587447d"), plumbing.NewHash("8f7574437951a8f1042407d25bffb57292e3b6a6")},
  357. 655: {plumbing.NewHash("c39546ee10f8eeaf44abf724a528ea14a9d58d01"), plumbing.NewHash("e8cd940cf850353a64846c7755274c8c2ba6fddb"), plumbing.NewHash("1ccad186fd950b77dab771686200fc4a23bc0c2c"), plumbing.NewHash("9c93d8ec0681521bffd903eab9bba1a6c42a1bd8"), plumbing.NewHash("8c0c3774e6cf88704f685784f8baba9694220d4d"), plumbing.NewHash("89f0527f3113d49cdf3e224a17e9f4f9bbb64f36"), plumbing.NewHash("875bc59ecfd0c2e53de0fd136f8f705f700d49d1"), plumbing.NewHash("aa18604fec4a309658bcc83b6aa595b5f0838d36"), plumbing.NewHash("38719480a846f70a97726ea489cfa54788c0e602"), plumbing.NewHash("a90af6f22eb13bc2647a57751c02d86cac5254f7"), plumbing.NewHash("1f5455e29efa4579eebbf894e97ee53cd1257529"), plumbing.NewHash("309f586424ea229fa9f6fa963e9345b4dbd22849"), plumbing.NewHash("c10945f53a3274748510aa9e7c120f22e9b5f01a"), plumbing.NewHash("ac1a09c787b3968b277e577a3709cd3b6c931aa5"), plumbing.NewHash("9f4734cbf1044cb5eb921f8e834423c3f528d0d4"), plumbing.NewHash("0ab4b647f82faa640c6919c8539106b445259220"), plumbing.NewHash("2c432ffeb381d5d604ac51a1fbd2422bdc76dfa9"), plumbing.NewHash("a5ec992b1fb7e52b12c9cfa220defaa9a5b0628e"), plumbing.NewHash("9120a7251d3ac13048d212b424cb92841ffd7cd4"), plumbing.NewHash("8bfd8511332282565efc7aed9cd7500356e92255"), plumbing.NewHash("7aa3114d9ff1cfbeb4128934fec05878fb7cf633"), plumbing.NewHash("6fb7ba721cf215e88176c665d2d7f90027f4e26f"), plumbing.NewHash("088dbe6866fd51f4e0e64866e442968c17abfa10"), plumbing.NewHash("7ae2f8478317cd80a2adc0d5c98e00f978d1cc99"), plumbing.NewHash("fa585c5151ccf926ae94945ed8b9f274ee747391"), plumbing.NewHash("590a5a5382dc47af188b0c7f34bed3b4ef6d394e")},
  358. 656: {plumbing.NewHash("538d368396f041826dea063c77cac6e0ace4ad58"), plumbing.NewHash("cfa1f7c3bc4d459b7684ed55d8257bbf67e1be23"), plumbing.NewHash("4973fe3069b594f1cc808cc18bf473ff21ab7956"), plumbing.NewHash("6e03136116c6cfd58a912e575c1980eeb7cd6809"), plumbing.NewHash("5d976573751fa744a4b9b4648a699dbd2ce42a6b"), plumbing.NewHash("0edecdd09e84f9cfd83e4a5775c6d6f46e7b60ff"), plumbing.NewHash("68bde67d0a73825f9de15bb2a1eef01aaf405fa0"), plumbing.NewHash("2ad3544b017fe9c0d7a25ef0640baa52281372b5"), plumbing.NewHash("887576b1139b5f21dfa4656410ebdbf986bc6fb3"), plumbing.NewHash("8a50f5dfc80030b864ef24d6c562661c24fe7c2e"), plumbing.NewHash("fbc9a18f0abc5784607cd4a2a3886558efa3f794")},
  359. 657: {plumbing.NewHash("53e541f7bf55de036f4f5641bd2947b96dd8c4c3"), plumbing.NewHash("3e933ca0ed1c526c0a9b8643ca84129db96ecc17"), plumbing.NewHash("b5746331f6c214f048467e29a0c5656aefd97950"), plumbing.NewHash("1b7800aceb1b2672630e067425790c3c9769e5f2"), plumbing.NewHash("8ef4a3da52530a86463b2aa015567cbdd6eb2d5a"), plumbing.NewHash("3a7cd05b488e327c1adccaaff10c78390d53b5a8"), plumbing.NewHash("3d176e926f848c5aacd036d6095ab015a2f8cc83"), plumbing.NewHash("5adce5266f43f89dcf7f14758e5d895fbf6791ef")},
  360. 662: {plumbing.NewHash("d8911c28850d374177a945878c223d00ad70ff94"), plumbing.NewHash("32cb83408ac2d34e69e667520b553df2ffe976e9"), plumbing.NewHash("1c630c3e3c8969b40a47d07b9f2edda50ec69720"), plumbing.NewHash("a56b1a55182acf061b1eb2e2c86b48193a0e88f7"), plumbing.NewHash("f2fe51a9d2bc9d228e3ac374579e87595f51fef6"), plumbing.NewHash("fff781cf155091c7d168ef061013876ae8034af0")},
  361. 668: {plumbing.NewHash("6aa5730ad563bc1482f4f22246b49dcfe24bdf4d"), plumbing.NewHash("8a53df633897f4d2ff82790825144a77c38d5a77"), plumbing.NewHash("200b19328275f992f21ae101d74a5e4798a712c9")},
  362. 674: {plumbing.NewHash("6ce428511a4a28e0716c4657c3c154af600bf994")},
  363. 683: {plumbing.NewHash("97f327317f5f0a3b85e00c7b461233f5101526ee")},
  364. 685: {plumbing.NewHash("023331ec2a7b0086abfc81eca16c84a1692ee653"), plumbing.NewHash("3fe89b4f7b393696039dd61d4aea3f648e2c809e"), plumbing.NewHash("6710396acaf62b40ba01fadd9d488d6641995d83"), plumbing.NewHash("46649e5d97a750379fdec85b3a8ae39f90e26674"), plumbing.NewHash("b118cef26fae748d5ec23c33b29e9989a7abbe17")},
  365. 687: {plumbing.NewHash("bc92fb32c0d60b7dca189ef5bd54918438726204"), plumbing.NewHash("a856451243d0ac7758a19f5b16392b4c416eebf4")},
  366. 689: {plumbing.NewHash("03a7eb89e27b70f2ca0ac932ef4bace7569d6fab"), plumbing.NewHash("d2828713937b728cd41b31b459d11ec678d76177")},
  367. 690: {plumbing.NewHash("10bfb1c565582803982e2205d41b095bc929d4a4"), plumbing.NewHash("2562172adcd15e60763dfd601d8f0a5697a2bbdd"), plumbing.NewHash("18cce177a8bd29b5076c4e232830377e46ed0416")},
  368. 691: {plumbing.NewHash("36ac91f0576b2295df1f3f8b23c305d69698a0ff"), plumbing.NewHash("d663fda862df1c831e7f93f1e3feb2e189a1b9ef")},
  369. 692: {plumbing.NewHash("10c76237abf9913dfcba9a0ac73e8db50ea21a44"), plumbing.NewHash("45a10bc6d708fade197a37bfbc62312caf70e6a7"), plumbing.NewHash("e1a4aea4be03dd6902e50458b0a3c291bfdcf00e")},
  370. 695: {plumbing.NewHash("f0e0527591ffd6c061c94d6027c29a31bfb23d78"), plumbing.NewHash("e092fa8b5776ee8c7bc80227d046036de900412c"), plumbing.NewHash("803e2869c76c5c73bfe1fbf6ab4af269b4a0d551"), plumbing.NewHash("55e03476674fedfd6211823e3ca8d075e499a418"), plumbing.NewHash("451d74c56d1b2f498e5c9a4856309d7f454b39e5"), plumbing.NewHash("b95cf4a1b1b5f11ecc13299a6536eb035ea1d530")},
  371. 696: {plumbing.NewHash("ebe84eb3a1c3beb44e25fa4f00a89f2c71dde2cc"), plumbing.NewHash("5bb86288e9d145b2a3cd4b708ef1659d8f24233c"), plumbing.NewHash("0564b940ec8f5008018c12c24a1431e91407d8c0"), plumbing.NewHash("1bfa665eedb02aeece1731394ece001c46a3a894"), plumbing.NewHash("f46ccff056525cef9ea2cb7885d9cc1b38db7a6f"), plumbing.NewHash("16e8b93d3852958aa7d28bb127de34681136c5b0"), plumbing.NewHash("af0d18f6164952a6610f1c9c14d2dded7e6c488e"), plumbing.NewHash("2c0809c125521f77b4d850264c4dca072ebb0f67"), plumbing.NewHash("871809bb8596a50057a1a97a795db16afe9a32ee"), plumbing.NewHash("701355a720b4405a8b2140ca3f5b5023bf9465dd"), plumbing.NewHash("7dc8a32f8955023c75f681e029c15e841da443e0"), plumbing.NewHash("b257dad10c0a5441c8638142b7de4c26fc76b69f"), plumbing.NewHash("67b43e3b0d46b2315e0f0128c691e916e65f8c83"), plumbing.NewHash("59e67fd049252fe1d9194071ed38e81f9fbc2dcf"), plumbing.NewHash("fb21b1bad8a7e6f5cf5bb7843aa9037b6213e35a"), plumbing.NewHash("3a57f184de26a70df1359c7b0afa5b1ee943fe07"), plumbing.NewHash("97db7e26661f249aefc51d4e73e0d1ba97ad7cb2"), plumbing.NewHash("10012ae9c66bbd130c3761fe4cd6712faa2cdcdf"), plumbing.NewHash("3cde2c1f8af4dc10d3ba2fd2663e1335d8f8714c"), plumbing.NewHash("7e7d2ed1f9d87e5ab46a5c1e7254a000c7d0b9a3"), plumbing.NewHash("00dc75116f80d81e4cb9bc90e2cffa9b40eed153"), plumbing.NewHash("ac4b365e0b08bd2a655bb86573995344ef750071")},
  372. 697: {plumbing.NewHash("26e6df8a98670802286978e1657526fe2afa61d5"), plumbing.NewHash("ac2a7254c2c29b4c13fb97740f2aaaf46a0282dc"), plumbing.NewHash("1422baaa8d94fd2aa42e784c9fd474ec3d3298c9"), plumbing.NewHash("13f271c7d6d259e8a478657fa05822f9584090c4"), plumbing.NewHash("79192f8358199b28a3f4b69d5aee9dd78c482d4f"), plumbing.NewHash("b7116b991dba0c1256146c71496fc558cb3dbb41"), plumbing.NewHash("197fb886cba673b385189809a1a90032032f5c26"), plumbing.NewHash("dab228bc565592230e8158ec774da40f24691c79"), plumbing.NewHash("b03b2b614011b1b11a010c3d76f305552df528d0"), plumbing.NewHash("4828bde0ce2cd1193f6bc660d213f6fc0664562b"), plumbing.NewHash("4f735c4b0d57e88dd9a6745173063e672bffa04d"), plumbing.NewHash("5ef7ce3f077abdb35ad74f720717a6721601cc72"), plumbing.NewHash("cb3e82051fb1591e33887347758549f9191d8fd5"), plumbing.NewHash("2ab5295ceebb1acc455f43ee5a8b185983620655"), plumbing.NewHash("b361e30146ed82fbd4e5b8c301bfb5196997a307"), plumbing.NewHash("a277dabc1410a36430fa63365876b504e1aa835d"), plumbing.NewHash("309f13ad8cfe5c5e711837cc8efa5bd00b01368e"), plumbing.NewHash("70e5cd1620119afdf892c656b761fb482b478842"), plumbing.NewHash("c15d8697754a74a63a1185ba31ccf310b3c5d0ee"), plumbing.NewHash("cb567d27eabd78b19145f3f6a018413aabcf3396"), plumbing.NewHash("9e25cb7d13ff6788c569051c7cad6d7ad85ab821"), plumbing.NewHash("db22fdf9afe2c92d604a52e9e9953fff203aea73"), plumbing.NewHash("0599ade6dae88be98ea1ec1d150d7f525335b0e4"), plumbing.NewHash("aeb22266c91af9d17bee7a4c266c7396a6167cbf"), plumbing.NewHash("c81f9447c7b008e075f1ef89e5bc0dd546e4fa79"), plumbing.NewHash("8d317c986721371f6d11ae9fbc0afa0212a088dc"), plumbing.NewHash("bf1d29c8c3d51ddbb7b2fb639525c07827be4fa0"), plumbing.NewHash("8e25f8b1c28263ebbb16b662c1f73ed212204634")},
  373. 698: {plumbing.NewHash("0f308d4b72c420168d8c5fd3c834c61513a958fc"), plumbing.NewHash("d046cea1a1d32372cdf27a1c947969c9d09bbde8"), plumbing.NewHash("547640587a339c8c10b5eba950080d8cba0266e5"), plumbing.NewHash("8fb8accb055201146a8e1a74268f8eadca3d08fc"), plumbing.NewHash("5566c9db1d697018bdb3c69b0aed9c9d63fd3783"), plumbing.NewHash("e90b0713f2e86f9f540b57b4306a77e3f696213b"), plumbing.NewHash("5ae158e8b06562ab3db5d55d0f0a1dbda85d8089"), plumbing.NewHash("0c8e6319cff0b0b723aa1a7b8d8a0b3628558848")},
  374. 699: {plumbing.NewHash("c81713367e3e97f13dd584ae8e48b79615951437"), plumbing.NewHash("825d58e2bbbe234019898e414fb9129af1f17014"), plumbing.NewHash("6cb1fe9490bd2050ffedb6dd3d87d17145b10a60"), plumbing.NewHash("3cfd306c76b4428237fc0fc09f06a4f179324d3d")},
  375. 701: {plumbing.NewHash("a164addd8d5d1642d613100226a94398d929804d"), plumbing.NewHash("cad4e4e8cc1680c075b8a49c92517d5f353f2c9b"), plumbing.NewHash("173a40ddc18b35ab803b017cf73c544af319c5d5"), plumbing.NewHash("4101b5fdb2262b666ba7eb901f1776ef6579902a"), plumbing.NewHash("b666ef18a126c847eb6d25da08434c92cf988b7b"), plumbing.NewHash("579cc22cda42c84119a311f20c5fbe09b3e93803"), plumbing.NewHash("7ba07aa8cec97ed41cc252bd99d516457c99bbff"), plumbing.NewHash("c8282437a7f75f9c3af7a1b2ad57297be41863ff"), plumbing.NewHash("7f5be01da96175a3cb90a2c2a20b01b3d14f80e6"), plumbing.NewHash("4992707d5705eb6ad1eefd81697fe2bf268bc33e"), plumbing.NewHash("4487510c90b0398d7107e6c931c96c8904d83f9e"), plumbing.NewHash("a85378979521640eaa7767acfd3c9af639f83711")},
  376. 702: {plumbing.NewHash("8b2f06de4949dc7f2fe9a5ac42531127fb04e48a"), plumbing.NewHash("ad21188a13b870ccd3d7dcc1afb95f8b9aa20200"), plumbing.NewHash("9683998b37667e9983e4fe7a74d1137d5dbee129"), plumbing.NewHash("4b5984b8ce8799601ddf8f3fae0344b626c4ba48"), plumbing.NewHash("6d8cf2fd6e68847b4ac6d1b6eec9d2c1eccabf75")},
  377. 703: {plumbing.NewHash("38a6dae44a507d816d5cdb0fb2f3329f21ad9f2b"), plumbing.NewHash("eec61d9d4921fb7a62926e217fbfe43cd7062ac4"), plumbing.NewHash("8d34c7ed3c632a31f1d491d733eb9a7bfa11043f"), plumbing.NewHash("fbe7873fc0f43090e2df52b85867b8b6179516ca"), plumbing.NewHash("91bab91f3c8e714c3c9604090fe48bfc8f3d982c"), plumbing.NewHash("50d83f7ab80c04283236a986a1c99550cecc5140"), plumbing.NewHash("4e504462796ee90e9e38cda6af1bcc3a40fbb39a"), plumbing.NewHash("0e9ac3dae0359db9fba655e76da5a443bf1d935d"), plumbing.NewHash("5d0cb109496d46edb6359a50893cab07e9f386ba")},
  378. 708: {plumbing.NewHash("5f94aef6685289c5451f24bcfdb2ab24e2bc87df"), plumbing.NewHash("d72514d6cca9b09842ab31536c16a01e5cd51f6f"), plumbing.NewHash("f9202817f3e2f65c83910015552da88969b44dbc")},
  379. 709: {plumbing.NewHash("5516c8fb42f01622c9a4a31c19cb46ae68cfa0f6"), plumbing.NewHash("3b660145a7c83385bae682e072341c1b5709dbee")},
  380. 712: {plumbing.NewHash("6419d52543b8af2c50c9a7678e1dd42f96745961"), plumbing.NewHash("8f6d12f45754721b11c3ce9ac62d747b391808cd"), plumbing.NewHash("46a3f9443d4c6741177e0ff970c6b60488c9398c"), plumbing.NewHash("42b18506c53f05b1c84e6a905e76afb6a534a853"), plumbing.NewHash("74cf6df1535d18b55f9a9bf0b98d1ae6c2cc1860"), plumbing.NewHash("c6c93739546cc88d7bc0be5981d27c9127b65678")},
  381. 714: {plumbing.NewHash("eaa827584fb07ee03f26248c8d4ad8af126b7ed2"), plumbing.NewHash("118027fcaeeec2ab6cc88b032d490494a14f1a4d"), plumbing.NewHash("ee04a7c77a1da239c51aa4931062a0d2257ff6b5"), plumbing.NewHash("070037d4493422f3d9420daec469c1055b2b3f3a"), plumbing.NewHash("4cf4aa1efe30c3ed0e985ba2d0ae5fa9ed3cec07"), plumbing.NewHash("0c7b9d57c1049869dd3e8d69f32752e74aa738be"), plumbing.NewHash("109b11016a87c2606b3e738d085fe0281ef3cbbe"), plumbing.NewHash("d51fc7659ecd616a40504f836e51fef9472e0ef3"), plumbing.NewHash("16feb385e5a84c8298b909fb7a8719ca9a99d105")},
  382. 715: {plumbing.NewHash("214a54d40e50681f8088ca2255a05b9dee89e685"), plumbing.NewHash("21bf90cbf561250b256d7b30c7545da3bf092552"), plumbing.NewHash("a783ceae2a970007668735419210fc67eb748db6"), plumbing.NewHash("fbc8697366de1e8bac0f76a00c6a65385dd11209"), plumbing.NewHash("7696a139956577fd0f5b527698341d2c4d9e90eb"), plumbing.NewHash("84711475f8db72ac573a2584cb85f8d6ff5facef"), plumbing.NewHash("aa826d684d97c7871daa65cdbaf19eaac32955c2"), plumbing.NewHash("8b8ffe0ea4b9739e8b4ad397cc5e232beb24cad6"), plumbing.NewHash("fa4aba7a9be78c0cffd80e6e60c344517b46da5e"), plumbing.NewHash("e71cbccc2a91c338db010802528bd9ce8f8f14bb"), plumbing.NewHash("d5a0737a01b4fd2ea24ff2fa5051f1e1fce7910f"), plumbing.NewHash("d04d05442c1596c4ad0547a0854755be6dd24fb6"), plumbing.NewHash("a00eef21c90bee7a2dbaa62d06d41fc883c85d8c"), plumbing.NewHash("2082bafd183eb6fea4778701f4c2c047970014f4"), plumbing.NewHash("28c208deab339a12c4133f9859145afb5b37f151"), plumbing.NewHash("11107c5f2c0eb8eccd6818352cc4124885ffe7e3"), plumbing.NewHash("98af4eb39faa55a6f0d89e6fe375da9b985e34a4"), plumbing.NewHash("7374442a1c1507051fb1f9ee1ef774c9a9654d8b"), plumbing.NewHash("6e289d71866b4782dbe752adce1f26ea895f5dd9"), plumbing.NewHash("f420d898646d520b94c5ec5a4494de9afd02ed98"), plumbing.NewHash("151b5f9778b4c2d2d75a46bcc438349b7659e743"), plumbing.NewHash("d704bea3b2f6230a3a0945b305857da07ced8570"), plumbing.NewHash("ac1afb3e5f6f608ca4303f0c142a86d65b05cb45"), plumbing.NewHash("96554939784f96b0bb15d069979a7a00f2716ba7"), plumbing.NewHash("53c0faa5535c4fb782271d21ca1bbb2191c5f19c"), plumbing.NewHash("e9222523ba8f3de04e5fd0a64176f5c3457cc00b")},
  383. 719: {plumbing.NewHash("c88a11c378189443cf1f95ed14edd47c9666171a"), plumbing.NewHash("3e81b668ea0997d136f43baf32a57246ed6410a2"), plumbing.NewHash("10399242459eb222afece5edc5645ad458ea43c7"), plumbing.NewHash("62fd5f7ab6f25dfd256af9d2784e950013907587"), plumbing.NewHash("467de6bb6c549fd42ed97f8f724206069fea8d25"), plumbing.NewHash("6691b9e3fbfce7ba3824503d3fceb2e5918db6d8"), plumbing.NewHash("9cf7f816f28a6ac7d7a4264ae015addd72e57321")},
  384. 720: {plumbing.NewHash("35b2aa910391b253f062e9bef766fd7be3304668")},
  385. 722: {plumbing.NewHash("90529b222e7d10ee1a987197071aa60a0ef3ac46"), plumbing.NewHash("f4f3567e156c6b964d17e24fa4a4073f00851463"), plumbing.NewHash("8566ef777927d05dd41d4a0c80755ac2c17a2aef"), plumbing.NewHash("5d512f82b58e95464dbcb074aa07e26e7775d3ca"), plumbing.NewHash("6dca6c2531c3194a7cb2d0aa9a8d2f4d060a6651"), plumbing.NewHash("2b1c4779ff4005f91898eb0b320b97cb1efd1a1b"), plumbing.NewHash("2b3579ecfce5706e356673b573ccbee9496803d7"), plumbing.NewHash("9c86aa21db0082a7296fb35e31ce2363d7a7f9f2")},
  386. 723: {plumbing.NewHash("e5a33862a35a0326576b0c2546ae23654c9914ae"), plumbing.NewHash("4a429fbe7d29c6d655525fbd77f5dee97c25a7d9"), plumbing.NewHash("01002689a640cb2692083028d131fe1790525ec3")},
  387. 724: {plumbing.NewHash("0cc56a46e826ede17705f68ade1702f57f27ae9d"), plumbing.NewHash("576f8fe8e6a21b7094316d36c315c2f6bdb487cc")},
  388. 730: {plumbing.NewHash("0be8040e7988996232da210d04002f170a5d3f98"), plumbing.NewHash("57ff6e99cada8eed046f68c99faf6dad799b96c4"), plumbing.NewHash("50057d8fe23de4e88c1b0b45eafc5bffd922a941"), plumbing.NewHash("568d1a5b8a9b592fe988f304fea90826862d416f"), plumbing.NewHash("9f6fb452a231dc83468b9c3be29cd160af53ea9b")},
  389. 731: {plumbing.NewHash("466f0b91f114a2152886e685969c404bca8de34b"), plumbing.NewHash("3dd5fc88f78f0bad58e5e10032132d36b019604b"), plumbing.NewHash("fa4c747b7e9ef8deca9f806dd8e77315f805f2ca"), plumbing.NewHash("80b72fa7b3ac69f821ea63a1e2a92412da188117"), plumbing.NewHash("ff577d84c055ea835b003e42b22ea6dd94f910a2")},
  390. 738: {plumbing.NewHash("4fe78f340053dffbc3e36644dc43fdf8fc959081"), plumbing.NewHash("0930ca9eb771ad418338a5c9d3587e977c14b398"), plumbing.NewHash("98b95762b63598e7d69410d8b6fbd69013377b1e"), plumbing.NewHash("655f5af76e008f2e547f3c038d3fd363fcaa844d"), plumbing.NewHash("4785d51705949e72316770413ba187f07f05a5bc"), plumbing.NewHash("7c6463da6f972ffaa466b0f55d06b760a98caf8e")},
  391. 740: {plumbing.NewHash("466bb39aa113a09844d6677516497242e6a5f4fb"), plumbing.NewHash("debbd4740547ab06c38e0dfa8a1852402fc0eb2c"), plumbing.NewHash("98ec9fc9720a1879fcb186b247c189863dbc7b0b"), plumbing.NewHash("75b69a5615304ac4381d2311c1028a1523ffb791")},
  392. 741: {plumbing.NewHash("8fde4fe305df814bc6a5fd8f1c07a167c834a468"), plumbing.NewHash("1a168578863eacf18fa15e68e01b27e4889b17a1"), plumbing.NewHash("65a215646c653ab808170c8b8c10de2945262613"), plumbing.NewHash("1fe9ed7b55f2ad42b4e697f2e0a471f0ff71b210"), plumbing.NewHash("ba3e2cadbe7fea49eadebdfb297d48d317e85040"), plumbing.NewHash("a8e7b19b7965bbf5bc478aec25c5d6ea39dcb100")},
  393. 745: {plumbing.NewHash("9adb43e44b46a287db94370d5081c542bf91f5db"), plumbing.NewHash("172397ebf45d58ba256c10004c6fce8b40df286b"), plumbing.NewHash("b558a7e97c93d80b21f577d67b3418139f7f7146"), plumbing.NewHash("5598fcd33e3f83d8fd368f82d2a20e52375bcb6b"), plumbing.NewHash("05589a7c27115c41002d46b3079834742fb20c64"), plumbing.NewHash("9eb7ecd3e525c9cff31ebd59a96794f212ca5e1e")},
  394. 751: {plumbing.NewHash("e0697c3768a53c7b6744e2eab253667d7a42e040"), plumbing.NewHash("73a620b6e875a2f153cb5acea194d89e845dc726"), plumbing.NewHash("fc4874f82cda48e0928f9eb730a270f49034883e")},
  395. 752: {plumbing.NewHash("8830c53135dcad4825e2f8805c78523931c3bbed"), plumbing.NewHash("83001d195c7741e16cb22ee4ecc9083b27797c27"), plumbing.NewHash("f71831790fe8c5e8735ff5847a97818a865e1e1e")},
  396. 753: {plumbing.NewHash("7088ebd2949655ce7d27e2087d583e988b0066da"), plumbing.NewHash("0976afb46d175f9bffd99ead3df06f7befb5ce1c"), plumbing.NewHash("d498a9846567e4986ba2a2541b2b4e4719c2c83f"), plumbing.NewHash("47350dc6078053403c59e8da3fd63ac3ae12b5ec")},
  397. 756: {plumbing.NewHash("7d52af64c03e71bcd23112a7086dc8aab1b37ed2")},
  398. 758: {plumbing.NewHash("b8134f529c98dbf27e74a28a3352b971fb0bfd2f"), plumbing.NewHash("5be73f1ab39a4bc2dfe278a199972e0a14ffa014")},
  399. 759: {plumbing.NewHash("2bb9014c9100a5b0344ba81af7eca8698228883a"), plumbing.NewHash("791cba094c757b04280fd8bffdfcd67c93ea7de4"), plumbing.NewHash("0d4fb04c7f460f7548c916e022310c0048051613"), plumbing.NewHash("9295efb21686b4ea5a55dfa1944c4e30bd27794e"), plumbing.NewHash("7481b5d060d77f73dc78e2fde37362d1b4656f2a"), plumbing.NewHash("d491dafb80394a83e9c2a085a0a114c8246f0a5e"), plumbing.NewHash("4f9e7bf93cc8afb4cbeaf3182e11dc66154a218b"), plumbing.NewHash("d3b9b9d5bb21fc5e804826ca6651fa83e06715b9"), plumbing.NewHash("5e51d02a94f08d461a831cd85a495d225ace7278")},
  400. 760: {plumbing.NewHash("54a417f6167841bebcb567c5ac98724f2038afa7"), plumbing.NewHash("bcbfcc000ce07078786bae5f66f8b30dbe82fb64"), plumbing.NewHash("653cfd20769db94e3234eb0be979882f12a10288")},
  401. 764: {plumbing.NewHash("c430b6c49222166d7a2c425705a80ac5a4ac2b65")},
  402. 768: {plumbing.NewHash("fdd822c03ed8eeb03e18de931d1694e10cc01ea1"), plumbing.NewHash("47dddaa7fd6947800f5f091336cf822b6db72a51"), plumbing.NewHash("dc3d164c6b1079f647ac7ed416cb917a1cc72c18"), plumbing.NewHash("b93d3b23f51ef282c90dcf924475eb1fcaa4151d"), plumbing.NewHash("75519651bbfd0a31a382eee92a982d9338e0e6d5"), plumbing.NewHash("cf550db5a5a0b740a185edf8f38874558061345d"), plumbing.NewHash("85221ccd13fc69a233b3f39311ba19382daabee4")},
  403. 769: {plumbing.NewHash("6582043276a1323b480d3c182c0181cbdae30f12"), plumbing.NewHash("1c7e63e42c3d1e8989109f502c110dfd1e4e4656"), plumbing.NewHash("61c9cdc53ca5725e43b3e9c159ea5b68966fabfe"), plumbing.NewHash("23833417cfe83723f088bea08e63844cae6f1121"), plumbing.NewHash("8ac1b1fdc90b51da0410a8d922d705a791dc1f0d"), plumbing.NewHash("d223cc0ff7fa13970c809bb2534728d2e1f2a247")},
  404. 771: {plumbing.NewHash("ea8e2edf17fed647037109f712672a44f5a66ac9"), plumbing.NewHash("a2a0f66276a6ecb8210ba8b9c5df0dbe556c6996"), plumbing.NewHash("bfae0a61917f8cc7563db80051f89e178e8bfb1c")},
  405. 775: {plumbing.NewHash("24db6bfaaf8a692059223e283c04d5c98aae8a41"), plumbing.NewHash("5f4f234f9b4b2edd78153a289cd6388a6330993d"), plumbing.NewHash("e1773974275d77d9861ce4b3988d563c3119e3e4"), plumbing.NewHash("a2dde60a2fac35475df2aa5cbf8f3ba2cecb352b")},
  406. 778: {plumbing.NewHash("6220e35ccd2dbcd852bbd07d12511af9f5a09e99"), plumbing.NewHash("0d27d903c295f00d16efaed7f596b3b974e38764")},
  407. 790: {plumbing.NewHash("fccd4f8055b4f07e88b70d1bd684bec879906c68"), plumbing.NewHash("7c73bfc50da3859d1c4fe01fa20353f4a8cb8e7c"), plumbing.NewHash("bfa38fb747904d20e4fb73aa233c88bff2151916"), plumbing.NewHash("60c52ea766b8049c4396ad76c6f4242039c5e974")},
  408. 801: {plumbing.NewHash("a5f53155a5b81f2c83f492216b96c150f79d04e1"), plumbing.NewHash("36317214ae7d7d1fff953232649651fb7b6aff92"), plumbing.NewHash("763bd6d8f17e27baa8cfadb734e004ce0e3bb6b0"), plumbing.NewHash("3c180eafedf86d9dee1ced94f31d46d83bb9a92d"), plumbing.NewHash("6a0c9a617dbdfe243578981e73f9b6b745ea1497"), plumbing.NewHash("62973243aefbe5ca73bee14f910982084f5fe8fe"), plumbing.NewHash("c3c97905fedc45e51f2aa9e2d671543475ee7d55"), plumbing.NewHash("508bb8f541dfecb858c8e694e878eb0706c8dd20"), plumbing.NewHash("552978dc58464f9f68908f371f9c6a0ff4244cb2")},
  409. 802: {plumbing.NewHash("75d9415c82b3bbba82095b11ac6c313b40de6066"), plumbing.NewHash("43e418d1d275a00f2d015959c538b8f03a5a6eb1"), plumbing.NewHash("720ed1adc4beef1189cdd71c9310bca1d289ccb5"), plumbing.NewHash("53303fdb10f75d929f2e329362d6e6bfb3a7d372"), plumbing.NewHash("fd427b8cdb35b80c72782f03ae03d4d58830faef"), plumbing.NewHash("1b1e09a3665d71061bb5b62ca475d304634a85bf"), plumbing.NewHash("8c0a8b4b0498ad96faa76ab5bae3320de28bc5b3")},
  410. 805: {plumbing.NewHash("19463a19b85b97041b318dd0bd5b2f4fd031defa"), plumbing.NewHash("d92fab69a253db150af9d3fd638f2a01d53f4aa9"), plumbing.NewHash("11d9c995cc8744d6de04f89f705957369e2865ba")},
  411. 809: {plumbing.NewHash("4a6f06f06d6a90eabfbdc48ec549d351dc012185"), plumbing.NewHash("21cf50734a6996da7023dc500bdcc8ac7d74ef48"), plumbing.NewHash("00a2724260cfb66a983fac9ad6f3754a8b8ea88b"), plumbing.NewHash("f65a56fb65062c8d14d215c9f4b1015b97cc5bf3")},
  412. 811: {plumbing.NewHash("e1c39881989826aec605f8a30dd11cbdf3872790"), plumbing.NewHash("c73ba916f6295586324a298316b5774a53505d8a"), plumbing.NewHash("be6503a8a81873109cf830bef225bf06a13edda2"), plumbing.NewHash("5ca5699b00881f6995dda787efdb1df461014b67"), plumbing.NewHash("d3c33613a1752f091229aa203bbd83efd4eb8671"), plumbing.NewHash("c0f0b660a69aa464bddeeb1e3e897d2c5b4ca9cc"), plumbing.NewHash("8d5b2ce60c21a0c18c00610dda41687b42fd5c13"), plumbing.NewHash("ced84c4b42d9186842e4cad6c11b0c8f2c18439b")},
  413. 814: {plumbing.NewHash("e0543fbfc8103fd7b3e78f066634f1685ec675ab"), plumbing.NewHash("3d9428d3445a429b535a247168d93b8a5910219d")},
  414. 815: {plumbing.NewHash("929ae992c274a13446c53c6e23367629dac94d05"), plumbing.NewHash("86c8d1dd45530aef4f8af09251ad8290be568a5a"), plumbing.NewHash("1ddf23528e38d6ef47ad42d10010da90d8c21018"), plumbing.NewHash("68145065284e645da12f364f3f853799d13c08c9"), plumbing.NewHash("ab6b82c2dbcf5ede7d2950eca1efe815f5c0df75"), plumbing.NewHash("7425e68cd65b351a09e5c8b84c089cee948f3107"), plumbing.NewHash("abf8691ade76c33da7585741e46d18531923e30f"), plumbing.NewHash("58d1d0678f1f0dfb2dca976b84fc3d419d9f4618")},
  415. 816: {plumbing.NewHash("04a20177cf332004a291cee626c20a0262427796"), plumbing.NewHash("b5ad5334fcc98c147826ac84292ed0425b797b08"), plumbing.NewHash("75470e380ff92fa52d600aa3ec0cb0be06773cc1"), plumbing.NewHash("b713122e77a723d9be2c5652e7833ba68f3c56a1"), plumbing.NewHash("a2f6ae2c66f22a2415d069913479990ce73e98b7"), plumbing.NewHash("f430de10fba7f02ff821a094b29db9a9d407eb4b"), plumbing.NewHash("219d6ee5be04fc12307fb9d21227185966f53063"), plumbing.NewHash("de73eda89a916c4dd46ce74058bb2664455ed9db")},
  416. 817: {plumbing.NewHash("1b539993aa751c0a3c48f67f27c1210bfe152fba"), plumbing.NewHash("60cf7ca6b28967b9ab9b41a959520532028eeaa5"), plumbing.NewHash("fb97b6e0fa4d7338c6bf6cd722f3d67b117e76e4"), plumbing.NewHash("1aa9e9199b3db9554426c9f8cf9efbfb2f3d7963"), plumbing.NewHash("585f33f6b734156aad960cf79551f89289cd9b62"), plumbing.NewHash("34628355976d5ffdc9fc11e84745f86c11467671")},
  417. 822: {plumbing.NewHash("fb9dbdb10c75f3a3539b77713c65c75c6c297247"), plumbing.NewHash("49a7c7376d0057ab85e00f1ee3387ce36797e994"), plumbing.NewHash("4fab0bf9a8ad03d63af0797b51526a0391e55a87"), plumbing.NewHash("9a4598da50457317a7ffdc681d2a2998380fa5a1"), plumbing.NewHash("7413956e7e5cba157674f30e55bf9127e2d4fb53"), plumbing.NewHash("98db0285ee5b8a1fcb18eb46793c117d6305e0fb"), plumbing.NewHash("94397e08aeda5f278360929de3abdd33d596f422")},
  418. 828: {plumbing.NewHash("59cd1c3994153a66084b00fadcafad2af5a15dd7")},
  419. 830: {plumbing.NewHash("985c4410141cda5d28c2ac0abb6341d6d0d4c4ca")},
  420. 831: {plumbing.NewHash("bcc68e90406cc53a2887a4cebb253ce96d95117c"), plumbing.NewHash("3fe78376913cfa310a13990589d312d47440399d"), plumbing.NewHash("205c61178d10089cbe0fbc90950dbe0cb9d380dc"), plumbing.NewHash("94dbc3042f5a85b399f5ce2859d4e8fbafd235b9"), plumbing.NewHash("cebf2084ebb0a603383ceb6807653921796cd095"), plumbing.NewHash("cad3d1e7d999d083ce31687e6576963cbd79dc28"), plumbing.NewHash("1dc67f374cde47a721e5fe5d9237bc2573bda2f0"), plumbing.NewHash("4392d120d845adf25d96d353df1513aa8e1df0ad")},
  421. 833: {plumbing.NewHash("f3f6e4facdb4e958239b4ad28e19eb1725a55153"), plumbing.NewHash("09b97e951a0d47e7d5f31a66804a8e76f389315b"), plumbing.NewHash("eae75fe2a6ee1994c9821c24bead7ad75bb9b452")},
  422. 842: {plumbing.NewHash("692e8e2023046c683b2b66f80b32b0c525570c52"), plumbing.NewHash("395a2edfd7741b5cdce7997bf8d4018d0e4b10d9"), plumbing.NewHash("29dc8ae6c8d1829cf5f4a1fe1a086070d917da77")},
  423. 843: {plumbing.NewHash("e9aa6a5ebe3468f6413ef15ddde128725139abe9"), plumbing.NewHash("d9bf40bf8cb829bf620a97f484ade27912ba630e")},
  424. 850: {plumbing.NewHash("0bc856f90a746ce3c8078f5ec4fb5156c88d8fdd"), plumbing.NewHash("43789fa5fdba49dfd3ab8c16ff8478a8e5764c3e"), plumbing.NewHash("cf026a58a01334c2bc592620b15cc6a18ccc5741"), plumbing.NewHash("1a03b57468ce514015812adc0ef9c61506daf590"), plumbing.NewHash("0ffba624c5310fd8b536b516a0c10e23f3a402fa"), plumbing.NewHash("c5ff4c967e15bd1402afd76494126ea40e67673b")},
  425. 851: {plumbing.NewHash("cafa2860a311171fbebde938d14d11bcbb2a5755"), plumbing.NewHash("3305cd39f8daa7b0a76aeb078ce50bf871688031"), plumbing.NewHash("84ceb94055b831c486dbf4955fdf1ba0f63320d1"), plumbing.NewHash("64321deb7e8b7eabc12da0a5ca48da78e132dd57"), plumbing.NewHash("cd317bb9b6bc5acb623b8f7e01e9d9bf19c326a0")},
  426. 854: {plumbing.NewHash("f98b1da727cea5a308e55c8bbf332ba71e1b865c"), plumbing.NewHash("d03a41b4262e6aab8ad56cd8395808cacf5921e3")},
  427. 855: {plumbing.NewHash("46b691e302e896053c4fd167ca4ec688d957b245"), plumbing.NewHash("ddd8f470aeac84049b9d1d05b6d9a9b568f9b5f7"), plumbing.NewHash("cbecc6a2fba8729b34c3e852ed4f17aa7bc2ecdd"), plumbing.NewHash("b95fcf7f52aca8ad0b1afb3cfc64c8eed534fafe")},
  428. 857: {plumbing.NewHash("eaa54e41534d7ad7db5669738b20f5d9152da51a"), plumbing.NewHash("858e3a8a84ba17dd607fbaad61f64ce1f588af8b"), plumbing.NewHash("b347eed98780f64cca45db93e304d3ee7fda399f"), plumbing.NewHash("4fc583691b9ebf59c0bbea09fc1f5c7e5ee9c68b")},
  429. 858: {plumbing.NewHash("bbbd585f25c25ee6c0ab95af474c40a3bc427ddd"), plumbing.NewHash("7b6bd3172c5e40f61473b5a32fbdc1809ee071b1"), plumbing.NewHash("45f8f11104e938f4c7da9feae07abffdb4706276"), plumbing.NewHash("36b862f259fd1f062e815a5212381a968a544709"), plumbing.NewHash("c83ca43005ce81de145f17d112e308b492ddc1e0")},
  430. 860: {plumbing.NewHash("7c7d73530c1ab1b47f9fb5f0612ec13fef1a26c6"), plumbing.NewHash("c78fbc42484760e2f22aba208b742763a93a7706"), plumbing.NewHash("281bc587b38b91abd3a2af41fb43848dd38b042c")},
  431. 861: {plumbing.NewHash("eeee45877cea31ce83427b08565e35238ed4edb3"), plumbing.NewHash("39d019917c3919120ee6073ed945b83f87e226c4")},
  432. 863: {plumbing.NewHash("e0fefdae0cae9c97f318fca9717467f7faf071d9"), plumbing.NewHash("a6679b7077ff102a83d2706fea671eaa3a0bf349"), plumbing.NewHash("1d176e261469651067221448ee27c9b83abd1cc7"), plumbing.NewHash("b200b7d19c6b00a99bd890b925c1c0d887c1b303")},
  433. 864: {plumbing.NewHash("2382f788b4f14646fa8b6b2d8d65f1fc138b35c4"), plumbing.NewHash("027da838d08f16ee66b538b942654a2650c2a7d5")},
  434. 866: {plumbing.NewHash("552727ed8d9eb898a27afbba7c9fd292ca149474"), plumbing.NewHash("c2b844ba2fe8d0d597da9ef6a9af3b20d18d0bec")},
  435. 867: {plumbing.NewHash("3537381fa9487559b47f4cf3711265b2d5b049db"), plumbing.NewHash("1e9ee7e1651f201c3b2f86e534c551e9688949e5"), plumbing.NewHash("d687c6eda4d9cb58756822fd77402274db309da8")},
  436. 868: {plumbing.NewHash("4e8fc3325919b6eedb70468e7df353725c54c854"), plumbing.NewHash("ca732cd117e042a9f69bdd5738466f51f7d4719c"), plumbing.NewHash("1bbd52c7081a842fb4af7f582c44de0d2ba644e3")},
  437. 870: {plumbing.NewHash("4b3b070300abececc0ad27133ae1ed1baf691725"), plumbing.NewHash("57bc4da51329a48937f809aed0b80d23575bc209"), plumbing.NewHash("e27b8b9343da4558b98570d6d45599bd0e365723"), plumbing.NewHash("71bfb00788ec6e899abc8d2de1a67a281879bccd"), plumbing.NewHash("3f9c0a30ab6a024c9b6fbd83f4ae9119c93718b9"), plumbing.NewHash("f9481339a378272258bf5b3832012ea317bc35cb")},
  438. 871: {plumbing.NewHash("a90d0428d5b48be9872979d6c18ac05d9888fd7b"), plumbing.NewHash("e67a38fc77c848ae6d34dfa539c3aaedd68687b2"), plumbing.NewHash("ec0e7667f9e4c4f18e4286df6027520fdc88d454"), plumbing.NewHash("e49916888d5099061bd73dc5aa1e86afe93589f5"), plumbing.NewHash("a095c1b16f8368a996ec345c4f3fd0a77cdf54f3"), plumbing.NewHash("630a398ea8665f7062d40e3ba3c9b11205d902db")},
  439. 874: {plumbing.NewHash("5ec6285e906788bd5669ac418239d36d1881e8ba")},
  440. 875: {plumbing.NewHash("9772b1179c39433cf6babf96b53b7987e5cb5027"), plumbing.NewHash("7af02c324004e6a743120c244101f44ce661dcdf"), plumbing.NewHash("9a24cc6bf97951b0cd2334a6897ee21e2308e1d2"), plumbing.NewHash("b889a4d88b8cd0189e1d77fb503f485b1e861b99")},
  441. 878: {plumbing.NewHash("9eb32b0a78254f7f29e1945fc8b1c9682effd2f5"), plumbing.NewHash("9164bf6184f26e40fdc561ed776181a9bb692e9d"), plumbing.NewHash("619259c1f067e342f3003865c39631f84eafc2a9")},
  442. 881: {plumbing.NewHash("1f6164695cc01a2920e3bfbd7368a85ec2a8a8aa"), plumbing.NewHash("27fa03a441962392ef4aefb3656833cc06b2d9a9"), plumbing.NewHash("90f76daf11a8376670928dacd555c69c683e12d4"), plumbing.NewHash("05b234f993b7dba88e8f468a1c8aba272ee8a55a"), plumbing.NewHash("c7dd36a1bab362de478f528078261439b803aaa7"), plumbing.NewHash("9e1dd0ac58ba5f1343af76a69a762cb75ca3315d"), plumbing.NewHash("fa97c50dc1abddef0e0fdc21dcf2192b84ffca7e")},
  443. 892: {plumbing.NewHash("5625d70ed9fbb0692ddde085009c8f2ef1111838")},
  444. 895: {plumbing.NewHash("6553a00e5f59acb09d5bc02f6d0c6ca71c3d47a5"), plumbing.NewHash("b76571f636ba37a04d4f104d701a62a536dfa0b4"), plumbing.NewHash("19862b0b1d52e7636b30b0a0955ae4ecf2544edb"), plumbing.NewHash("59f9894ff7a00df6e6f14c5bec781493b3684d2d"), plumbing.NewHash("dc92abc1101a6c445dd6dcc910ee3c7427f6fb50"), plumbing.NewHash("0f1c8550576c0320632c5541caa42325833f1335")},
  445. 900: {plumbing.NewHash("9166733c3c144739868fe0c30d57b861b4947b44"), plumbing.NewHash("cecb916bff506190bdf2aaf2c9327fc929585569"), plumbing.NewHash("5cd8a7bcf6c01ca641c4f7f3e899a48f38587dc2")},
  446. 903: {plumbing.NewHash("5938501a61329a8548d9821fded54c46a315adb9"), plumbing.NewHash("fc1178a7e6335766df6ced6d341648f5727ffc8d"), plumbing.NewHash("d4f6ff8a88e215875480da985de6b9acbb322f44"), plumbing.NewHash("ffdae928837946a31799aab0950a9aa82c6f5ce0")},
  447. 908: {plumbing.NewHash("71a791cb71ef7080ea150152633a90e7821070af")},
  448. 909: {plumbing.NewHash("a5103909bb43f6e9d655587949b54cbd95abbb18"), plumbing.NewHash("8ac788a5614570d222c484ab49cf9e878eeab9ff"), plumbing.NewHash("b88d5c3ab0ff858f40b6fae4691349411c00ac94"), plumbing.NewHash("2dfba02e0a507eddaf3db3d7f3660d75ce740639"), plumbing.NewHash("710898f759b9ec229f3513bb9f58fd032c1aaed6")},
  449. 914: {plumbing.NewHash("408de9b67f56636b9802adae82c06f70f60ae000"), plumbing.NewHash("db4b0a7a1938fe181f61cba24880e12bba8faaac")},
  450. 916: {plumbing.NewHash("1bbfdb6174c2833c7aca8261a3f97bc5235bc66b"), plumbing.NewHash("3ff0939e23c9a5e6e163b6afd99f58db09b13bb5"), plumbing.NewHash("22b66dfb44ad6c9e18a66adaa9933ce502499e8a"), plumbing.NewHash("b36c982934334f3bde7187d3a3a25aafa8f60ece"), plumbing.NewHash("2abf10c3a429a7c8146925930d8111b7a71dbdba")},
  451. 918: {plumbing.NewHash("130809e645e56d551250aef95c74f6f0f9463381")},
  452. 919: {plumbing.NewHash("081d218311cf6c61792159f523ad2dfc2bdb8df9"), plumbing.NewHash("213ec198dc7fae17b86d3d4e37dee3559761abd0"), plumbing.NewHash("4a9f551a7a532c744dcb0fa90699ca3262d10bc0"), plumbing.NewHash("ab5700962a74500aef0c6838580d285dbbb359c5"), plumbing.NewHash("19e1be2cf2592dcdb1650afc00bad9e08fa07954")},
  453. 921: {plumbing.NewHash("80dcdcd88d9ca6f93695de0d16e4ce3ff2c094a7")},
  454. 925: {plumbing.NewHash("306689c52a9674637f30b2319f90c6c0cbe770ff"), plumbing.NewHash("ff9118f68a4a9083d3a16081f5643ac098e1be44"), plumbing.NewHash("c03482506bf7a82906edcd9f91e3abc8ef88bbed")},
  455. 926: {plumbing.NewHash("2f266c3345ec65c721d4599a685fde6611fb8609"), plumbing.NewHash("797ea236df90136f7f4060d66bfdeff9dfdb8f51"), plumbing.NewHash("2ba3b00fc7f5842ea0b125efc6422a3bed1b90f6"), plumbing.NewHash("1aec8aeefc2ad6ccb39ef6c43d03ed86e0adb6ef"), plumbing.NewHash("a32f648ed1371cf5e82b5cbdd4d252355cc729e7"), plumbing.NewHash("79dab7a6acd0b65a5af1bc1404158e9d31c903bc"), plumbing.NewHash("eac0d42ea088d04247ab464edecfebd1c8ca9b7a"), plumbing.NewHash("b3370c0da4430367761e6762e85de7ab4962ff62"), plumbing.NewHash("a67a36c7341252e24bec94c784045478766a36e2"), plumbing.NewHash("6a36339b63372b51e0aad81fcc798d3183e45e26"), plumbing.NewHash("3c19add103066bc58970810e32050051ac1a7399")},
  456. 928: {plumbing.NewHash("1a3d9de50d3b10276a019a91c67cd24c69b2c888"), plumbing.NewHash("d3db58c8bf1ef9d078b3cf5d828d22346df6469b"), plumbing.NewHash("3dd3e8331677e68e7dec6ed4a1cbf16b7ef19f7f"), plumbing.NewHash("c3b512fa98f0c560ec59a73858057bc4decab75e"), plumbing.NewHash("429332ba902659175ce896e98a5e531bd4071e59"), plumbing.NewHash("c400d0e798275ac79f84fd2f974a8c01cd7deb15")},
  457. 930: {plumbing.NewHash("b9bda3700738e736c000e6ba6d33055ca99726e6"), plumbing.NewHash("16e869ae06a72418bc5acaa5fa13b94b94d83e2d"), plumbing.NewHash("46ebf30671b4e0f3fe8daf1e5a0694d6f99784b7")},
  458. 942: {plumbing.NewHash("3f148e40e1dd9b95c4cd2dd0d182b8729729a5c0"), plumbing.NewHash("34055f494f2fadbb4ca05d8a7aac0b1a723a1ab2"), plumbing.NewHash("7144aeb796d7e1bc6523bca8688dd3160d54e810"), plumbing.NewHash("9fecc510372311e0bcf89077e12669016b93782a"), plumbing.NewHash("e48bc45d1b4f760f77e5f01f128c05011a6459fc"), plumbing.NewHash("cab77c8f23bf81eaa06aeeeb28a4da3b716f7bd7"), plumbing.NewHash("970488e8a9376753cefe5e9b147558e35474e714")},
  459. 947: {plumbing.NewHash("b1d298a1260d620e3fff873d19d7ca22d832e88e")},
  460. 948: {plumbing.NewHash("7d63ae6f7861d01f8feb1d0ab5df9b5f04d80658"), plumbing.NewHash("d368dc870bfd8fdd4ca0ff82bd5b61aa549291c5"), plumbing.NewHash("159bb1aac17a8de0f96997d35703b8f26926a848"), plumbing.NewHash("7230b002163e9204ab7aa930f8e76980cfdd445f"), plumbing.NewHash("de17ff3b2aded0e94a9a747a61d4614d56ff80ef"), plumbing.NewHash("ee4b0bcc2cbac3c79c9b0b2c30036d189654bed4"), plumbing.NewHash("abccbbac42a8f5b37673b12215b04da421f1e4ce"), plumbing.NewHash("09e0044882f9311d5ea227e130209636defabfa7")},
  461. 949: {plumbing.NewHash("a3d41bb88640d034947c68cb75d2e89c54706ec8"), plumbing.NewHash("e5f2fb7852b505893e863a0c0fc0598a73e52372"), plumbing.NewHash("3fe52549783a745417cbe8552f8307161d7b8c77"), plumbing.NewHash("7fc5466f95e2f9e19c8ab7602286f642a136091b"), plumbing.NewHash("bfa7dca200c1bc2b0dae2d312930d03de6768bc4"), plumbing.NewHash("c25e06f05a245ebf127ed3856eea19a7c73a9ab9")},
  462. 951: {plumbing.NewHash("612f5307b962fb140106efcc50932c292630fda3"), plumbing.NewHash("fde40f288bae02b49c93e3a06419b5d59540c1d8"), plumbing.NewHash("5cd2ad0470f58ddd2115f20ac180aaab2f5b1fe9")},
  463. 952: {plumbing.NewHash("211ec41c1c4f62cb58a55e111010e0492d7463d0"), plumbing.NewHash("99fb182983e8691fcb0d267081e86564a687c1c3"), plumbing.NewHash("cc08f0f01fe97a9659e3da8fa9b290a54992c74a"), plumbing.NewHash("3292aa5a30350c67627f173ceac713956f68271f")},
  464. 953: {plumbing.NewHash("bbcf0e60af52bcee17f4aa61d5b29f7033176083"), plumbing.NewHash("fabb40b2183de521cc3ef4884856e89c857bfdc6"), plumbing.NewHash("a8a59c71eccb0bea3ef17feeed6970112ab4aa8e"), plumbing.NewHash("7334b9a30d7515b92458b292f53bb85b630bec63"), plumbing.NewHash("abcd7dba35fa8e687836adc1152a709b9f67cb7c"), plumbing.NewHash("cf2623b19a65d128b26c400138c291fbc98fb48d"), plumbing.NewHash("208170eafc717dd2a6c6a5919d76ba82d0f19a04"), plumbing.NewHash("5b3cf890c1d870e9ac71862750f2447da4d7b57a")},
  465. 956: {plumbing.NewHash("1fec906ab0b047546fffd5ae4d67a688eac7158b"), plumbing.NewHash("c1072cc66cb7d678d4da58d88eaa3e1162f86e67"), plumbing.NewHash("f9532ce0697087dd3b80d2407515014df58393a1"), plumbing.NewHash("ae6b66a56b702d5dcbf8a99576001f6354d96990"), plumbing.NewHash("a56b16fffec6e4a431bf14e13e7dabeeb5904cd8"), plumbing.NewHash("9634b5a8561d26706ed26a413e999f9aa50c7f47"), plumbing.NewHash("05f9d212957ab030c0b70a03267bf4a212c8e38b"), plumbing.NewHash("ce4947cbaf380589a63def4cc6eb3e460c41254f"), plumbing.NewHash("eec5da20be0eddb926a74f2c8ef6e690ccf98336")},
  466. 957: {plumbing.NewHash("b285d710f6f3ae8478b9a17e86c6b6aabbd22494"), plumbing.NewHash("a04284341c26761d4b1a92f3245a535b2b2eaad4"), plumbing.NewHash("a375cfead032ff4c211a28e5ab56502af3e705e0"), plumbing.NewHash("bbb5bd0e30fe3e75e1b2b7d670f5cbbb8ccd7c2f")},
  467. 958: {plumbing.NewHash("c25fa38deb4efc5445f64af3ec17eae0eb660d2f"), plumbing.NewHash("439d847f26526bfe6a4181e6915ee94cdd3f32e8"), plumbing.NewHash("d1ee9455afc98e09d2a24f839305293b5388469b")},
  468. 962: {plumbing.NewHash("b17169ca5d6cd1c8aeb237fc2bb0555c9e1b6a02"), plumbing.NewHash("58c219164a0050a38cdafea7a6ff3d7181855326"), plumbing.NewHash("06453e661dd93348e97a49b6eb8d524c83b9b981"), plumbing.NewHash("94e8f4661d568b02978c3926fb74fb32c88f60be"), plumbing.NewHash("21b80642d0d050bf40c66c2841955ffb48506b66"), plumbing.NewHash("6d95e7ceb77b94be104750c1e8d60197aa2dfebe"), plumbing.NewHash("31fe12a65d82fffa0ef1443810f3957db3c4d21d"), plumbing.NewHash("655b5b9f9fbace30cf71c54106c1fc6d49b169c2"), plumbing.NewHash("c6c951442602034439ae3903069fc44c274b7570"), plumbing.NewHash("018e55be7c77f168abaea898233ccec035bb39d0")},
  469. 964: {plumbing.NewHash("ae233648588141f33225fa8e6cb59df44d494db5"), plumbing.NewHash("6f3c49a1fdedcf8371eb3ca64cc4fb6e91cf844e"), plumbing.NewHash("2979250119547754c60cf5c2df1c60708a1bd2df"), plumbing.NewHash("67a432c273cbd65866b1d2cb1e2c62714b633b6e"), plumbing.NewHash("2f3edf96078d78450b985bdf3bfffe7e0c627169"), plumbing.NewHash("86d245cf08499ad824b0f8ba19cf5d5240ae70de")},
  470. 966: {plumbing.NewHash("236c3ebed52f9eb224657206d7aebfc1c1478f72"), plumbing.NewHash("bc53ef938a0e3d142ec596f75b87b642c19f10b9"), plumbing.NewHash("4001fb658aed6533983d5a3aa434060d37dff024"), plumbing.NewHash("8fe4af613260c872e9429b7e76bba43eb76eabec"), plumbing.NewHash("2c64566855b39060484f07269303dfd115d5e5fa"), plumbing.NewHash("e0d9fcb91ad4cd8d993c3225cda508a30b3095b9"), plumbing.NewHash("5665b4d41ee1cf2cda0e422e6dabe741a0e0a4ce"), plumbing.NewHash("69f231cbfd5c90092d37567bd2049ef6f4597b43")},
  471. 969: {plumbing.NewHash("dea301923dfce820701818214965f0acb82907a3"), plumbing.NewHash("c24406fc9ff05edb2ab261a65e7e07ced9094a0d"), plumbing.NewHash("bcef86fad4227dcf9a7bb111cb6a81e29fba26c6")},
  472. 972: {plumbing.NewHash("cdd95c08bafe4d70b3c1163c9b9387e0efd2325b"), plumbing.NewHash("bc285462ad8ec9b8bc00bd6e09f9bcd9ae3d84a2"), plumbing.NewHash("1702d1fc87cf2c7d59bcf80248b79012dd58f046")},
  473. 973: {plumbing.NewHash("07e0c2f75d0df082c30b2cc21379cf6bcc9f2463"), plumbing.NewHash("45e781c305edc0ee23af8a60bfa73a6e2e19839a"), plumbing.NewHash("785384016277ae624fe1f5ff0876ed95c2936ef7"), plumbing.NewHash("017fe07d01751011a054a081d9646d78081c9b3e")},
  474. 978: {plumbing.NewHash("b00ec66ba948077b61e25cf7dc004857c432b01a"), plumbing.NewHash("94b86c420f2c8fe484a177a77dbfec76a018bd0a"), plumbing.NewHash("71df15c84ab73db38a4524b7c0fb7dfac2d14f82"), plumbing.NewHash("4bec1ee434a98fb8c9c62d9ebb6df5d27c9bb72a"), plumbing.NewHash("9065eff2d38a0d44dabef4ec3cec7edb1c551609")},
  475. 979: {plumbing.NewHash("722f710a5307dfeca8945975f9550a962e60aa4b"), plumbing.NewHash("da1ff3db476c8d0ed4cf794a3c7de1c7caf3f8b5"), plumbing.NewHash("bc14560f02e7231bc1bb1121fe98dadacadb9cfb"), plumbing.NewHash("88594d7f2e219f46b8df4784ce7b2017e0456295"), plumbing.NewHash("d956d19fccf6de6344c282218f1b027453785fa9"), plumbing.NewHash("a27b4a51f4880ad3a7669531b667c1ef44b173ef"), plumbing.NewHash("4a58b178073f0ba3b166220f7ebd7d56149bfb20"), plumbing.NewHash("26d5e100148272166868da8f817e1a4fb49e34cc")},
  476. 980: {plumbing.NewHash("dec0c7b7aeddab2b31b55ccb015d7e0735206d59")},
  477. 986: {plumbing.NewHash("fefa846e573fa160da033d1c90060bc0dcff4a56"), plumbing.NewHash("795a44d649112f57748a5ff6504cdf0b1a67a558"), plumbing.NewHash("18a812e1dfa793f541e9bcf9705c62a77546c7ee"), plumbing.NewHash("3502dc599e87e2dcdaf7a8922c1c1fb882eb98a2")},
  478. 990: {plumbing.NewHash("89ac314d3727c33492791a140501244465f7abfa"), plumbing.NewHash("29ee89a0663c96f6476aa7145b4f27684442b779"), plumbing.NewHash("dc95ceca57cbfada596a10a72f0cb30e1f2ed53b")},
  479. 991: {plumbing.NewHash("a18aed4f8fcb8e4d6b4433fb56754a4a73f9224a"), plumbing.NewHash("df523f10562f3570371fbf735ccbf4a28401c48d"), plumbing.NewHash("f419af9f61160e8a1a52e5a42efd607648030ae9")},
  480. 993: {plumbing.NewHash("0611d80cd05c1c199f4c41aae523bf8b988adf24"), plumbing.NewHash("c45a2c7cba99dadd8727e83cf7070a89deb4b79d"), plumbing.NewHash("e39f6ed3ebf4fc5495ddcab4832d9ede6067aba2"), plumbing.NewHash("0d66dc4252f56bec1214874dbf766bdbb2c9ac10")},
  481. 1013: {plumbing.NewHash("4da8c81442fb49eb58b779153f528ea77a9b9006"), plumbing.NewHash("6d73fa69e0a6d13948248a8accd60e24d556c802"), plumbing.NewHash("caf05b43d1eed9f77c8b220be57f7bfd878abb76"), plumbing.NewHash("d250a2fbb23f9aad839f3cd2e7fd123b1277cd56"), plumbing.NewHash("63c48ff2044ae754084dbf5afeded86efeb7184a"), plumbing.NewHash("5c224403d3948983035682a1f71bc2e0e64cbb87"), plumbing.NewHash("54a6133bb2079f28daac911ba2aeee71bb013e92"), plumbing.NewHash("5979cb0b09d9650e05c66b1acb64a60aba832f8b"), plumbing.NewHash("0d99a49057a58af7843f25e27dee64f581801415"), plumbing.NewHash("a2e26b12e2c12eaa1d15debfe697e1354c54b6ed"), plumbing.NewHash("e32c5b4b9e3068ec213f90489e0dd7487c9d191b"), plumbing.NewHash("322592521db2b7a285d54b31a6f307db580169a8")},
  482. 1014: {plumbing.NewHash("04c11f8c13b1182b43eaed36dbfdedb23efd38b9"), plumbing.NewHash("b197ba91d7bd3c390b67cd67da2eb381b25a22b2"), plumbing.NewHash("f6eda660f7bc3ec9b7b2423b47e4804a31fcff8b"), plumbing.NewHash("8ed57c168f171de7420e9a96f9e305b8236757df"), plumbing.NewHash("12a79c863e41290343d897ded71e8d7f8df456bc"), plumbing.NewHash("c67adf1765d600737b0606fd3fde48045413dee4"), plumbing.NewHash("091685954504fb46cd77f6f40bd5a780bddbb06f"), plumbing.NewHash("2b349696e6845fff9903e06f24c32f122fc01e0b"), plumbing.NewHash("c476792e366626bec3b5a44b642c315239311516"), plumbing.NewHash("eac78b859beb31cafa65a3edb4eaa888d3b6c2e6")},
  483. 1016: {plumbing.NewHash("bae5c6d0dc07c23e0af730448570949fbf27435c"), plumbing.NewHash("b187ac51e0c9ae6fab4aabfb12ceed57c83b12ba")},
  484. 1017: {plumbing.NewHash("d72c2b383e8fb5b13fe710a3343e54cb93d18eef"), plumbing.NewHash("ab97958e0b1b9ed224d5475fb6ccb8f40242794b"), plumbing.NewHash("3966daa90b4c47d2ad36e977c36a6895f2697dd8"), plumbing.NewHash("f0872fc27f684885f7507b5c1b7c60450523b5d3")},
		1018: {plumbing.NewHash("a4c1d44192690a8f677b7c17bf798324883e3f75"), plumbing.NewHash("bbc3fcfa542a2489e838b793a9778eac360f5ae4"), plumbing.NewHash("9a50d7bece2e09093f7eec251fc2be5736b131da"), plumbing.NewHash("58e702091c60aedb169df2292851fab42019cea7"), plumbing.NewHash("fb083168b300b50d3085b275870a24902603cd49"), plumbing.NewHash("958239c621a6be037c5f8b30be9270310735f725"), plumbing.NewHash("2bf4a17899820f23ff2f6e014cd78b6c8f09f4a7"), plumbing.NewHash("9e6ffc36973148e0d99929c96edfe20dfaefca8e"), plumbing.NewHash("4778fa0cec5e35be043fd157ce70b99421b621e1"), plumbing.NewHash("9357c41bbe9d1fb2fa8669066b029350f49f3767")},
		1019: {plumbing.NewHash("86a0e2c86354cc3874a524e8f500ac8ee94eb08f"), plumbing.NewHash("3ce40705a7235cabe81cfaa2ab9b9d56f225af52"), plumbing.NewHash("869114cadf7e590712d67623d11e370fd9ab5806"), plumbing.NewHash("9c24aed50db289f0fdeedc6e0e69c916a5b66f8f"), plumbing.NewHash("4c7143ef59a69c08fc3b09f8d12d93fdfbf497b8"), plumbing.NewHash("0a32488e8f546fe46d7380a46715e1d48428d33e"), plumbing.NewHash("d2c94474ac11da442dff4548c0fe325eeecc5561")},
		1020: {plumbing.NewHash("d6ab99f4ca5311877ab0891db737fc084ecae1e5"), plumbing.NewHash("24246ea53157f7e9faef74db3e4c06642a50509d"), plumbing.NewHash("507374c899f938139dacfa179acad28677f4c8f7")},
		1021: {plumbing.NewHash("616a9b01468c5f61c73401f7d7d7b1352e346b39"), plumbing.NewHash("76c5b616f2cac0d4d1852049d98ab8a067142373"), plumbing.NewHash("f3f9e020119736920b5bf3adc91fd658e1910775"), plumbing.NewHash("6f09be91e695d74f19aac084808b998ecf6eb24e")},
		1025: {plumbing.NewHash("03e181623c67fa82698291d9969ba927a52cfad3"), plumbing.NewHash("b59d17a7a4dff6789c238473b38ce65b834142a8"), plumbing.NewHash("e1a1e880d53fbbec97e8c3dfe1c82281a5761d44"), plumbing.NewHash("fa6de5a45e6d80216b4b57aeaf7d750f5d088747"), plumbing.NewHash("bbad459960173ecfe97b5aa2035410998a195002"), plumbing.NewHash("ca8172a50fd4d6a3e22960d60dae3bf112721dbd"), plumbing.NewHash("13402f1d2d55c55087d84d80769cefe8b890c48f"), plumbing.NewHash("53ec990d54130dd0a457dd235c93d39de32d571d"), plumbing.NewHash("64f80d6077edd5f277a1181df94bf4510ea0517a"), plumbing.NewHash("7113063a93ef8d77c6d7d21a2756c1ac802d83de")},
		1026: {plumbing.NewHash("149272a25af10bda7bb58205a162f6bc590ef488")},
		1028: {plumbing.NewHash("a6542e845e9af539ba2def25b36e4afdc1162f0a"), plumbing.NewHash("950e5d063320a72ca61f2082c154a65a48766239"), plumbing.NewHash("58cf55038e4011bc80632f9b5fe271de22ca71f9"), plumbing.NewHash("97acd91baf198d059628b22d172e9078dcde8831"), plumbing.NewHash("a0859a4201e6f5700520e127d66ef2c6a7af6251"), plumbing.NewHash("4c7425fe4d7836f32d123af19c90719dd9fe567a"), plumbing.NewHash("2fbac5a775fb846796d53e2aec8af4fe6d95963e"), plumbing.NewHash("8350eb80bb37a643b29afc0ab19e99796b475fac")},
		1029: {plumbing.NewHash("1f2e7a749a6fc67a59debf6fba4bc32fe57f5ad7"), plumbing.NewHash("475def3da3a1a3e6f55f438fad06c0d691a204a2"), plumbing.NewHash("1a7848772636e29231b23888b48f22e24d41c67a"), plumbing.NewHash("f65a562d8266b769e16cc2cd14f0049720e8d81c"), plumbing.NewHash("b300bfb198cb1c4967b912009e18ada1affa8c38"), plumbing.NewHash("f353f09c065222313991be3ef5bd6e19c4307aac"), plumbing.NewHash("67b823aeeba62f6895e00424524b0e1cd19e561a"), plumbing.NewHash("d4f609860c8e16a12197a941ff8ad704870d95e7"), plumbing.NewHash("abe06d593391d25e463970e8618fc0a0f9279c45")},
		1032: {plumbing.NewHash("632d811f2f65bce1806559bf3eede37e517afb6d"), plumbing.NewHash("05fe6076a117a184781c2c2dce087189995bf4d6"), plumbing.NewHash("7222b95ea5de9e88fbc288b84d3805efc3f5cb9a"), plumbing.NewHash("c7efb4e30f914555954f1d07c585c7a2ff9150de"), plumbing.NewHash("a3dcce76451751ff8462a2069984f76230fb7572"), plumbing.NewHash("0478e07ccab79db931061329484d6e97c4a6e072")},
		1033: {plumbing.NewHash("5822ee2b95ab2fff640abcc7233c0ee67620cdf6"), plumbing.NewHash("7d1e0bc5872855af5bf35a725025d3bdb6f07d6c"), plumbing.NewHash("c8bef99ec7a2032b9bea6e9a1260d05a2b6a80f1"), plumbing.NewHash("10b46cbdc47f7164cd92209077939a7f7882b3c0"), plumbing.NewHash("13669e80933f9270608e5bb1c9ead28173864110"), plumbing.NewHash("84d06a530ebbe59679acdc03b3b13460037582e9")},
		1037: {plumbing.NewHash("a3ace15d3f8f8b808fc9d9876039d84c2a35e163"), plumbing.NewHash("8a6fb0b15aeb8ed887f1cc216da55fda26d35264")},
		1039: {plumbing.NewHash("db0707b7b25d16d3a26c8c9651987a7d0a441e5b"), plumbing.NewHash("21f78b62a3081b6be209c2827ac6a68114725a47"), plumbing.NewHash("81f6b3aa5b2b6215a533180e848a3b4dff851d03"), plumbing.NewHash("1d2ad790dd43a2d702176c1170b2f3fd592a385a")},
		1053: {plumbing.NewHash("e0ce5daf453729c178542de684a959577fbadcf7"), plumbing.NewHash("596cca7d5aa356d9315eb4458b0adc02800b8632"), plumbing.NewHash("5d54eeb3967f3364955478c9520ed9ba05c4f9f2")},
		1057: {plumbing.NewHash("f1df4297c27f4a2dab675d253be67eb0372817af"), plumbing.NewHash("173a1a545954bae38e40f4fb0bde228765a9b059"), plumbing.NewHash("4eab0556d29f11ff41758d80c15d6457263f6a93"), plumbing.NewHash("8193bae22a3aec7b9b2ad2962b62ab71ee5a6f2e")},
		1058: {plumbing.NewHash("8b5cc7fff1df3f7295fa12a4036a64c0f21f9150")},
		1062: {plumbing.NewHash("4f2e65c385d60fa87bb143c6c506cbe428895f44"), plumbing.NewHash("06eaeebecfb73c23bfd531013ca172ee3bf5069c")},
		1065: {plumbing.NewHash("125f4234772c48f4eba6bda3022a98ba7f9c9e9c"), plumbing.NewHash("5e797436c3defd2d863ac1ffab11c48dbd42588e")},
	}
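	// testSentimentComments maps tick indices to the comment texts the analysis
	// is expected to extract; judging by their content, these fixtures appear to
	// have been mined from the commit history of the Keras repository.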
	testSentimentComments = map[int][]string{
		0: {"class MaxPooling1D(Layer): TODO", "class Maxout(Layer): # input of model (first layer must have an input attribute!) Reference: http://www-etud.iro.umontreal.ca/goodfeli/maxout.html # cut texts after this number of words (among top max_features most common words) convert class vectors to binary class matrices def __init__(self): pass only keep most common topics # ouput of model", "filter wires with rare topics # shape: (nb_samples, time (padded with zeros at the end), input_dim) new shape: (time, nb_samples, input_dim) - because theano.scan iterates over main dimension"},
		1: {"class MaxPooling1D(Layer): TODO", "only keep most common topics # ouput of model", "filter wires with rare topics # shape: (nb_samples, time (padded with zeros at the end), input_dim) new shape: (time, nb_samples, input_dim) - because theano.scan iterates over main dimension"},
		2: {"scale preds so that the class probas of each sample sum to 1"},
		3: {"output of model # TODO: support for custom output shapes"},
		4: {"use the new accumulator and the *old* delta_accumulator", "update delta_accumulator"},
		6: {"only keep most common topics"},
		13: {"two different embeddings for pivot word and its context because p(wc) != p(cw)"},
		15: {"text preprocessing utils", "one gradient update per sentence (one sentence = a few 1000s of word couples)", "recover the embedding weights trained with skipgram:"},
		17: {"the update below seems missing from the paper, but is obviously required"},
		22: {"save_weights_to_hdf5(fpath + .h5, weights)", "FIXME: fail if file exists, or add option to overwrite!"},
		37: {"convert class vectors to binary class matrices", "convert class vectors to binary class matrices"},
		39: {"RGB: height, width, channel - channel, height, width", "grayscale: height, width - channel, height, width"},
		49: {"the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices"},
		64: {"the data, shuffled and split between tran and test sets", "the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices", "model = model.add(Dense(784, 50)) model.add(Activation(relu)) model.add(Dense(50, 10)) model.add(Activation(softmax))"},
		68: {"the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices", "dense model test #", "The TimeDistributedDense isnt really necessary, however you need a lot of GPU memory to do 784x394-394x784", "You need another layer before a denoising autoencoder This is similar to the dropout layers, etc..", "Build our autoencoder model", "Do NOT use validation data with return output_reconstruction=True", "Do an inference pass"},
		72: {"the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices", "Create a slightly larger network than required to test best validation save only"},
		73: {"Create a slightly larger network than required to test best validation save only"},
		80: {"helper function to sample an index from a probability array"},
		81: {"the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices", "eval model which was trained with", "parameter for rescaling the objective function eval model which was trained with", "calculate the weight vector for the loss function binary crossentropy", "hinge losses, map labels", "calculate weight vector for current batch"},
		83: {"the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices"},
		84: {"the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices", "no weights: reference point"},
		91: {"the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices", "Test using a reference implementation of softmax"},
		93: {"mask is (nb_samples, time) # (nb_samples, time, 1) # (time, nb_samples, 1) matrix. # (time, nb_samples, 1)", "left-pad in time with 0 # Well save these so we can reset it later", "by convention, use 2 as OOV word reserve index_from (=3 by default) characters: 0 (padding), 1 (start), 2 (OOV) Uniform score: 4 options = ln(4) nats (2 bits) we should not do better than this when we mask out the part of the input that gives us the correct answer", "Train it to guess 0th dim", "Train without showing it the 0th dim to learn 1st dim", "Train to guess 0th dim when 0th dim has been masked (should fail)", "Finally, make sure the mask is actually blocking input, mask out timesteps 1 and 2, and see if it can learn timestep 0 (should fail)", "by convention, use 2 as OOV word reserve index_from (=3 by default) characters: 0 (padding), 1 (start), 2 (OOV)"},
		95: {"loss is a dictionary mapping output name to loss functions", "data is a dictionary mapping output and input names to arrays", "data is a dictionary mapping input names to arrays"},
		98: {"convert class vectors to binary class matrices", "the data, shuffled and split between train and test sets", "convert class vectors to binary class matrices", "no weights: reference point"},
		109: {"we start off with an efficient embedding layer which maps our vocab indices into embedding_dims dimensions", "we add a Convolution1D, which will learn nb_filters word group filters of size filter_length:", "We flatten the output of the conv layer, so that we can add a vanilla dense layer:", "Computing the output shape of a conv layer can be tricky; for a good tutorial, see: http://cs231n.github.io/convolutional-networks/", "We add a vanilla hidden layer:"},
		110: {"recovered_model = model_from_yaml(yaml_string) recovered_model.get_config(verbose=1)", "save multi-branch sequential model # For now, this can only happen for regularizers and constraints #####################################", "layer_dictk =vname(**v)", "Create a container layer and set class to respective model", "Create a container then set class to appropriate model"},
		111: {"mode 3 does not exist", "Values calculated by hand", "Weights must be an iterable of gamma AND beta."},
		116: {"pick the one with the correct shape", "if it has an optimizer, the model is assumed to be compiled", "As long as there is no input, an error should be raised.", "Once an input is provided, it should be reachable through the appropriate getters", "Make sure the output has the desired shape", "As long as there is no previous layer, an error should be raised.", "After connecting, input of layer1 should be passed through"},
		118: {"TODO: If the model is used as a part of another model, get_input will return the input of the whole model and this wont work. So this is not handled yet", "theano.function excepts a list of variables"},
		121: {"get mask for this input", "get mask for this input, if not all the values are 5, shouldnt masked"},
		129: {"QA2 with 1000 samples challenge = tasks_1-20_v1-2/en-10k/qa2_two-supporting-facts_.txt", "challenge = tasks_1-20_v1-2/en/qa1_single-supporting-fact_.txt", "challenge = tasks_1-20_v1-2/en-10k/qa1_single-supporting-fact_.txt", "QA2 with 10,000 samples challenge = tasks_1-20_v1-2/en-10k/qa2_two-supporting-facts_.txt"},
		134: {"centered on 5.0, variance 10.0", "Weights must be an iterable of gamma AND beta."},
		135: {"We assume the time index to be masked is axis=1"},
		143: {"Parameters for the model and dataset", "Try replacing JZS1 with LSTM, GRU, or SimpleRNN", "Skip any addition questions weve already seen Also skip any such that X+Y == Y+X (hence the sorting)", "Shuffle (X, y) in unison as the later parts of X will almost all be larger digits", "For the decoders input, we repeat the encoded input for each time step", "The decoder RNN could be multiple layers stacked or a single layer", "For each of step of the output sequence, decide which character should be chosen", "Train the model each generation and show predictions against the validation dataset"},
		149: {"test yaml serialization"},
		151: {"cant use sample weights with validation data at this point"},
		152: {"cant use sample weights with validation data at this point"},
		161: {"shape of the image (SHAPE x SHAPE) level of convolution to perform (CONV x CONV)", "number of convolutional filters to use at each layer", "level of pooling to perform at each layer (POOL x POOL)", "level of convolution to perform at each layer (CONV x CONV)"},
		173: {"dont need to append number for names since all nodes labeled"},
		190: {"return dictionary mapping output names to shape tuples"},
		193: {"we add a Convolution1D, which will learn nb_filter", "note: in a situation where your input sequences have a variable length, use input_shape=(None, nb_feature)."},
		209: {"convert class vectors to binary class matrices", "the data, shuffled and split between train and test sets", "create two datasets one with digits below 5 and one with 5 and above", "define two groups of layers: feature (convolutions) and classification (dense)", "create complete model", "train model for 5-digit classification 0..4", "freeze feature layers and rebuild model"},
		210: {"hdf5 dataset only support list object as indices"},
		226: {"try using different optimizers and different optimizer configs"},
		227: {"QA1 with 10,000 samples", "QA2 with 10,000 samples", "we output a probability distribution over the vocabulary", "Note: you could use a Graph model to avoid repeat the input twice"},
		232: {"VARIABLE MANIPULATION", "VARIABLE MANIPULATION", "ELEMENT-WISE OPERATIONS", "does not work yet, wait for bool - int casting in TF (coming soon) check_single_tensor_operation(any, (4, 2)) check_single_tensor_operation(any, (4, 2), axis=1, keepdims=True)", "ELEMENT-WISE OPERATIONS", "SHAPE OPERATIONS VALUE MANIPULATION", "GRAPH MANIPULATION dropout", "VALUE MANIPULATION def test_conv2d(self): conv2d works properly with Theano and TF but outputs different values in each case. Cause unclear (input / kernel shape format?)", "check_single_tensor_operation(maxpool2d, (5, 3, 9, 11), pool_size=(2, 3), CONTROL FLOW strides=(1, 1), border_mode=valid)", "scale preds so that the class probas of each sample sum to 1", "manual computation of crossentropy", "avoid numerical instability with _EPSILON clipping", "the dummy 1. works around a TF bug (float32_ref vs. float32 incomptability)", "RANDOMNESS TODO: add implementation for border_mode=same", "TODO: use concatenate instead", "TODO: use concatenation instead", "zero init of velocity"},
		235: {"self.p refers to drop probability rather than retain probability (as in paper), for consistency"},
		236: {"because no negatives in test values"},
		237: {"the loss per batch should be proportional to the number of unmasked sampled.", "in the unit norm constraint, it should be equal to 1."},
		243: {"def test_jzs1(self): input shape: (nb_samples, time (padded with zeros), input_dim) _runner(recurrent.JZS1)", "def test_jzs2(self): _runner(recurrent.JZS2)", "def test_jzs3(self): apply mask _runner(recurrent.JZS3)", "build an all-zero tensor of shape (samples, output_dim) # (samples, timesteps, input_dim) # (samples, input_dim)", "implement a simple RNN", "deal with Theano API inconsistency", "tf max_pool only supports float32", "LSTM - Run this for comparison", "get_w parameters for reading operation", "get_w parameters for writing operation"},
		246: {"We flatten the output of the conv layer, so that we can add a vanilla dense layer:", "save config file, for easy edition", "for now we raise an exception because tf.reduce_any will not work", "convert class vectors to binary class matrices convert class vectors to binary class matrices", "Convolutional # ################ test validation data", "in the unit norm constraint, it should be equal to 1.", "set_value test yaml serialization", "does not work yet, wait for bool - int casting in TF (coming soon) check_single_tensor_operation(any, (4, 2)) check_single_tensor_operation(any, (4, 2), axis=1, keepdims=True)", "This is the expected output mask, one dimension less two-tensor ops", "get mask for this input", "no weights: reference point", "implement a simple RNN", "def test_conv2d(self): conv2d works properly with Theano and TF but outputs different values in each case. Cause unclear (input / kernel shape format?)", "check_single_tensor_operation(maxpool2d, (5, 3, 9, 11), pool_size=(2, 3), strides=(1, 1), border_mode=valid)", "apply softmax to each timestep"},
		250: {"add new line in order for bash cat display the content correctly"},
		252: {"set temporary input to first layer"},
		255: {"def test_pool2d(self): pool2d works properly with Theano and TF but outputs different", "if the state is not reset, output should be different", "check that output stays the same when state is reset", "The layer can be a container layer, in which case we can recurse", "We got a container layer, recursively transform it", "Maps keras layer to the pydot.Node representing them"},
		256: {"score_array has ndim = 2"},
		257: {"for sequential models, we start by printing the expect input shape"},
		259: {"check that container-level works", "check that the call to predict updated the states", "This method does not make use of Sequential. for backwards compatibility.", "save module page. Either insert content into existing page, or create page otherwise"},
		260: {"the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices", "test with custom weights", "calling build here causes an error, unclear if this is a bug", "TODO: make sure the normalization is working as inteded test with default weights", "test yaml serialization", "large values cause overflow in exp"},
		262: {"turn off layer cache temporarily"},
		263: {"test window size and categorical labels"},
		265: {"start generator thread storing batches into a queue", "construct epoch logs", "assumed to be generator TODO: call self.", "generate alphabet: http://stackoverflow.com/questions/16060899/alphabet-range-python", "prime the model with ab sequence and let it generate the learned alphabet", "check that it did generate the alphabet correctly"},
		268: {"fall back on pydot if necessary", "case 1 Sequential wo accuracy", "case 2 Sequential w accuracy"},
		270: {"hdf5 datasets only support list objects as indices", "to the number of unmasked samples."},
		279: {"centered on 5.0, variance 10.0", "regression test for issue #1386", "regression test for issue #1275"},
		281: {"zero init of exponentially weighted infinity norm"},
		286: {"the name keyword argument of layers is saved as custom_name", "the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices", "next, compare with an equivalent network with2x bigger Dense layers and ReLU", "if a custom loss function is passed replace it in loss"},
		289: {"these are the weights of the different loss components", "dimensions of the generated picture.", "util function to open, resize and format pictures into appropriate tensors", "util function to convert a tensor into a valid image", "get tensor representations of our images", "this will contain our generated image", "load the weights of the VGG16 networks (trained on ImageNet, won the ILSVRC competition in 2014) note: when there is a complete match between your model definition and your weight savefile, you can simply call model.load_weights(filename)", "get the symbolic outputs of each key layer (we gave them unique names).", "compute the neural style loss first we need to define 4 util functions", "the 3rd loss function, total variation loss, designed to keep the generated image locally coherent", "combine these loss functions into a single scalar", "get the gradients of the generated image wrt the loss", "set up helper functions to extract the loss and gradients from the computational graph as Numpy arrays", "run scipy-based optimization (L-BFGS) over the pixels of the generated image so as to minimize the neural style loss"},
		291: {"dimensions of the generated picture.", "some settings we found interesting", "the settings we will use in this experiment", "util function to open, resize and format pictures into appropriate tensors", "util function to convert a tensor into a valid image", "this will contain our generated image", "load the weights of the VGG16 networks (trained on ImageNet, won the ILSVRC competition in 2014) note: when there is a complete match between your model definition and your weight savefile, you can simply call model.load_weights(filename)", "get the symbolic outputs of each key layer (we gave them unique names).", "continuity loss util function", "add the L2 norm of the features of a layer to the loss", "we avoid border artifacts by only involving non-border pixels in the loss", "add continuity loss (gives image local coherence, can result in an artful blur)", "feel free to further modify the loss as you see fit, to achieve new effects...", "compute the gradients of the dream wrt the loss", "set up helper functions to extract the loss and gradients from the computational graph as Numpy arrays", "run scipy-based optimization (L-BFGS) over the pixels of the generated image so as to minimize the loss", "decode the dream and save it"},
		294: {"this Evaluator class makes it possible to compute loss and gradients in one pass while retrieving them via two separate functions, loss and grads. This is done because scipy.optimize requires separate functions for loss and gradients, but computing them separately would be inefficient.", "run L-BFGS for 7 steps", "this Evaluator class makes it possible to compute loss and gradients in one pass while retrieving them via two separate functions, loss and grads. This is done because scipy.optimize requires separate functions for loss and gradients, but computing them separately would be inefficient.", "TODO: make sure the normalization is working as inteded"},
		297: {"fit the model on the batches generated by datagen."},
		306: {"reduce score_array to same ndim as weight array", "a twist: sample-wise weights with temporal output"},
		307: {"the name of the layer we want to visualize (see model definition below)", "util function to convert a tensor into a valid image", "normalize tensor: center on 0., ensure std is 0.1", "convert to RGB array", "this will contain our generated image", "load the weights of the VGG16 networks (trained on ImageNet, won the ILSVRC competition in 2014) note: when there is a complete match between your model definition and your weight savefile, you can simply call model.load_weights(filename)", "get the symbolic outputs of each key layer (we gave them unique names).", "we compute the gradient of the input picture wrt this loss", "we start from a gray image with some random noise", "we run gradient ascent for 12 steps", "we will stich the best 64 filters on a 8 x 8 grid.", "the filters that have the highest loss are assumed to be better-looking. we will only keep the top 64 filters.", "build a black picture with enough space for our 8 x 8 filters of size 128 x 128, with a 5px margin in between", "save the result to disk", "we run gradient ascent for 20 steps"},
		309: {"the model is still trainable, although it now expects compressed representations as targets:"},
		317: {"import pytest from keras.preprocessing.image import * from PIL import Image import numpy as np import os import shutil", "def teardown_function(func): shutil.rmtree(test_images)", "def: for color_mode in gsc, rgb: file_list = list_pictures(test_images/ + color_mode) img_list = for f in file_list: img_list.append(img_to_array(load_img(f))None, ...)", "the data, shuffled and split between tran and test sets", "for python 2.x Keep under lock only the mechainsem which advance the indexing of each batch see # http://anandology.com/blog/using-iterators-and-generators/", "The transformation of images is not under thread lock so it can be done in parallel", "test join mode in Lambda", "pytest.main(__file__)", "fit with validation data and accuracy", "fit generator without validation data", "fit generator with validation data and accuracy", "fit generator without validation data and accuracy"},
		324: {"ensure the the right part is always to the right of the left"},
		325: {"make value available to next callbacks"},
		329: {"TODO: make into kwargs?"},
		331: {"turn off layer cache temporarily test nested Graph models", "create a temporary layer", "need to turn off cache because were reusing model", "test Graph model nested inside Sequential model", "create a temporary layer for each input", "test Sequential model inside Graph model", "return dictionary mapping input names to shape tuples", "TH input shape: (samples, input_depth, conv_dim1, conv_dim2, conv_dim3) TF input shape: (samples, conv_dim1, conv_dim2, conv_dim3, input_depth) TH kernel shape: (out_depth, input_depth, kernel_dim1, kernel_dim2, kernel_dim3) TF kernel shape: (kernel_dim1, kernel_dim2, kernel_dim3, input_depth, out_depth)", "support strides by manually slicing the output", "pooling over conv_dim2, conv_dim1 (last two channels)", "pooling over conv_dim3"},
		333: {"TODO: implement batched calls to sess.run (current call will likely go OOM on GPU)"},
		334: {"wont work with TensorFlow", "collapse time dimension and batch dimension together", "reshape to 3D tensor"},
		336: {"if TensorFlow, we can infer the output shape directly:"},
		338: {"Usage of sklearns grid_search from sklearn import grid_search"},
		339: {"no specific assumptions"},
		344: {"first, test with Dense layer", "compare to TimeDistributedDense", "test with Convolution2D", "batch size matters, use rnn-based implementation"},
		346: {"test yaml serialization", "test join mode in Lambda"},
		350: {"note: not serializable at the moment."},
		357: {"False = test, True = train", "note: topology.Node is an internal class, -*- coding: utf-8 -*- it isnt meant to be used by Keras users.", "legacy attributes (we prefix them with _graph_) # strings", "this is the placeholder tensor for the input sequences", "apply backwards LSTM test in functional API", "this will crash if the input/output layers have multiple nodes concatenate the outputs of the 2 LSTMs no plans to support that case since Graph is deprecated", "build the VGG16 network actually create the model", "test whether container recursion works with sample_weight", "node is node part of the current network", "check shapes compatibility", "this is a placeholder tensor that will contain our generated images", "skip the first axis create the underlying Model", "this should also work", "input and output masks sample-wise normalization # list of tensors, 1:1 mapping with input_tensor # list of tensors, created by outbound_layer.", "no model-level masking for now test basics", "actually create the model", "mirror model attributes", "we then apply all metrics to all outputs.", "target shape not fully defined", "test multi-input layer", "def: # Note: not working. TODO: fix it. graph = hdf5 datasets only support list objects as indices graph.add_input(name=input1, input_shape=(32,)) graph.add_input(name=input2, input_shape=(32,))", "graph.add_output(name=output1, inputs=dense1, dense2, merge_mode=sum) graph.compile(rmsprop, output1: mse)", "create one new layer per output node of layer, and add them to the Graph with their own identifiers these properties will be set upon call of self., which itself will be calld upon self.add_inbound_node if necessary.", "these properties should be set by the user via keyword arguments. note that input_dtype, input_shape and batch_input_shape are only applicable to input layers: do not pass these keywords to non-input layers.", "TODO: proper error message g2, h2 = model(e, f)", "test separate manipulation of different layer outputs", "we dont check names of first 2 layers (inputs) because ordering of same-level layers is not fixed instantiate the input layer", "test multi-input multi-output", "test with single output as 1-elem list", "prepare loss weights", "input is not an Input tensor prepare loss functions", "collect input shapes to build layer", "this should work lol TODO: raise a warning", "raise exceptions in case the input is not compatible with the layer prepare output masks build and connect layer", "i have not idea what Im doing: garbage as inputs/outputs", "this will call layer. if necessary magic", "get the output tensor to be returned", "output was already computed when calling self.add_inbound_node", "if single output tensor: return it, else return a list (at least 2 elements)", "this case appears if the input was not a Keras tensor", "a layer instance is callable on a tensor, and returns a tensor", "this creates a model that includes the Input layer and three Dense layers", "the data, shuffled and split between tran and test sets", "compute total loss try calling the sequential model", "when we reuse the same layer instance multiple times, the weights of the layer prepare metrics are also being reused (it is effectively *the same* layer) list of same size as output_names. contains tuples (metrics for output, names of metrics)", "we can then concatenate the two vectors:", "and add a logistic regression on top", "case: categorical accuracy", "model inputs: add learning phase if necessary start with a basic example of using a Sequential model inside the functional API", "returns loss and metrics. Updates weights at each call.", "return loss and metrics, no gradient updates.", "returns network outputs. Does not update weights. Does update the network states. more advanced model with multiple branches", "test whether impromtu input_shape breaks the model", "check that the inbound node is an Input node", "set Keras tensor metadata", "no activation, this layer is only linear. get_weights, set_weights: inherited", "do not slice the training phase flag", "prepare input arrays and training function", "prepare validation data", "prepare display labels", "delegate logic to _fit_loop", "create an input node to add to self.outbound_node and set output_tensors _keras_history", "prepare inputs, delegate logic to _predict_loop", "x has shape (samples, timesteps, input_dim)", "python 2 has next, 3 has __next__ # compatible with whatever avoid any explicit version checks", "no need for try/except because data has already been validated", "output_shape = list(self.get_output_shape_for(inputs0._keras_shape)) output_shape0 = l1.shape0 output = output.reshape(tuple(output_shape))", "pre-defined merge modes", "check for redundancy in inputs:", "TODO: probably useless", "all layers in order of horizontal graph traversal. Entries are unique. Includes input and output layers.", "arguments validation", "check that x is a Keras tensor", "check that x is an input tensor", "build self.input_names and self.output_names", "build a map depth: list of nodes with this depth", "set self.nodes and self.nodes_by_depth", "ensure name unicity, which will be crucial for serialization (since serialized nodes refer to layers by their name).", "no container-level masking for now", "bad luck, have to run the graph manually", "dictionary mapping reference tensors to tuples (computed tensor, compute mask) we assume a 1:1 mapping from tensor to mask TODO: raise exception when a .compute_mask does not return a list the same size as call", "if all previous input tensors are available in tensor_map, then call node.inbound_layer on them # list of tuples (input, mask)", "update cache; keys are based on ids on input tensors and inputs masks", "the node is relevant to the model: add to filtered_inbound_nodes", "gather info about inputs and outputs", "iterate over saved layers, instantiate them, then call them on appropriate inputs to create graph nodes", "call layer on its inputs, thus creating the node and building the layer if needed", "support for legacy Sequential/Merge behavior", "avoid input redundancy"},
		369: {"pytest.main(__file__)", "the data, shuffled and split between tran and test sets", "convert class vectors to binary class matrices"},
		370: {"with function argument", "check return_sequences", "pytest.main(__file__)", "test in functional API", "graph.add_output(name=output1, inputs=dense1, dense2, merge_mode=sum) graph.compile(rmsprop, output1: mse)", "graph. json_str = graph. new_graph = model_from_json(json_str)"},
		372: {"test wrapping Sequential model", "test with functional API"},
		373: {"Theano might not accept long type", "the data, shuffled and split between train and test sets", "the data, shuffled and split between train and test sets", "the data, shuffled and split between train and test sets", "the data, shuffled and split between train and test sets", "raise exceptions in case the input is not compatible with the input_spec set at build time", "if our output includes timesteps we need to reshape"},
		376: {"covered_so_far = for module, module_name in MODULES: class_pages = for name in dir(module): if name in SKIP:", "module_member = getattr(module, name) if module_member in covered_so_far: continue if inspect.isclass(module_member): cls = module_member if cls.__module__ == module_name:", "functions = functions_not_defined_here = for name in dir(cls): if name in SKIP:", "if name0 == _: continue cls_member = getattr(cls, name) if inspect.isfunction(cls_member): function = cls_member signature = inspect.getargspec(function) defaults = signature.defaults args = signature.args1: if defaults: kwargs = zip(args-len(defaults):, defaults) args = args:-len(defaults)", "blocks = blocks.append(span style=float:right; + class_to_source_link(cls) + /span) blocks.append(# + cls.__name__ + n) blocks.append(code_snippet(class_signature)) docstring = cls.__doc__ if docstring: blocks.append(process_class_docstring(docstring))", "first, populate the nodes of the graph", "second, add the edges"},
		381: {"make sure child model callbacks will call the parent Sequential model:", "its possible to callback a different model than self (used by Sequential models)"},
		382: {"x is a single image, so it doesnt have image number at index 0"},
		383: {"get trainable weights"},
		384: {"test with a custom metric function"},
		388: {"global constants # number of classes # th (channels, width, height) or tf (width, height, channels) # L2 regularization factor # whether to use batch normalization", "Final pooling and prediction"},
		389: {"convert class vectors to binary class matrices", "case: model expects multiple inputs but only received a single Numpy array"},
		390: {"get sorted list of layer depths", "get sorted list of node depths"},
		391: {"case: categorical accuracy with sparse targets", "case: categorical accuracy with dense targets"},
		394: {"False = test, True = train", "VARIABLE MANIPULATION", "VARIABLE MANIPULATION", "ELEMENT-WISE OPERATIONS", "ELEMENT-WISE OPERATIONS", "deal with Theano API inconsistency", "avoid numerical instability with _EPSILON clipping", "Theano might not accept long type", "scale preds so that the class probas of each sample sum to 1", "manual computation of crossentropy", "transform back to logits", "support strides by manually slicing the output", "the dummy 1. works around a TF bug (float32_ref vs. float32 incomptability)", "pooling over conv_dim2, conv_dim1 (last two channels)", "pooling over conv_dim3", "tf max_pool only supports float32"},
		404: {"check implementation modes", "Compare models accuracy, loss and elapsed time per epoch."},
		411: {"pytest.main(__file__)"},
		413: {"note that output_shape isnt necessary with the TensorFlow backend so you could write Lambda(sampling)(z_mean, z_log_sigma)", "build a model to project inputs on the latent space", "build a digit generator that can sample from the learned distribution", "we will sample n points within -15, 15 standard deviations"},
		415: {"so you could write Lambda(sampling)(z_mean, z_log_std)"},
		418: {"line_length: total length of printed lines positions: relative or absolute positions of log elements in each line"},
		419: {"The equivalent shape of both test fixtures"},
		428: {"also fill in the output mask cache", "use node_marker to prevent cycles"},
		435: {"see http://anandology.com/blog/using-iterators-and-generators/", "first, count the number of samples and classes", "build batch of image data", "optionally save augmented images to disk for debugging purposes", "build batch of labels"},
		436: {"which itself will be called upon self.add_inbound_node if necessary."},
		437: {"check if binary classification"},
		438: {"rename duplicated metrics name", "power method for approximating the dominant eigenvector: # initial values for the dominant eigenvector"},
		444: {"Use weighting scheme 2 in https://en.wikipedia.org/wiki/Tf%E2%80%93idf"},
		453: {"this should have been caught earlier"},
		461: {"pytest.main(__file__)"},
		464: {"TH input shape: (samples, input_depth, conv_dim1, conv_dim2, conv_dim3) TF input shape: (samples, conv_dim1, conv_dim2, conv_dim3, input_depth) TH kernel shape: (depth, input_depth, x, y, z) TF kernel shape: (x, y, z, input_depth, depth)", "test in dim_ordering = th", "test in dim_ordering = tf", "TH input shape: (samples, input_depth, conv_dim1, conv_dim2, conv_dim3) TF input shape: (samples, conv_dim1, conv_dim2, conv_dim3, input_depth)", "Reset random seed else all children processes share the same seed", "Terminate all daemon processes", "only run data download tests 20% of the time to speed up frequent testing"},
		473: {"TODO: remove the if statement when theano with no filter dilation is deprecated."},
		474: {"attempt automatic input shape inference"},
		477: {"first, build index mapping words in the embeddings set to their embedding vector", "finally, vectorize the text samples into a 2D integer tensor", "words not found in embedding index will be all-zeros.", "train a 1D convnet with global maxpooling"},
		479: {"We flatten the output of the conv layer, so that we can add a vanilla dense layer:"},
		480: {"else: assume learning phase is a placeholder."},
		485: {"note that output_shape isnt necessary with the TensorFlow backend so you could write Lambda(sampling)(z_mean, z_log_std)", "NOTE: binary_crossentropy expects a batch_size by dim for x and x_decoded_mean, so we MUST flatten these!", "build a model to project inputs on the latent space", "build a digit generator that can sample from the learned distribution", "we will sample n points within -15, 15 standard deviations", "so you could write Lambda(sampling)(z_mean, z_log_var)"},
		487: {"if obj is a serializable Keras class instance e.g. optimizer, layer", "misc functions (e.g. loss function)", "test with funkier config", "test that new updates are the same with both models save optimizer weights", "test with custom optimizer, loss", "instantiate optimizer", "recover loss functions and metrics", "set optimizer weights", "build train function (to get weight updates)"},
		488: {"file found; verify integrity if a hash was provided"},
		492: {"test concatenation with masked and non-masked inputs", "Mask is smaller than the input, expand it", "test load_weights on model file"},
		494: {"you may download synset_words from address given at the begining of this file", "Note: SeparableConvolution not included since only supported by TF.", "you may download synset_words from the address given at the begining of this file", "auto-infered shape takes priority"},
		500: {"container.layers needs to have a deterministic order: here we order them by traversal order"},
		504: {"we start off with an efficient embedding layer which maps our vocab indices into embedding_dims dimensions", "we add a AveragePooling1D, which will average the embeddings of all words in the document"},
		505: {"the data, shuffled and split between train and test sets", "Reshape to 4D for Hierarchical RNN", "convert class vectors to binary class matrices", "Encodes columns of encoded pixels", "Training parameters.", "Embedding dimensions.", "The data, shuffled and split between train and test sets.", "Converts class vectors to binary class matrices.", "Encodes columns of encoded rows."},
		507: {"this creates larger blotches of noise which look more realistic than just adding gaussian noise assumes greyscale with pixels ranging from 0 to 1", "bigram file contains common word pairings in english speech", "each time an image is requested from train/val/test, a new random painting of the text is performed", "translational invariance seems to be the hardest thing for the RNN to learn, so start with = 4 letter words.", "After 10 epochs, translational invariance should be learned so start feeding longer words and eventually multiple words with spaces", "the actual loss calc occurs here despite it not being an internal Keras loss function", "For a real OCR application, this should be beam search with a dictionary and language model. For this example, best path is sufficient.", "26 is space, 27 is CTC blank char", "cuts down input size going into RNN:", "Keras doesnt currently support loss funcs with extra parameters so CTC loss is implemented in a lambda layer", "clipnorm seems to speeds up convergence", "the loss calc occurs elsewhere, so use a dummy lambda func for the loss", "captures output of softmax so we can decode the output during visualization", "simplified version of TensorFlows test", "dimensions are batch x time x categories", "batchifies original CTC code", "undocumented feature soon to be made public"},
		508: {"test with Sequential model"},
		509: {"a wider fc1 compared to teacher model", "add another conv2d layer to make original conv2 deeper", "add another fc layer to make original fc1 deeper"},
		514: {"whether container weights are trainable"},
		516: {"loss=categorical_crossentropy,", "assert model.input == inputs", "assert model.input_shape == (None, 784) assert model.output_shape == (None, 10)", "model. assert model.inputs == tweet_a, tweet_b assert model.outputs == predictions assert model.input == tweet_a, tweet_b", "all this circus is to recover the last vector in the sequence."},
		519: {"the name of the layer we want to visualize (see model definition at keras/applications/vgg16.py)", "build the VGG16 network with ImageNet weights", "load weights Create model", "Determine proper input shape"},
		523: {"dimensions are time x depth", "change tensorflow order to keras backend order", "Random entry added in at time=5", "batch_size length vector of negative log probabilities"},
		528: {"delete and recreate model", "delete and recreate model using Functional API", "Reverse index of layer name to list of layers with name."},
		529: {"the data, shuffled and split between train and test sets", "The total number of feature maps at each layer", "Batch size during training", "if using a 5 layer net of pool_size = 2", "if using a 3 layer net of pool_size = 3", "Define the model and its mean square error loss, and compile it with Adam"},
		530: {"To get better generation qualities, use more conv layers for style features", "Create tensor variables for images", "Create tensor variables for masks", "Build image model, mask model and use layer outputs as features image model as VGG19", "Define loss functions", "Overall loss is the weighted sum of content_loss, style_loss and tv_loss Each individual loss uses features from image/mask models."},
		531: {"Theano has some dependency issues for sparse"},
		533: {"Create set of unique n-gram from the training set.", "Dictionary mapping n-gram token to a unique integer. Integer values are greater than max_features in order to avoid collision with existing features.", "Augmenting X_train and X_test with n-grams features"},
		536: {"this is for backwards compatibility with the old Conv1D weights format."},
		539: {"return dummy state, otherwise _dynamic_rnn_loop breaks"},
		541: {"check dynamic behavior", "recover output size by calling _step on the first input"},
		542: {"functools doesnt propagate arguments info for pytest correctly in 2.7 and wrapped doesnt work with pytest in 3.4"},
		543: {"this will contain our generated image", "build the VGG16 network with our placeholder the model will be loaded with pre-trained ImageNet weights"},
		544: {"Set theano as default backend for Windows users since tensorflow is not available for Windows yet."},
		547: {"mel-spectrogram parameters", "Determine proper input shape"},
		553: {"NOTE: binary_crossentropy expects a batch_size by dim for x and x_decoded_mean, so we MUST flatten these!"},
		558: {"Count positive samples.", "If there are no true samples, fix the F score at 0."},
		563: {"we add a GlobalAveragePooling1D, which will average the embeddings"},
		564: {"TODO: remove this if statement when Theano without AbstractConv3d is deprecated", "TODO: remove this function when theano without AbstractConv3d is deprecated"},
		569: {"TODO: remove this if statement when Theano without pool_3d is deprecated"},
		571: {"Determine proper input shape"},
		575: {"TF variables have auto-generated the name, while Theano has auto-generated the auto_name variable. name in Theano is None"},
		583: {"Append a wrapped layers label to nodes label, if it exists.", "Connect nodes with edges."},
		584: {"TensorFlow does not support full convolution.", "This dictionary holds a mapping graph: learning_phase. A learning phase is a bool tensor used to run Keras models in either train mode (learning_phase == 1) or test mode (learning_phase == 0).", "else: assume learning phase is a placeholder tensor."},
		586: {"Instantiate a Keras optimizer"},
		588: {"Calculated using sklearn.metrics.recall_score", "Calculated using sklearn.metrics.fbeta_score", "We create a layer which take as input movies of shape (n_frames, width, height, channels) and returns a movie of identical shape.", "No need to check statefulness for both Add 3 to 7 moving squares", "check that container-level works", "Cut to a 40x40 window check that the call to predict updated the states", "Testing the network on one movie feed it with the first 7 positions and then predict the new positions", "check dropout And then compare the predictions to the ground truth", "U : from nb_filter to nb_filter Same because must be stable in the ouptut space"},
		590: {"Raise exceptions in case the input is not compatible with the input_spec set at build time.", "This will call layer. if necessary.", "Outputs were already computed when calling self.add_inbound_node.", "If single output tensor: return it, else return a list (at least 2 elements).", "This case appears if the input was not a Keras tensor.", "Check that the inbound node is an Input node.", "Auto-infered shape takes priority.", "Reset layer connections.", "Set Keras tensor metadata.", "Attempt automatic input shape inference.", "This exists for backwards compatibility.", "By default we connect to the 1st output stream in the input layer.", "This should have been caught earlier.", "Whether container weights are trainable.", "Arguments validation.", "Check that x is a Keras tensor.", "Check that x is an input tensor.", "Build self.input_names and self.output_names.", "Container_nodes: set of nodes included in the graph", "Build a dict depth: list of nodes with this depth", "Get sorted list of layer depths.", "Get sorted list of node depths.", "Set self.nodes and self.nodes_by_depth.", "Ensure name unicity, which will be crucial for serialization", "No container-level masking for now.", "without the container being notified of it.", "Bad luck, we have to run the graph manually.", "Its an input layer: get_output_shape_for is identity,", "If all previous input tensors are available in tensor_map, then call node.inbound_layer on them. # List of tuples (input, mask).", "Update _keras_shape.", "TODO: Better error message.", "The node is relevant to the model: add to filtered_inbound_nodes.", "Call layer on its inputs, thus creating the node and building the layer if needed.", "Support for legacy Sequential/Merge behavior.", "Avoid input redundancy.", "Layer instance (NOT a list).", "Tensor inputs and outputs of outbound_layer. # List of tensors. 1:1 mapping with inbound_layers. # List of tensors, created by outbound_layer..", "These properties should have been set", "In this case we will create an input layer", "Tensorflow shape inference."},
		593: {"TODO remove the old call once Theano older than 0.9.0dev4 is deprecated new interface (introduced in 0.9.0dev4)"},
		600: {"Test model-level reuse", "Update self._per_input_updates", "The model owns this layer node.", "update model updates"},
		606: {"take a channel axis reduction", "this is the z space commonly refered to in GAN papers", "build a relatively standard conv net, with LeakyReLUs as suggested in the reference paper", "first output (name=generation) is whether or not the discriminator thinks the image that is being shown is fake, and the second output (name=auxiliary) is the class that the discriminator thinks the image belongs to.", "Adam parameters suggested in https://arxiv.org/abs/1511.06434", "we only want to be able to train generation for the combined model", "get a batch of real images", "For the generator, we want all the fake, not-fake labels to say not-fake", "evaluate the testing loss here", "generate a new batch of noise", "generate an epoch report on performance", "save weights every epoch", "generate some digits to display", "get a batch to display", "arrange them into a grid", "HIGH ORDER FUNCTIONS", "HIGH ORDER FUNCTIONS", "test lambda with output_mask lambda", "linearly spaced coordinates on the unit square were transformed through the inverse CDF (ppf) of the Gaussian to produce values of the latent variables z, since the prior of the latent space is Gaussian", "linearly spaced coordinates on the unit square were transformed through the inverse CDF (ppf) of the Gaussian to produce values of the latent variables z, since the prior of the latent space is Gaussian"},
		608: {"Sort weights by name", "by setting the trainable argument, in Sequential", "with constructor argument, in Model", "a non-trainable model has no trainable weights", "a Model inside a Model", "a Model inside a Sequential"},
		615: {"Ensure that the model takes into account any potential predecessors of input_tensor. Create model."},
		617: {"Original Numpy array x has format (height, width, channel)", "Numpy array x has format (height, width, channel) or (channel, height, width) but original PIL image has format (width, height, channel)"},
		622: {"The data, shuffled and split between train and test sets:", "Convert class vectors to binary class matrices.", "Fit the model on the batches generated by datagen.."},
		623: {"Match the behavior of numpy and Theano by returning an empty seqence."},
		625: {"create folders and subfolders", "check number of classes and images", "bool is available since theano v0.9dev", "add filename relative to directory", "based on TensorFlows default: normalize along rightmost dimension", "TODO remove this function when Theano without T.nnet.bn.batch_normalization_train is deprecated", "TODO remove this if statement when Theano without T.nnet.bn.batch_normalization_test is deprecated"},
		626: {"Lets train the model using RMSprop", "This example assume th dim ordering."},
		627: {"Power method for approximating the dominant eigenvector: # Number of iterations of the power method. # Initial values for the dominant eigenvector.", "Apply activity regularizer if any:", "Apply activity regularizer if any:", "add regularization penalties and other layer-specific losses", "In case self.losses isnt settable", "In case self.updates isnt settable (i.e. its a getter method). In that case the updates property is auto-computed and shouldnt be set.", "Collect updates that are dependent on inputs", "Collect losses that are dependent on inputs that are part of the model.", "Collect unconditional losses.", "Keep track of updates that depend on the inputs (e.g. BN updates).", "Keep track of unconditional updates (e.g. a counter).", "Keep track of unconditional losses (e.g. weight regularizers)."},
		628: {"Reset random seed else all children processes share the same seed", "Make a list of masks while making sure the dimensionality of each mask", "Keep track of updates that depend on the inputs (e.g. BN updates).", "Keep track of unconditional losses (e.g. weight regularizers)."},
		630: {"tf.select needs its condition tensor", "Try some incorrect values", "Check correct values"},
		632: {"With TensorFlow, we can infer the output shape directly:", "a function to be called from the Lambda layer"},
		635: {"interlace to mix up the easy and hard words", "rebind the paint function to implement curriculum learning", "increase to wider images and start at epoch 20. The learned weights are reloaded"},
		641: {"TODO: keras_shape inference."},
		649: {"This is a regression test for issue #4881 with the old batch normalization functions in the Theano backend."},
		653: {"Make value available to next callbacks."},
		654: {"Assuming convolution kernels (2D or 3D).", "No specific assumptions.", "Pick the one with the correct shape."},
		655: {"File found; verify integrity if a hash was provided.", "Fall back on pydot if necessary."},
		656: {"The transformation of images is not under thread lock so it can be done in parallel"},
		657: {"reserve index_from (=3 by default) characters: 0 (padding), 1 (start), 2 (OOV)", "reserve index_from (=3 by default) characters: 0 (padding), 1 (start), 2 (OOV)", "Assumes that self.layer is already set. Should be called at the end of . in the children classes.", "This example assume channels_first data format.", "test in data_format = th", "test in data_format = tf"},
		662: {"Pick the one with the correct shape.", "No specific assumptions.", "Poor mans truncated normal: we literally clip the tensor", "T.nnet.conv2d uses **kwargs, so the filter_dilation parameter will be ignored by versions that do not support it"},
		668: {"Theano likes to make shape==1 dimensions in the initial states (outputs_info) broadcastable", "case 1, create new file with defined separator"},
		674: {"case 3, reuse of CSVLogger object"},
		683: {"Parameters for the model and dataset.", "All the numbers, plus sign and space for padding.", "Also skip any such that X+Y == Y+X (hence the sorting).", "Reverse the query, e.g., 12+345 becomes 543+21. (Note the space used for padding.)", "Explicitly set apart 10% for validation data that we never train over.", "The decoder RNN could be multiple layers stacked or a single layer.", "Apply a dense layer to the every temporal slice of an input. For each of step of the output sequence, decide which character should be chosen.", "Train the model each generation and show predictions against the validation dataset."},
		685: {"This dictionary holds a mapping graph: UID_DICT. each UID_DICT is a dictionary mapping name prefixes to a current index, used for generatic graph-specific string UIDs for various names (e.g. layer names).", "If the class is private the name starts with _ which is not secure for creating scopes. We prefix the name with private in this case.", "In this case we will later create an input layer", "Handle laying building (weight creating, input spec locking).", "Handle mask propagation.", "The previous layer generated a mask.", "Actually call the layer, collecting output(s), mask(s), and shape(s).", "Infering the output shape is only relevant for Theano.", "Create node, add it to inbound nodes.", "Update tensor history, _keras_shape and _uses_learning_phase.", "TODO: check last dim in input_dim", "update x._keras_shape", "add dim to kernel (always same format independently of data_format) i.e. (rows, 1, input_depth, depth)", "TODO: set_shape with static shape", "channels_first input shape: (n, input_depth, rows, cols)", "else: assume learning phase is a placeholder tensor."},
		687: {"Mask is smaller than the input, expand it"},
		689: {"Determines whether broadcasting is needed.", "input shape: (nbias_samples, time (padded with zeros), input_dim)", "TODO: better handling of input spec"},
		690: {"Test with Sequential API", "Test with functional API we add a Convolution1D, which will learn filters", "use input_shape=(None, num_feature).", "model.fit(data_a, data_b, labels, epochs=1)"},
		691: {"the data, shuffled and split between tran and test sets", "a more explicit example", "In the unit norm constraint, it should be equal to 1.", "test window size and categorical labels", "create folders and subfolders", "Usage of sklearns grid_search from sklearn import grid_search create iterator", "Test tf data format count_params", "check_single_tensor_operation(any, (4, 2)) check_single_tensor_operation(any, (4, 2), axis=1, keepdims=True)", "implement a simple RNN", "channels_first input shape: (n, input_depth, rows, cols)", "TH input shape: (samples, input_depth, conv_dim1, conv_dim2, conv_dim3) TF input shape: (samples, conv_dim1, conv_dim2, conv_dim3, input_depth) TH kernel shape: (depth, input_depth, x, y, z) TF kernel shape: (x, y, z, input_depth, depth)", "simplified version of TensorFlows test", "dimensions are batch x time x categories", "dimensions are time x depth", "change tensorflow order to keras backend order", "Random entry added in at time=5", "batch_size length vector of negative log probabilities", "Theano has some dependency issues for sparse", "Try some incorrect values", "Check correct values", "first, test with Dense layer", "TensorFlow does not support full convolution.", "with string argument test wrapping Sequential model", "This is a regression test for issue #4881 with the old batch normalization functions in the Theano backend.", "centered on 5.0, variance 10.0", "test in functional API", "Properly set learning phase Pre-defined merge modes.", "Mask is smaller than the input, expand it", "This should have been caught earlier."},
		692: {"No need to check following tests for both data formats", "Simple lookup in custom objects", "test get_weights , set_weights at layer level", "test training mode (e.g. useful for dropout tests)"},
		695: {"Also skip any such that x+Y == Y+x (hence the sorting).", "test modes: sum, mul, concat, ave, cos, dot.", "No need to check following tests for both data formats test functional API", "if the state is not reset, output should be different test weight saving", "test lambda with output_shape lambda check that output changes after states are reset (even though the model itself didnt change)", "check that container-level works", "check that the call to predict updated the states", "if the state is not reset, output should be different", "check that container-level works", "check that the call to predict updated the states", "check dropout test function with output_mask function Check masking time dimension is required for masking", "test lambda with output_mask lambda", "test concatenation with masked and non-masked inputs", "start with a basic example of using a Sequential model inside the functional API", "more advanced model with multiple branches", "test whether impromtu input_shape breaks the model", "Support for legacy models", "Support for legacy behavior", "Legacy support test serialization", "Shape: (num_samples * timesteps, ...)"},
		696: {"Test with negative tuple of axes.", "Test the ability to pass and serialize arguments to call.", "left = models. left.add(layers.Dense(num_hidden, input_shape=(input_dim,))) left.add(layers.Activation(relu))", "right = models. right.add(layers.Dense(num_hidden, input_shape=(input_dim,))) right.add(layers.Activation(relu))", "model = models. model.add(legacy_layers.Merge(left, right, mode=sum)) model.add(layers.Dense(num_class)) model.add(layers.Activation(softmax))", "right = Sequential(name=branch_2) right.add(Dense(num_hidden, input_shape=(input_dim,), name=dense_2)) right.add(Activation(relu, name=relu_2))", "model.predict(x_test, x_test, verbose=0) model.predict_classes(x_test, x_test, verbose=0) model.predict_proba(x_test, x_test, verbose=0) model.", "nloss = model.evaluate(x_test, x_test, y_test, verbose=0)", "right = right.add(Dense(num_hidden, input_shape=(input_dim,))) right.add(Activation(relu))", "righter = righter.add(Dense(num_hidden, input_shape=(input_dim,))) righter.add(Activation(relu))", "intermediate = intermediate.add(Merge(left, right, mode=sum)) intermediate.add(Dense(num_hidden)) intermediate.add(Activation(relu))", "model.add(Merge(intermediate, righter, mode=sum))", "model.predict(x_test, x_test, x_test, verbose=0) model.predict_classes(x_test, x_test, x_test, verbose=0) model.predict_proba(x_test, x_test, x_test, verbose=0)", "nloss = model.evaluate(x_test, x_test, x_test, y_test, verbose=0)", "model = model.add(Merge(left, left, mode=sum)) model.add(Dense(num_class)) model.add(Activation(softmax)) model.compile(loss=categorical_crossentropy, optimizer=rmsprop)", "model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(x_test, y_test)) model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=2, validation_split=0.1) model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=0) model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, shuffle=False)", "loss = model.evaluate(x_test, y_test, verbose=0) model.predict(x_test, verbose=0) model.predict_classes(x_test, verbose=0) model.predict_proba(x_test, verbose=0)", "fname = test_merge_overlap_temp.h5 print(model.layers) model.save_weights(fname, overwrite=True) print(model.trainable_weights)", "model.load_weights(fname) os.remove(fname)", "nloss = model.evaluate(x_test, y_test, verbose=0) assert(loss == nloss)", "model. json_str = model. model_from_json(json_str)", "yaml_str = model. model_from_yaml(yaml_str)", "no activation, this layer is only linear.", "List of integers, 1:1 mapping with inbound_layers.", "Following 2 properties: tensor inputs and outputs of outbound_layer.", "List of tensors. 1:1 mapping with inbound_layers.", "List of tensors, created by outbound_layer..", "Following 2 properties: input and output masks. List of tensors, 1:1 mapping with input_tensor.", "List of tensors, created by outbound_layer..", "Following 2 properties: input and output shapes.", "Optional keyword arguments to layers call.", "Needed if we want to do something like:"},
		697: {"Recover loss functions and metrics.", "Set optimizer weights.", "Build train function (to get weight updates).", "Add to the model any layers passed to the constructor.", "Save optimizer weights.", "Make sure child model callbacks will call the parent Sequential model.", "Case: model expects multiple inputs but only received a single Numpy array.", "Prepare loss weights.", "Prepare loss functions.", "Add regularization penalties and other layer-specific losses.", "Prepare gradient updates and state updates.", "Sort weights by name.", "Returns loss and metrics. Updates weights at each call.", "Return loss and metrics, no gradient updates.", "Returns network outputs. Does not update weights.", "Gets loss and metrics. Updates weights at each call.", "Gets network outputs. Does not update weights.", "check_single_tensor_operation(any, (4, 2)) check_single_tensor_operation(any, (4, 2), axis=1, keepdims=True)", "implement a simple RNN", "channels_first input shape: (n, input_depth, rows, cols)", "TH input shape: (samples, input_depth, conv_dim1, conv_dim2, conv_dim3) TF input shape: (samples, conv_dim1, conv_dim2, conv_dim3, input_depth) TH kernel shape: (depth, input_depth, x, y, z) TF kernel shape: (x, y, z, input_depth, depth)", "simplified version of TensorFlows test", "dimensions are batch x time x categories", "dimensions are time x depth", "change tensorflow order to keras backend order", "Random entry added in at time=5", "batch_size length vector of negative log probabilities", "Theano has some dependency issues for sparse", "Try some incorrect values", "Check correct values"},
  700. 698: {"Same, without learning phase i.e. we dont pass any data to fit the model.", "Same without dropout.", "None loss, only regularization loss.", "No dropout, external loss.", "Test fit with no external data at all.", "Retrieve losses for all internal layers.", "Construct epoch logs.", "No need for try/except because data has already been validated."},
  701. 699: {"Assuming convolution kernels (1D, 2D or 3D)."},
  702. 701: {"Causal dilated with larger kernel size:", "causal (dilated) convolution:", "causal (dilated) convolution:"},
  703. 702: {"Default values of symbolic_weights is /variable for theano"},
  704. 703: {"Globally-importable utils."},
  705. 708: {"No need to check following tests for both data formats", "if the state is not reset, output should be different", "check that container-level works", "check that the call to predict updated the states"},
  706. 709: {"We dont want those compilation to show up in Theano profiler."},
  707. 712: {"make sure that only keyword argument pool_size(or pool_length in the legacy interface) can be also used as positional argument, which is keyword argument originally."},
  708. 714: {"Test with Keras tensor", "If initial_state is specified, and if it a Keras tensor, then add it to the inputs and temporarily modify the input spec to include the state.", "input shape: (samples, time (padded with zeros), input_dim)"},
  709. 715: {"cut texts after this number of words (among top max_features most common words)", "util function to open, resize and format pictures into appropriate tensors", "Run L-BFGS for 7 steps", "Decode the dream and save it"},
  710. 719: {"lets not forget that index 0 is reserved", "lets not forget that index 0 is reserved", "concatenate the match matrix with the question vector sequence"},
  711. 720: {"Layers that change the shape should already implement compute_output_shape anyway TODO: If the merge layer in the future accepts broadcastable inputs then both this function and build should be changed"},
  712. 722: {"Its an input layer: compute_output_shape is identity,"},
  713. 723: {"Test shape inference when input shape has None entries", "Usage of sklearns grid_search from sklearn import grid_search"},
  714. 724: {"but cast it to bool first", "but cast it to bool first"},
  715. 730: {"Properly set learning phase", "If inputs have been transposed, we have to transpose the output too."},
  716. 731: {"Compute quantities required for feature-wise normalization"},
  717. 738: {"assure that model is working"},
  718. 740: {"Save config file, if possible.", "Set backend based on KERAS_BACKEND flag, if applicable.", "initiate RMSprop optimizer"},
  719. 741: {"handle k 1 and k = predictions.shape1 cases to match TF behavior", "dtype=bool is only available since Theano 0.9.0"},
  720. 745: {"Do not slice the training phase flag.", "Prepare input arrays and training function.", "Rename duplicated metrics name (can happen with an output layer shared among multiple dataflows).", "Delegate logic to _fit_loop.", "Prepare inputs, delegate logic to _predict_loop.", "Test that dropout is not applied during testing", "We dont use this output."},
  721. 751: {"node is not part of the current network"},
  722. 752: {"if the model has multiple nodes or if the nodes have multiple inbound_layers the model is no longer sequential"},
  723. 753: {"Importable from root because its technically not a layer"},
  724. 756: {"Theano has a built-in optimization for logsumexp (see https://github.com/Theano/Theano/pull/4736) so we can just write the expression directly:"},
  725. 758: {"If the learning is either dynamic, or set to training:"},
  726. 759: {"Util function to open, resize and format pictures into appropriate tensors.", "Util function to convert a tensor into a valid image.", "Get the symbolic outputs of each key layer (we gave them unique names).", "Add the L2 norm of the features of a layer to the loss.", "We avoid border artifacts by only involving non-border pixels in the loss.", "Compute the gradients of the dream wrt the loss.", "Playing with these hyperparameters will also allow you to achieve new effects", "If the depth is not set, the node has no outbound nodes (depth 0).", "Update the depth of inbound nodes."},
  727. 760: {"Functions or classes with less than MIN_CODE_SIZE lines can be ignored"},
  728. 764: {"Early return if compilation is not required."},
  729. 768: {"switch to channels_first to display every kernel as a separate image", "not possible to handle 3D convnets etc.", "callback validation data should always have x, y, and sample weights"},
  730. 769: {"create folders and subfolders", "check number of classes and images", "check if input and output have the same shape", "check if the input and output images are not the same numpy array"},
  731. 771: {"Except permission denied and potential race conditions in multi-threaded environments.", "Except permission denied."},
  732. 775: {"Allow instances to be re-used"},
  733. 778: {"Creating dataset to store features", "Creating dataset to store labels", "Instantiating HDF5Matrix for the training set, which is a slice of the first 150 elements", "Likewise for the test set", "But they do not support negative indices, so dont try print(X_train-1)", "test that evalutation and prediction dont crash and return reasonable results"},
  734. 790: {"this should work with a warning"},
  735. 801: {"pydotplus is an improved version of pydot", "Check that layer is an InputLayer.", "we need access to the shape of inputs. solution: rely on K.int_shape.", "Target shape not fully defined."},
  736. 802: {"A learning phase is a bool tensor used to run Keras models in either train mode (learning_phase == 1) or test mode (learning_phase == 0).", "cntk doesnt support gradient as symbolic op, to hook up with keras model, we will create gradient as a constant placeholder, here use this global map to keep the mapping from grad placeholder to parameter", "cntk doesnt support statefulness on LSTM yet, will enable it on cntk later", "False = test, True = train", "else: assume learning phase is a placeholder tensor.", "cntk only support float32 and float64", "cntk cant rebind the input shape, so create the model again to test different batch size", "Default values of symbolic_weights is /variable for theano and cntk", "cntk will init type based on the value type", "cntk does not support stateful yet.", "cntk doesnt support batch_dot with static axis as batch axis", "cntk doesnt support reverse yet", "cntk need create batch as dynamic axis, so cant test in this way", "create special test case for CNTK which treat the first axis as dynamic axis", "cntk calculate everything in float, so dont need case from bool / int", "cntk does not support cumsum and cumprod yet", "check_single_tensor_operation(any, (4, 2), KTF, KTH) check_single_tensor_operation(any, (4, 2), KTF, KTH, axis=1, keepdims=True)", "check_single_tensor_operation(all, (4, 2), KTF, KTH) check_single_tensor_operation(all, (4, 2), KTF, KTH,axis=1, keepdims=True)", "cntk return -85.1 for zero or negative number, not nan, so cant compare with other backend.", "sequence axis is removed by default, so dont need reshape on it", "cntk doesnt support gradient in this way", "cntk currently not support funciton in this way, so cant test as this", "cntk does not support gradients as symbolic op, to hook up with keras model we will return a constant as place holder, the cntk learner will apply the gradient during training.", "collapse axis with batch axis", "if the second axis is static axis, CNTK will do unroll by default", "cntk need pooling on dynamic axes, cant test in this way, is coverred in a seperate case", "add the time_step axis back", "causal (dilated) convolution:", "cntk not support it yet", "cntk has issue with negative number", "cntks result shape is (batch, 1), while keras expect (batch, )", "scale preds so that the class probas of each sample sum to 1", "avoid numerical instability with _EPSILON clipping", "cntk only could handle loss and 1 metric in trainer, for metrics more than 2, need manual eval", "cntk only support calculate on float, do auto cast here", "cntk output_shape does not include batch axis", "As of Keras 2.0.0, all kernels are normalized on the format (rows, cols, input_depth, depth), independently of data_format. CNTK expects (depth, input_depth, rows, cols).", "transpose kernel to output_filters first, to apply broadcast", "shape: batch, filters, row, col", "shape: batch, row, col, filters"},
  737. 805: {"saving the shape to avoid converting sparse tensor to dense"},
  738. 809: {"input_shape and default_size are not identical.", "training/testing doesnt work before compiling.", "Test invalid use cases", "input_shape is smaller than min_size.", "the number of channels is 5 not 3.", "Test invalid use cases", "not specified validation_steps", "validation data is neither a tuple nor a triple.", "validation generator is neither a tuple nor a triple.", "TF optimizers do not support weights constraints", "not supported learning_phase", "Test invalid use case", "x does not match _feed_input_names.", "Test invalid use cases", "Test invalid use cases", "TODO remove this if statement when Theano without T.nnet.bn.batch_normalization_train is deprecated", "Get affine transformation params"},
  739. 811: {"Checking for empty weights array to avoid a problem where some legacy layers return bad values from"},
  740. 814: {"Test invalid use cases"},
  741. 815: {"non-trainable weights", "Reset random seed else all children processes share the same seed", "with pytest.raises(StopIteration): model.predict_generator(, good_batches + 1, 1, workers=4, use_multiprocessing=True, )"},
  742. 816: {"Checking for empty weights array to avoid a problem where some legacy layers return bad values from"},
  743. 817: {"if masking is explicitly supported, by default", "due to the algo difference, we cant guarantee CNTK has the same result on the garbage input.", "in current version cntk cant support input with variable length. Will support it in next release."},
  744. 822: {"Collected trainable weights, sorted in topological order.", "Test invalid use case", "Determine proper input shape. Note, include_top is False by default, as input shape can be anything larger than 32x32 and the same number of parameters will be used.", "input shape can be anything larger than 32x32 and the same number of parameters will be used."},
  745. 828: {"cntk currently not support function in this way, so cant test as this"},
  746. 830: {"subtract the sets to pick all missing classes"},
  747. 831: {"the Theano and TensorFlow CTC code use different methods to ensure", "for special test cases of CNTK which treat dynamic axis", "Register the Holder class using the ListProxy (allows __len__ and __getitem__)", "CNTK currently dont support cond op, so here we use element_select approach as workaround. It may have perf issue, will resolve it later with cntk cond op.", "Similiar as in_train_phase, use element_select as workaround.", "Some ops (like dropout) wont be applied during eval in cntk. They only evaluated in training phase. To make it work, call forward method to let cntk know we want to evaluate them.from But the assign ops wont be executed under this mode, thats why we need this check."},
  748. 833: {"cntk doesnt support eval convolution with static variable, will enable it later"},
  749. 842: {"First, we create all layers and enqueue nodes to be processed"},
  750. 843: {"simulate multi-input/output models", "fit without validation data", "do not slice the learning phase", "ImageDataGenerator.standardize should work on batches"},
  751. 850: {"This test checks the consistency of the stop_gradient backend API. It doesnt check the functionality (which is checked at the test_gradient test)."},
  752. 851: {"Save model and weights"},
  753. 854: {"Default values of symbolic_weights is /variable for theano and cntk"},
  754. 855: {"In the current CNTK backend, _preprocess_conv3d_input is misimplemented."},
  755. 857: {"We set NA so that csv parsers do not fail for this last epoch.", "There is a bug in cntk gather op which may cause crash. We have made a fix but not catched in CNTK 2.1 release. Will udpate with gather op in next release", "Current cntk does not support shape like (1, batch). so using the workaround here to mapping the correct axis. Will remove this tricky after we add support in native cntk op"},
  756. 858: {"Here we use tf.tile to mimic behavior of np.repeat so that"},
  757. 860: {"If the wrapper modifies the inputs, use the modified inputs to get the updates from the inner layer.", "test with BatchNormalization", "Shape: (num_samples * timesteps, ...). And track the transformation in self._input_map."},
  758. 861: {"Test invalid use case"},
  759. 863: {"Initializers saved from tf.keras may contain an unused dtype argument."},
  760. 864: {"TODO: move to tf.get_variable when supported in public release."},
  761. 866: {"Augment the mask to match the length of the output."},
  762. 867: {"The capacity variable controls the maximum queue size allowed when prefetching data for training.", "min_after_dequeue is the minimum number elements in the queue after a dequeue, which ensures sufficient mixing of elements.", "If enqueue_many is False, tensors is assumed to represent a single example. An input tensor with shape x, y, z will be output as a tensor with shape batch_size, x, y, z. If enqueue_many is True, tensors is assumed to represent a batch of examples, where the first dimension is indexed by example, and all members of tensors should have the same size in the first dimension. If an input tensor has shape *, x, y, z, the output will have shape batch_size, x, y, z.", "Do not pass the loss directly to model. because it is not yet supported for Input Tensors.", "Second Session to test loading trained model without tensors"},
  763. 868: {"fit without validation data should raise ValueError if histogram_freq 0"},
  764. 870: {"On top of new tensors", "On top of new, non-Keras tensors", "With placeholder creation", "On top of new tensor", "On top of new, non-Keras tensor", "Make sure that all input tensors come from a Keras layer. If tensor comes from an input layer: cache the input layer.", "Cache newly created input layer.", "Get or create layer.", "Dont call InputLayer multiple times.", "If all previous input tensors are available in tensor_map, then call node.inbound_layer on them. # List of tuples (input, mask).", "Check that we did compute the model outputs, then instantiate a new model from inputs and outputs."},
  765. 871: {"test invalid arguments", "multi-output, as list", "test with sample weights", "i.e. we mark it to be saved", "1. Default returns linear", "One dimensional arrays are supposed to raise a value error", "cntk cant rebind the input shape, so create the model again to test different batch size"},
  766. 874: {"test with custom TF placeholder as target"},
  767. 875: {"test dictionary of target_tensors"},
  768. 878: {"require_flatten=True with dynamic input shape.", "TODO: move to Variable constructor when supported in public release."},
  769. 881: {"Fit the model using data from the TFRecord data tensors."},
  770. 892: {"noinspection SpellCheckingInspection", "this font list works in CentOS 7", "Similar as in_train_phase, use element_select as workaround.", "test whether impromptu input_shape breaks the model", "Inferring the output shape is only relevant for Theano.", "so create a separate test case for valid label input"},
  771. 895: {"Determine proper input shape", "Ensure that the model takes into account any potential predecessors of input_tensor", "Test flow behavior as Sequence", "Test with shuffle=True"},
  772. 900: {"This step is expensive, so we only run it on variables not already marked as initialized."},
  773. 903: {"there is a bug in cntk 2.1s unpack_batch implementation", "backend information not available"},
  774. 908: {"Create model in a subprocess so that the memory consumed by InceptionResNetV2 will be released back to the system after this test (to deal with OOM error on CNTK backend) TODO: remove the use of multiprocessing from these tests once a memory clearing mechanism is implemented in the CNTK backend"},
  775. 909: {"Test that dropout is applied during training", "Check masking: output with left padding and right padding should be the same.", "Test stacked RNN serialization.", "Test regularization losses", "TODO: consider batch calls to set_value.", "build an all-zero tensor of shape (samples, output_dim) # (samples, timesteps, input_dim) # (samples,) # (samples, 1) # (samples, output_dim)", "If initial_state is specified, and if it a Keras tensor, then add it to the inputs and temporarily modify the input spec to include the state.", "input shape: (samples, time (padded with zeros), input_dim) note that the . method of subclasses MUST define self.input_spec and self.state_spec with complete input shapes.", "Properly set learning phase", "Theano likes to make shape==1 dimensions in the initial states (outputs_info) broadcastable"},
  776. 914: {"input shape (partially) unknown? replace -1s with Nones", "input shape known? then we can compute the output shape"},
  777. 916: {"We use tab as the start sequence character for the targets, and n as end sequence character.", "decoder_target_data will be ahead by one timestep and will not include the start character.", "Define an input sequence and process it.", "Define sampling models", "Generate empty target sequence of length 1.", "Sampling loop for a batch of sequences (to simplify, here we assume a batch of size 1).", "Add the sampled character to the sequence"},
  778. 918: {"3) Repeat with the current target token and current states"},
  779. 919: {"search for shared layers", "Ask the pool to update till everyone is updated.", "Were done with the update"},
  780. 921: {"DEVICE MANIPULATION AND PROBING"},
  781. 925: {"character classes and matching regex filter", "Translation of characters to unique integer values", "These methods were only introduced in version 3.4.0 (2016).", "Test that nothing is changed when target size is equal to original.", "Test down-sampling with bilinear interpolation."},
  782. 926: {"convert class vectors to binary class matrices", "Save model and weights", "Fit the model on the batches generated by datagen..", "when lahead 1, need to convert the input to rolling window view https://docs.scipy.org/doc/numpy/reference/generated/numpy.repeat.html", "split train/test data", "drop the first tsteps-1 because it is not possible to predict them since the previous timesteps to use do not exist", "input shape: (samples, time (padded with zeros), input_dim) note that the . method of subclasses MUST define self.input_spec and self.state_spec with complete input shapes.", "if the state is not reset, output should be different", "check that container-level works", "check that the call to predict updated the states", "Convert class vectors to binary class matrices.", "Start model definition.", "Instantiate and compile model.", "Prepare model model saving directory."},
  783. 928: {"Place a copy of the model on each GPU, each getting a slice of the inputs.", "Retrieve a slice of the input.", "Apply model on slice (creating a model replica on the target device).", "Save the outputs for merging back together later."},
  784. 930: {"Prepend the module name."},
  785. 942: {"If any of initial_state or constants are specified and are Keras tensors, then add them to the inputs and temporarily modify the input_spec to include them.", "at this point additional_inputs cannot be empty", "Perform the call with temporarily replaced input_spec", "will (and should) raise if more than one constant passed", "verify that state is used", "And shouldnt warn if we recompile"},
  786. 947: {"Place marker in docstring for later reinjection.", "Format docstring section titles.", "Format docstring lists.", "Strip all leading spaces.", "Reinject code blocks."},
  787. 948: {"Model creation using tensors from the graph node.", "Second session to test loading trained model without tensors."},
  788. 949: {"Also importable from root"},
  789. 951: {"One epoch is completed so enqueuer will switch the Sequence", "One epoch has been completed so enqueuer2 will switch", "Be sure that both Sequence were updated", "Tear down everything", "We use a Value to provide unique id to different processes."},
  790. 952: {"updated value in feed_dict will be modified within the K.", "additional tensor substitutions"},
  791. 953: {"This list holds the available devices. It is populated when is called for the first time.", "test with Sequential model", "test with functional API", "Transpose to time-major, i.e. from (batch, time, ...) to (time, batch, ...)", "Dropout is disabled with CNTK for now."},
  792. 956: {"use soft real/fake labels"},
  793. 957: {"Subtracting pixel mean improves accuracy", "Model version Orig paper: version = 1 (ResNet v1), Improved ResNet: version = 2 (ResNet v2)", "Model name, depth and version", "If subtract pixel mean is enabled", "v1 does not use BN after last shortcut connection-ReLU", "v2 performs Conv2D on input w/o BN-ReLU", "set each sample mean to 0", "randomly shift images horizontally", "randomly shift images vertically"},
  794. 958: {"First layer in model: check that it is an input layer.", "In case of nested models: recover the first layer of the deepest model to infer input shape and dtype.", "We were passed a regular layer, and it should know about its input shape. Otherwise, thats an error.", "Make sure there are only 0s and 1s", "Get original labels back from one hots"},
  795. 962: {"Make sure there is exactly one 1 in a row"},
  796. 964: {"fit_generator will throw an exception if steps is unspecified for regular generator"},
  797. 966: {"If loss_fn is not a function (e.g. callable class) or if it not in the losses module, then it is a user-defined loss and we make no assumptions about it."},
  798. 969: {"if theres no bias weight in the file, skip this conversion"},
  799. 972: {"hack for function. function is not available under tensorflow r1.3."},
  800. 973: {"Global Numpy arrays of imagenet mean for preprocessing symbolic inputs"},
  801. 978: {"if our output includes timestep dimension or spatial dimensions we need to reshape"},
  802. 979: {"Rethrow any exceptions found in the queue", "Make sure to rethrow the first exception in the queue, if any"},
  803. 980: {"Dropout is disabled with CNTK/Theano."},
  804. 986: {"Global tensor of imagenet mean for preprocessing symbolic inputs", "Currently, CTNK cant instantiate ones with symbolic shapes. Will update workaround once CTNK supports it."},
  805. 990: {"this test ensures that models serialized prior to version 2.1.2 can still be deserialized", "see https://github.com/evhub/keras/blob/2.1.1/keras/utils/generic_utils.py#L166", "process boundaries - make sure raises raises ValueError exception and does not attempt to run the generator.", "process boundaries - make sure raises ValueError", "On Windows, avoid **SYSTEMATIC** error in multiprocessing: TypeError: cant pickle generator objects = Suggest multithreading instead of multiprocessing on Windows", "Cant pickle tracebacks.", "On all OSes, avoid **SYSTEMATIC** error in multithreading mode: ValueError: generator already executing = Serialize calls to infinite iterator/generators function", "process boundaries - make sure raises ValueError"},
  806. 991: {"accuracy of the auxiliary classifier on generated images, so we"},
  807. 993: {"assert layers rick are equal", "Multiple outputs and multiple steps.", "Create a model with a single output.", "Single output and one step.", "Single output and multiple steps."},
  808. 1013: {"Determine proper input shape", "Function invoked at end of each epoch. Prints generated text."},
  809. 1014: {"Defalut, without OOV flag"},
  810. 1016: {"Generate corrupted MNIST images by adding noise with normal dist centered at 0.5 and std=0.5", "Encoder/Decoder number of CNN layers and filters per layer", "Build the Autoencoder Model First build the Encoder Model", "2) Use MaxPooling2D as alternative to strides1", "Shape info needed to build Decoder Model", "Instantiate Encoder Model", "Stack of Transposed Conv2D blocks Notes: 1) Use Batch Normalization before ReLU on deep networks 2) Use UpSampling2D as alternative to strides1 - faster but not as good as strides1", "Instantiate Decoder Model", "Autoencoder = Encoder + Decoder Instantiate Autoencoder Model", "Mean Square Error (MSE) loss function, Adam optimizer", "Display the 1st 8 corrupted and denoised images"},
  811. 1017: {"Check if generator is only accessed an expected number of times"},
  812. 1018: {"update self._inbound_nodes", "Set self._container_nodes and self._nodes_by_depth.", "For new processes that may spawn"},
  813. 1019: {"On all OSes, avoid **SYSTEMATIC** error in multithreading mode:"},
  814. 1020: {"test window size and categorical labels", "test if the state of a BiRNN is the concatenation of the underlying RNNs"},
  815. 1021: {"custom handling of accuracy/crossentropy", "case: binary accuracy/crossentropy", "case: categorical accuracy/crossentropy with sparse targets", "case: categorical accuracy/crossentropy"},
  816. 1025: {"no time axis in the input shape passed to RNN cells", "linear projection residual shortcut connection to match changed dims"},
  817. 1026: {"In this case the OS does not allow us to use multiprocessing. We resort to an int for enqueuer indexing."},
  818. 1028: {"use one-sided soft real/fake labels Salimans et al., 2016 https://arxiv.org/pdf/1606.03498.pdf (Section 3.4)", "The data, shuffled and split between train and test sets:", "Convert class vectors to binary class matrices.", "initiate RMSprop optimizer", "Lets train the model using RMSprop", "Save model and weights", "We use tab as the start sequence character for the targets, and n as end sequence character.", "Decodes an input sequence. Future work should support beam search.", "Generate empty target sequence of length 1.", "Sampling loop for a batch of sequences (to simplify, here we assume a batch of size 1).", "Take one sequence (part of the training set)"},
  819. 1029: {"CNTK does not support dynamic padding.", "Pass the target tensor y_test_batch to compile"},
  820. 1032: {"Currently, CNTK cant instantiate ones with symbolic shapes. Will update workaround once CNTK supports it."},
  821. 1033: {"instantiate VAE model", "instantiate VAE model"},
  822. 1037: {"Default, without OOV flag", "Will update with gather op in next release", "Match the behavior of numpy and Theano by returning an empty sequence."},
  823. 1039: {"Standardize initial_state into list", "Applies the same workaround as in RNN.__call__, without handling constants", "Perform the call with temporarily replaced input_spec"},
  824. 1053: {"define our own softmax function instead of K.softmax because K.softmax can not specify axis.", "define the margin loss like hinge loss", "Test on simple model", "A common Conv2D model", "we use a margin loss", "we can compare the performance with or without data augmentation", "Fit the model on the batches generated by datagen..", "Get metric name as string"},
  825. 1057: {"This layer name will make the layers_name HDF5 attribute blow out of proportion. Note that it fits into the internal HDF5 attribute memory limit on its own but because h5py converts the list of layer names into numpy array, which uses the same amout of memory for every item, it increases the memory requirements substantially.", "This layer name will make the weights_name HDF5 attribute blow out of proportion.", "Check that the HDF5 files contains chunked array of weight names.", "The chunking of layer names array should have happend.", "Check that no item in data is larger than HDF5_OBJECT_HEADER_LIMIT because in that case even chunking the array would not make the saving possible.", "Expecting this to never be true."},
  826. 1058: {"ensure biases are non-zero and properly converted", "A model is needed to initialize weights.", "A model is needed to initialize weights.", "separate biases for input and recurrent kernels Note: the shape is intentionally different from CuDNNGRU biases (2 * 3 * self.units,), so that we can distinguish the classes when loading and converting saved weights.", "NOTE: need to flatten, since slicing in CNTK gives 2D array", "bias for hidden state - just for compatibility with CuDNN", "inputs projected by all gate matrices at once", "biases: bias_z_i, bias_r_i, bias_h_i", "hidden state projected by all gate matrices at once", "hidden state projected separately for update/reset and new", "previous and candidate state mixed by update gate", "convert the weights between CuDNNLSTM and LSTM", "transpose (and reshape) input and recurrent kernels"},
  827. 1062: {"the data, split between train and test sets", "the data, split between train and test sets The data, split between train and test sets:", "the data, split between train and test sets", "The data, split between train and test sets.", "the data, split between train and test sets", "the data, split between train and test sets", "the data, split between train and test sets", "the data, split between train and test sets", "The data, split between train and test sets:"},
  828. 1065: {"The StackedConvRNN2DCells isnt implemented yet.", "If any of initial_state or constants are specified and are Keras tensors, then add them to the inputs and temporarily modify the input_spec to include them.", "at this point additional_inputs cannot be empty", "Perform the call with temporarily replaced input_spec", "note that the . method of subclasses MUST define self.input_spec and self.state_spec with complete input shapes.", "Properly set learning phase", "TODO: consider batch calls to set_value."},
  829. }
  830. )