
Merge pull request #10979 from jakesmith/hpcc-19337

HPCC-19337 Add streaming remote (dafilesrv) disk read support

Reviewed-by: Gavin Halliday <ghalliday@hpccsystems.com>
Gavin Halliday 7 years ago
parent
commit
5a00c4e3cc

+ 2 - 0
common/remote/remoteerr.hpp

@@ -54,6 +54,8 @@
 #define RFSERR_AuthenticateFailed               8025
 #define RFSERR_CopySectionFailed                8026
 #define RFSERR_TreeCopyFailed                   8027
+#define RFSERR_StreamReadFailed                 8028
+#define RFSERR_InternalError                    8029
 
 
 #define RAERR_InvalidUsernamePassword           8040

File diff suppressed because it is too large
+ 730 - 419
common/remote/sockfile.cpp


+ 9 - 6
common/remote/sockfile.hpp

@@ -27,7 +27,6 @@
 #define REMOTE_API DECL_IMPORT
 #endif
 
-#define RFEnoerror      0
 
 enum ThrottleClass
 {
@@ -60,7 +59,7 @@ interface IRemoteFileServer : extends IInterface
     virtual StringBuffer &getStats(StringBuffer &stats, bool reset) = 0;
 };
 
-#define FILESRV_VERSION 21 // don't forget VERSTRING in sockfile.cpp
+#define FILESRV_VERSION 22 // don't forget VERSTRING in sockfile.cpp
 
 interface IKeyManager;
 interface IDelayedFile;
@@ -73,12 +72,16 @@ extern REMOTE_API IRemoteFileServer * createRemoteFileServer(unsigned maxThreads
 extern REMOTE_API int setDafsTrace(ISocket * socket,byte flags);
 extern REMOTE_API int setDafsThrottleLimit(ISocket * socket, ThrottleClass throttleClass, unsigned throttleLimit, unsigned throttleDelayMs, unsigned throttleCPULimit, unsigned queueLimit, StringBuffer *errMsg=NULL);
 extern REMOTE_API bool enableDafsAuthentication(bool on);
-extern void remoteExtractBlobElements(const SocketEndpoint &ep, const char * prefix, const char * filename, ExtractedBlobArray & extracted);
-extern int getDafsInfo(ISocket * socket, unsigned level, StringBuffer &retstr);
-extern void setDafsEndpointPort(SocketEndpoint &ep);
-extern void setDafsLocalMountRedirect(const IpAddress &ip,const char *dir,const char *mountdir);
+extern REMOTE_API void remoteExtractBlobElements(const SocketEndpoint &ep, const char * prefix, const char * filename, ExtractedBlobArray & extracted);
+extern REMOTE_API int getDafsInfo(ISocket * socket, unsigned level, StringBuffer &retstr);
+extern REMOTE_API void setDafsEndpointPort(SocketEndpoint &ep);
+extern REMOTE_API void setDafsLocalMountRedirect(const IpAddress &ip,const char *dir,const char *mountdir);
 extern REMOTE_API ISocket *connectDafs(SocketEndpoint &ep, unsigned timeoutms); // NOTE: might alter ep.port if configured for multiple ports ...
 extern REMOTE_API ISocket *checkSocketSecure(ISocket *socket);
+class IOutputMetaData;
+class RowFilter;
+extern REMOTE_API IFileIO *createRemoteFilteredFile(SocketEndpoint &ep, const char * filename, IOutputMetaData *actual, IOutputMetaData *projected, const RowFilter &fieldFilters, bool compressed, bool grouped);
+
 
 // client only
 extern void clientSetDaliServixSocketCaching(bool set);
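
For orientation, a minimal sketch (not part of this commit) of how a caller opens a stream through the new createRemoteFilteredFile entry point, mirroring the hthor change below; the RemoteFilename rfn, the metadata objects and the RowFilter are assumed to be prepared elsewhere:

    // Sketch: open a filtered, projected read stream served by dafilesrv.
    SocketEndpoint ep(rfn.queryEndpoint());
    setDafsEndpointPort(ep);                 // apply the configured dafilesrv port
    StringBuffer path;
    rfn.getLocalPath(path);
    // actualMeta = on-disk layout, projectedMeta = layout returned to the caller;
    // the filter is evaluated remotely, so only matching rows cross the wire.
    Owned<IFileIO> io = createRemoteFilteredFile(ep, path, actualMeta, projectedMeta,
                                                 filter, compressed, grouped);
    if (!io)
        throw MakeStringException(RFSERR_StreamReadFailed, "Cannot open remote stream"); // illustrative error handling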

+ 97 - 90
ecl/hthor/hthor.cpp

@@ -518,7 +518,7 @@ void CHThorDiskWriteActivity::open()
     serializedOutputMeta.set(input->queryOutputMeta()->querySerializedDiskMeta());//returns outputMeta if serialization not needed
 
     Linked<IRecordSize> groupedMeta = input->queryOutputMeta()->querySerializedDiskMeta();
-    if(grouped)
+    if (grouped)
         groupedMeta.setown(createDeltaRecordSize(groupedMeta, +1));
     blockcompressed = checkIsCompressed(helper.getFlags(), serializedOutputMeta.getFixedSize(), grouped);//TDWnewcompress for new compression, else check for row compression
     void *ekey;
@@ -526,7 +526,8 @@ void CHThorDiskWriteActivity::open()
     helper.getEncryptKey(ekeylen,ekey);
     encrypted = false;
     Owned<ICompressor> ecomp;
-    if (ekeylen!=0) {
+    if (ekeylen!=0)
+    {
         ecomp.setown(createAESCompressor256(ekeylen,ekey));
         memset(ekey,0,ekeylen);
         rtlFree(ekey);
@@ -547,9 +548,9 @@ void CHThorDiskWriteActivity::open()
         diskout->seek(0, IFSend);
 
     unsigned rwFlags = rw_autoflush;
-    if(grouped)
+    if (grouped)
         rwFlags |= rw_grouped;
-    if(!(helper.getFlags() & TDRnocrccheck))
+    if (!(helper.getFlags() & TDRnocrccheck))
         rwFlags |= rw_crc;
     IExtRowWriter * writer = createRowWriter(diskout, rowIf, rwFlags);
     outSeq.setown(writer);
@@ -8009,7 +8010,6 @@ void CHThorDiskReadBaseActivity::ready()
     CHThorActivityBase::ready(); 
 
     grouped = false;
-    recordsize = 0;
     fixedDiskRecordSize = 0;
     eofseen = false;
     opened = false;
@@ -8097,12 +8097,12 @@ void CHThorDiskReadBaseActivity::resolve()
 
 void CHThorDiskReadBaseActivity::gatherInfo(IFileDescriptor * fileDesc)
 {
-    if(fileDesc)
+    if (fileDesc)
     {
         if (!agent.queryResolveFilesLocally())
         {
             grouped = fileDesc->isGrouped();
-            if(grouped != ((helper.getFlags() & TDXgrouped) != 0))
+            if (grouped != ((helper.getFlags() & TDXgrouped) != 0))
             {
                 StringBuffer msg;
                 msg.append("DFS and code generated group info. differs: DFS(").append(grouped ? "grouped" : "ungrouped").append("), CodeGen(").append(grouped ? "ungrouped" : "grouped").append("), using DFS info");
@@ -8119,25 +8119,14 @@ void CHThorDiskReadBaseActivity::gatherInfo(IFileDescriptor * fileDesc)
     }
 
     actualDiskMeta.set(helper.queryDiskRecordSize()->querySerializedDiskMeta());
-    if (grouped)
-        actualDiskMeta.setown(new CSuffixedOutputMeta(+1, actualDiskMeta));
-    if (outputMeta.isFixedSize())
-    {
-        recordsize = outputMeta.getFixedSize();
-        if (grouped)
-            recordsize++;
-    }
-    else
-        recordsize = 0;
     calcFixedDiskRecordSize();
-
-    if(fileDesc)
+    if (fileDesc)
     {
         compressed = fileDesc->isCompressed(&blockcompressed); //try new decompression, fall back to old unless marked as block
-        if(fixedDiskRecordSize)
+        if (fixedDiskRecordSize)
         {
             size32_t dfsSize = fileDesc->queryProperties().getPropInt("@recordSize");
-            if(!((dfsSize == 0) || (dfsSize == fixedDiskRecordSize) || (grouped && (dfsSize+1 == fixedDiskRecordSize)))) //third option for backwards compatibility, as hthor used to publish @recordSize not including the grouping byte
+            if (!((dfsSize == 0) || (dfsSize == fixedDiskRecordSize) || (grouped && (dfsSize+1 == fixedDiskRecordSize)))) //third option for backwards compatibility, as hthor used to publish @recordSize not including the grouping byte
                 throw MakeStringException(0, "Published record size %d for file %s does not match coded record size %d", dfsSize, mangledHelperFileName.str(), fixedDiskRecordSize);
             if (!compressed && (((helper.getFlags() & TDXcompress) != 0) && (fixedDiskRecordSize >= MIN_ROWCOMPRESS_RECSIZE)))
             {
@@ -8196,11 +8185,21 @@ void CHThorDiskReadBaseActivity::closepart()
     inputfile.clear();
 }
 
+
+bool CHThorDiskReadBaseActivity::forceRemote(const RemoteFilename &rfn) const
+{
+    StringBuffer localPath;
+    rfn.getLocalPath(localPath);
+    return testForceRemote(localPath);
+}
+
 bool CHThorDiskReadBaseActivity::openNext()
 {
     offsetOfPart += localOffset;
     localOffset = 0;
     saveOpenExc.clear();
+    actualFilter.clear();
+    unsigned projectedCrc = helper.getFormatCrc();
 
     if (dfsParts||ldFile)
     {
@@ -8226,58 +8225,22 @@ bool CHThorDiskReadBaseActivity::openNext()
                 }
             }
 
+            unsigned actualCrc = 0;
             if (dFile)
             {
                 IPropertyTree &props = dFile->queryAttributes();
-                unsigned thisFormatCrc = props.getPropInt("@formatCrc");
-                if (thisFormatCrc != lastFormatCrc)
-                {
-                    translator.clear();
-                    lastFormatCrc = thisFormatCrc;
-                    if (thisFormatCrc != helper.getFormatCrc() && helper.getFormatCrc() && (helper.getFlags() & TDRnocrccheck) == 0)
-                    {
-                        actualDiskMeta.setown(getDaliLayoutInfo(props));
-                        if (grouped)
-                            actualDiskMeta.setown(new CSuffixedOutputMeta(+1, actualDiskMeta));
-                        if (actualDiskMeta)
-                        {
-                            translator.setown(createRecordTranslator(projectedDiskMeta->queryRecordAccessor(true), actualDiskMeta->queryRecordAccessor(true)));
-                            if (translator->needsTranslate())
-                            {
-                                keyedTranslator.setown(createKeyTranslator(actualDiskMeta->queryRecordAccessor(true), expectedDiskMeta->queryRecordAccessor(true)));
-                                if (translator->canTranslate())
-                                {
-                                    if (agent.rltEnabled()==RecordTranslationMode::None)
-                                    {
-    #ifdef _DEBUG
-                                        translator->describe();
-    #endif
-                                        throw MakeStringException(0, "Translatable key layout mismatch reading file %s but translation disabled", logicalFileName.str());
-                                    }
-                                }
-                                else
-                                    throw MakeStringException(0, "Untranslatable key layout mismatch reading file %s", logicalFileName.str());
-                            }
-                            else
-                                translator.clear();  // MORE - could question why the format appeared to mismatch
-                        }
-                        else
-                            throw MakeStringException(0, "Untranslatable key layout mismatch reading file %s - key layout information not found", logicalFileName.str());
-                    }
-                    else
-                    {
-                        actualDiskMeta.set(helper.queryDiskRecordSize()->querySerializedDiskMeta());
-                        if (grouped)
-                            actualDiskMeta.setown(new CSuffixedOutputMeta(+1, actualDiskMeta));
-                    }
-                }
-            }
+                actualDiskMeta.setown(getDaliLayoutInfo(props));
+                actualCrc = props.getPropInt("@formatCrc");
+            }
+            if (!actualDiskMeta)
+                actualDiskMeta.set(expectedDiskMeta->querySerializedDiskMeta());
+            keyedTranslator.setown(createKeyTranslator(actualDiskMeta->queryRecordAccessor(true), expectedDiskMeta->queryRecordAccessor(true)));
+            if (keyedTranslator && keyedTranslator->needsTranslate())
+                keyedTranslator->translate(actualFilter, fieldFilters);
             else
-            {
-                translator.clear();
-                keyedTranslator.clear();
-            }
-            calcFixedDiskRecordSize();
+                actualFilter.appendFilters(fieldFilters);
+
+            bool canSerializeTypeInfo = actualDiskMeta->queryTypeInfo()->canSerialize() && projectedDiskMeta->queryTypeInfo()->canSerialize();
             for (unsigned copy=0; copy < numCopies; copy++)
             {
                 RemoteFilename rfilename;
@@ -8289,22 +8252,42 @@ bool CHThorDiskReadBaseActivity::openNext()
                 filelist.append('\n').append(file);
                 try
                 {
-                    inputfile.setown(createIFile(rfilename));   
-                    if(compressed)
+                    inputfile.setown(createIFile(rfilename));
+
+                    // NB: only binary handles can be remotely processed by dafilesrv at the moment
+                    if ((rt_binary != readType) || !canSerializeTypeInfo || (rfilename.isLocal() && !forceRemote(rfilename)))
                     {
-                        Owned<IExpander> eexp;
-                        if (encryptionkey.length()!=0) 
-                            eexp.setown(createAESExpander256((size32_t)encryptionkey.length(),encryptionkey.bufferBase()));
-                        inputfileio.setown(createCompressedFileReader(inputfile,eexp));
-                        if(!inputfileio && !blockcompressed) //fall back to old decompression, unless dfs marked as new
+                        if (compressed)
                         {
-                            inputfileio.setown(inputfile->open(IFOread));
-                            if(inputfileio)
-                                rowcompressed = true;
+                            Owned<IExpander> eexp;
+                            if (encryptionkey.length()!=0)
+                                eexp.setown(createAESExpander256((size32_t)encryptionkey.length(),encryptionkey.bufferBase()));
+                            inputfileio.setown(createCompressedFileReader(inputfile,eexp));
+                            if(!inputfileio && !blockcompressed) //fall back to old decompression, unless dfs marked as new
+                            {
+                                inputfileio.setown(inputfile->open(IFOread));
+                                if(inputfileio)
+                                    rowcompressed = true;
+                            }
                         }
+                        else
+                            inputfileio.setown(inputfile->open(IFOread));
                     }
                     else
-                        inputfileio.setown(inputfile->open(IFOread));
+                    {
+                        // Open a stream from remote file, having passed actual, expected, projected, and filters to it
+                        SocketEndpoint ep(rfilename.queryEndpoint());
+                        setDafsEndpointPort(ep);
+                        StringBuffer path;
+                        rfilename.getLocalPath(path);
+                        inputfileio.setown(createRemoteFilteredFile(ep, path, actualDiskMeta, projectedDiskMeta, actualFilter, compressed, grouped));
+                        if (inputfileio)
+                        {
+                            actualDiskMeta.set(projectedDiskMeta);
+                            expectedDiskMeta = projectedDiskMeta;
+                            actualFilter.clear();
+                        }
+                    }
                     if (inputfileio)
                         break;
                 }
@@ -8318,6 +8301,29 @@ bool CHThorDiskReadBaseActivity::openNext()
                 closepart();
             }
 
+            if (projectedCrc && actualCrc != projectedCrc)
+                translator.setown(createRecordTranslator(projectedDiskMeta->queryRecordAccessor(true), actualDiskMeta->queryRecordAccessor(true)));
+            if (translator && translator->needsTranslate())
+            {
+                if (translator->canTranslate())
+                {
+                    if (agent.rltEnabled()==RecordTranslationMode::None)
+                    {
+#ifdef _DEBUG
+                        translator->describe();
+#endif
+                        throw MakeStringException(0, "Translatable key layout mismatch reading file %s but translation disabled", logicalFileName.str());
+                    }
+                }
+                else
+                    throw MakeStringException(0, "Untranslatable key layout mismatch reading file %s", logicalFileName.str());
+            }
+            else
+            {
+                translator.clear();
+                keyedTranslator.clear();
+            }
+            calcFixedDiskRecordSize();
             if (dfsParts)
                 dfsParts->next();
             partNum++;
@@ -8405,7 +8411,7 @@ bool CHThorDiskReadBaseActivity::checkOpenedFile(char const * filename, char con
     saveOpenExc.clear();
     if (filesize)
     {
-        if (!compressed && fixedDiskRecordSize && (filesize % fixedDiskRecordSize) != 0)
+        if (!compressed && fixedDiskRecordSize && ((offset_t)-1 != filesize) && (filesize % fixedDiskRecordSize) != 0)
         {
             StringBuffer s;
             s.append("File ").append(filename).append(" size is ").append(filesize).append(" which is not a multiple of ").append(fixedDiskRecordSize);
@@ -8442,14 +8448,17 @@ CHThorBinaryDiskReadBase::CHThorBinaryDiskReadBase(IAgentContext &_agent, unsign
 : CHThorDiskReadBaseActivity(_agent, _activityId, _subgraphId, _arg, _kind),
   segHelper(_segHelper), prefetchBuffer(NULL)
 {
+    readType = rt_binary;
 }
 
 void CHThorBinaryDiskReadBase::calcFixedDiskRecordSize()
 {
     fixedDiskRecordSize = actualDiskMeta->getFixedSize();
+    if (fixedDiskRecordSize && grouped)
+        fixedDiskRecordSize += 1;
 }
 
-void CHThorBinaryDiskReadBase::append(FFoption option, IFieldFilter * filter)
+void CHThorBinaryDiskReadBase::append(FFoption option, const IFieldFilter * filter)
 {
     if (filter->isWild())
         filter->Release();
@@ -8475,12 +8484,6 @@ bool CHThorBinaryDiskReadBase::openNext()
             PROGLOG("Disk read falling back to legacy decompression routine");
             //in.setown(createRowCompReadSeq(*inputfileiostream, 0, fixedDiskRecordSize));
         }
-        actualFilter.clear();
-        if (keyedTranslator)
-            keyedTranslator->translate(actualFilter, fieldFilters);
-        else
-            actualFilter.appendFilters(fieldFilters);
-
         //Only one of these will actually be used.
         prefetcher.setown(actualDiskMeta->createDiskPrefetcher());
         deserializer.setown(actualDiskMeta->createDiskDeserializer(agent.queryCodeContext(), activityId));
@@ -8569,7 +8572,6 @@ const void *CHThorDiskReadActivity::nextRow()
                     prefetcher->readAhead(prefetchBuffer);
                     const byte * next = prefetchBuffer.queryRow();
                     size32_t sizeRead = prefetchBuffer.queryRowSize();
-                    bool eog = grouped && next[sizeRead-1];
                     size32_t thisSize;
                     if (segMonitorsMatch(next)) // NOTE - keyed fields are checked pre-translation
                     {
@@ -8584,6 +8586,9 @@ const void *CHThorDiskReadActivity::nextRow()
                     }
                     else
                         thisSize = 0;
+                    bool eog = false;
+                    if (grouped)
+                        prefetchBuffer.read(sizeof(eog), &eog);
 
                     prefetchBuffer.finishedRow();
 
@@ -8592,7 +8597,7 @@ const void *CHThorDiskReadActivity::nextRow()
                     {
                         if (grouped)
                             eogPending = eog;
-                        if ((processed - initialProcessed) >=limit)
+                        if ((processed - initialProcessed) >= limit)
                         {
                             outBuilder.clear();
                             if ( agent.queryCodeContext()->queryDebugContext())
@@ -9000,6 +9005,7 @@ const void *CHThorDiskGroupAggregateActivity::nextRow()
 CHThorCsvReadActivity::CHThorCsvReadActivity(IAgentContext &_agent, unsigned _activityId, unsigned _subgraphId, IHThorCsvReadArg &_arg, ThorActivityKind _kind) : CHThorDiskReadBaseActivity(_agent, _activityId, _subgraphId, _arg, _kind), helper(_arg)
 {
     maxRowSize = agent.queryWorkUnit()->getDebugValueInt(OPT_MAXCSVROWSIZE, defaultMaxCsvRowSize) * 1024 * 1024;
+    readType = rt_csv;
 }
 
 CHThorCsvReadActivity::~CHThorCsvReadActivity()
@@ -9139,6 +9145,7 @@ void CHThorCsvReadActivity::checkOpenNext()
 
 CHThorXmlReadActivity::CHThorXmlReadActivity(IAgentContext &_agent, unsigned _activityId, unsigned _subgraphId, IHThorXmlReadArg &_arg, ThorActivityKind _kind) : CHThorDiskReadBaseActivity(_agent, _activityId, _subgraphId, _arg, _kind), helper(_arg)
 {
+    readType = rt_xml;
 }
 
 void CHThorXmlReadActivity::ready()
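
One note on the grouped-read rework above: the end-of-group marker is no longer counted inside the row size; it is consumed as a separate trailing byte once the row has been matched and transformed. A minimal sketch of the framing the prefetch loop now assumes:

    // Grouped flat files interleave a one-byte end-of-group flag after each row:
    //   [row bytes][eog][row bytes][eog]...
    prefetcher->readAhead(prefetchBuffer);
    const byte *next = prefetchBuffer.queryRow();   // the row itself, eog excluded
    bool eog = false;
    if (grouped)
        prefetchBuffer.read(sizeof(eog), &eog);     // then the trailing group marker
    prefetchBuffer.finishedRow();

This is also why calcFixedDiskRecordSize() now adds one byte to fixed-size grouped records.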

+ 6 - 5
ecl/hthor/hthor.ipp

@@ -2239,7 +2239,6 @@ protected:
     Owned<IDistributedFilePartIterator> dfsParts;
     Owned<ILocalOrDistributedFile> ldFile;
     Owned<IException> saveOpenExc;
-    size32_t recordsize;
     size32_t fixedDiskRecordSize;
     Owned<IOutputMetaData> actualDiskMeta;
     IOutputMetaData *expectedDiskMeta;
@@ -2253,7 +2252,8 @@ protected:
     MemoryAttr encryptionkey;
     bool persistent;
     bool grouped;
-    unsigned lastFormatCrc = 0;
+    enum ReadType:byte { rt_unknown, rt_binary, rt_csv, rt_xml } readType = rt_unknown;
+
     unsigned __int64 localOffset;
     unsigned __int64 offsetOfPart;
     StringBuffer mangledHelperFileName;
@@ -2263,6 +2263,8 @@ protected:
     Owned<const IDynamicTransform> translator;
     Owned<const IKeyTranslator> keyedTranslator;
     IPointerArrayOf<IOutputMetaData> actualLayouts;  // Do we need to keep more than one?
+    IConstArrayOf<IFieldFilter> fieldFilters;  // These refer to the expected layout
+    RowFilter actualFilter;               // This refers to the actual disk layout
     void close();
     virtual void open();
     void resolve();
@@ -2279,6 +2281,7 @@ protected:
     {
         agent.reportProgress(NULL);
     }
+    bool forceRemote(const RemoteFilename &rfn) const;
 
 public:
     CHThorDiskReadBaseActivity(IAgentContext &agent, unsigned _activityId, unsigned _subgraphId, IHThorDiskReadBaseArg &_arg, ThorActivityKind _kind);
@@ -2303,8 +2306,6 @@ public:
 class CHThorBinaryDiskReadBase : public CHThorDiskReadBaseActivity, implements IIndexReadContext
 {
 protected:
-    IConstArrayOf<IFieldFilter> fieldFilters;  // These refer to the expected layout
-    RowFilter actualFilter;               // This refers to the actual disk layout
     IHThorCompoundBaseArg & segHelper;
     Owned<ISourceRowPrefetcher> prefetcher;
     Owned<IOutputRowDeserializer> deserializer;
@@ -2319,7 +2320,7 @@ public:
     virtual void append(IKeySegmentMonitor *segment) override { throwUnexpected(); }
     virtual unsigned ordinality() const override { throwUnexpected(); }
     virtual IKeySegmentMonitor *item(unsigned idx) const override { throwUnexpected();  }
-    virtual void append(FFoption option, IFieldFilter * filter) override;
+    virtual void append(FFoption option, const IFieldFilter * filter) override;
 
 protected:
     virtual void verifyRecordFormatCrc() { ::verifyFormatCrcSuper(helper.getFormatCrc(), ldFile?ldFile->queryDistributedFile():NULL, false, true); }

+ 1 - 1
roxie/ccd/ccdactivities.cpp

@@ -928,7 +928,7 @@ public:
         throwUnexpected();
     }
 
-    virtual void append(FFoption option, IFieldFilter * filter)
+    virtual void append(FFoption option, const IFieldFilter * filter)
     {
         if (filter->isWild())
             filter->Release();

+ 1 - 1
roxie/ccd/ccdserver.cpp

@@ -21640,7 +21640,7 @@ public:
         throwUnexpected();
     }
 
-    virtual void append(FFoption option, IFieldFilter * filter)
+    virtual void append(FFoption option, const IFieldFilter * filter)
     {
         if (filter->isWild())
             filter->Release();

+ 85 - 76
rtl/eclrtl/eclhelper_dyn.cpp

@@ -41,8 +41,8 @@ class CDeserializedOutputMetaData : public COutputMetaData
 {
 public:
     CDeserializedOutputMetaData(MemoryBuffer &binInfo, bool isGrouped, IThorIndexCallback *callback);
-    CDeserializedOutputMetaData(IPropertyTree &jsonInfo, IThorIndexCallback *callback);
-    CDeserializedOutputMetaData(const char *json, IThorIndexCallback *callback);
+    CDeserializedOutputMetaData(IPropertyTree &jsonInfo, bool isGrouped, IThorIndexCallback *callback);
+    CDeserializedOutputMetaData(const char *json, bool isGrouped, IThorIndexCallback *callback);
 
     virtual const RtlTypeInfo * queryTypeInfo() const override { return typeInfo; }
     virtual unsigned getMetaFlags() override { return flags; }
@@ -61,16 +61,20 @@ CDeserializedOutputMetaData::CDeserializedOutputMetaData(MemoryBuffer &binInfo,
         flags |= MDFgrouped;
 }
 
-CDeserializedOutputMetaData::CDeserializedOutputMetaData(IPropertyTree &jsonInfo, IThorIndexCallback *callback)
+CDeserializedOutputMetaData::CDeserializedOutputMetaData(IPropertyTree &jsonInfo, bool isGrouped, IThorIndexCallback *callback)
 {
     deserializer.setown(createRtlFieldTypeDeserializer(callback));
     typeInfo = deserializer->deserialize(jsonInfo);
+    if (isGrouped)
+        flags |= MDFgrouped;
 }
 
-CDeserializedOutputMetaData::CDeserializedOutputMetaData(const char *json, IThorIndexCallback *callback)
+CDeserializedOutputMetaData::CDeserializedOutputMetaData(const char *json, bool isGrouped, IThorIndexCallback *callback)
 {
     deserializer.setown(createRtlFieldTypeDeserializer(callback));
     typeInfo = deserializer->deserialize(json);
+    if (isGrouped)
+        flags |= MDFgrouped;
 }
 
 extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(MemoryBuffer &binInfo, bool isGrouped, IThorIndexCallback *callback)
@@ -78,14 +82,14 @@ extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(MemoryBuffer &bi
     return new CDeserializedOutputMetaData(binInfo, isGrouped, callback);
 }
 
-extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(IPropertyTree &jsonInfo, IThorIndexCallback *callback)
+extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(IPropertyTree &jsonInfo, bool isGrouped, IThorIndexCallback *callback)
 {
-    return new CDeserializedOutputMetaData(jsonInfo, callback);
+    return new CDeserializedOutputMetaData(jsonInfo, isGrouped, callback);
 }
 
-extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(const char *json, IThorIndexCallback *callback)
+extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(const char *json, bool isGrouped, IThorIndexCallback *callback)
 {
-    return new CDeserializedOutputMetaData(json, callback);
+    return new CDeserializedOutputMetaData(json, isGrouped, callback);
 }
 //---------------------------------------------------------------------------------------------------------------------
 
@@ -99,10 +103,74 @@ static int compareOffsets(const unsigned *a, const unsigned *b)
         return 1;
 }
 
-class FilterSet
+class ECLRTL_API CDynamicDiskReadArg : public CThorDiskReadArg
+{
+public:
+    CDynamicDiskReadArg(const char *_fileName, IOutputMetaData *_in, IOutputMetaData *_out, unsigned __int64 _chooseN, unsigned __int64 _skipN, unsigned __int64 _rowLimit)
+        : fileName(_fileName), in(_in), out(_out), chooseN(_chooseN), skipN(_skipN), rowLimit(_rowLimit)
+    {
+        translator.setown(createRecordTranslator(out->queryRecordAccessor(true), in->queryRecordAccessor(true)));
+    }
+    virtual bool needTransform() override
+    {
+        return true;
+    }
+    virtual unsigned getFlags() override
+    {
+        return flags;
+    }
+    virtual void createSegmentMonitors(IIndexReadContext *irc) override
+    {
+        filters.createSegmentMonitors(irc);
+    }
+
+    virtual IOutputMetaData * queryOutputMeta() override
+    {
+        return out;
+    }
+    virtual const char * getFileName() override final
+    {
+        return fileName;
+    }
+    virtual IOutputMetaData * queryDiskRecordSize() override final
+    {
+        return in;
+    }
+    virtual IOutputMetaData * queryProjectedDiskRecordSize() override final
+    {
+        return in;
+    }
+    virtual unsigned getFormatCrc() override
+    {
+        return 0;  // engines should treat 0 as 'ignore'
+    }
+    virtual size32_t transform(ARowBuilder & rowBuilder, const void * src) override
+    {
+        return translator->translate(rowBuilder, (const byte *) src);
+    }
+    virtual unsigned __int64 getChooseNLimit() { return chooseN; }
+    virtual unsigned __int64 getRowLimit() { return rowLimit; }
+    void addFilter(const char *filter)
+    {
+        filters.addFilter(in->queryRecordAccessor(true), filter);
+        flags |= TDRkeyed;
+    }
+private:
+    StringAttr fileName;
+    unsigned flags = 0;
+    Owned<IOutputMetaData> in;
+    Owned<IOutputMetaData> out;
+    Owned<const IDynamicTransform> translator;
+    RowFilter filters;
+    unsigned __int64 chooseN = I64C(0x7fffffffffffffff); // constant(s) should be commoned up somewhere
+    unsigned __int64 skipN = 0;
+    unsigned __int64 rowLimit = (unsigned __int64) -1;
+};
+
+class LegacyFilterSet
 {
 public:
-    FilterSet(const RtlRecord &_inrec) : inrec(_inrec)
+    LegacyFilterSet(const RtlRecord &_inrec) : inrec(_inrec)
     {
 
     }
@@ -132,7 +200,12 @@ public:
         if (!epos)
             throw MakeStringException(0, "Invalid filter string: expected = or ~ after fieldname");
         StringBuffer fieldName(epos-filter, filter);
-        unsigned fieldNum = inrec.getFieldNum(fieldName);
+
+        unsigned fieldNum;
+        if (isdigit(fieldName[0]))
+            fieldNum = atoi(fieldName);
+        else
+            fieldNum = inrec.getFieldNum(fieldName);
         if (fieldNum == (unsigned) -1)
             throw MakeStringException(0, "Invalid filter string: field '%s' not recognized", fieldName.str());
         unsigned numOffsets = inrec.getNumVarFields() + 1;
@@ -203,70 +276,6 @@ protected:
     const RtlRecord &inrec;
 };
 
-class ECLRTL_API CDynamicDiskReadArg : public CThorDiskReadArg
-{
-public:
-    CDynamicDiskReadArg(const char *_fileName, IOutputMetaData *_in, IOutputMetaData *_out, unsigned __int64 _chooseN, unsigned __int64 _skipN, unsigned __int64 _rowLimit)
-        : fileName(_fileName), in(_in), out(_out), chooseN(_chooseN), skipN(_skipN), rowLimit(_rowLimit), filters(in->queryRecordAccessor(true))
-    {
-        translator.setown(createRecordTranslator(out->queryRecordAccessor(true), in->queryRecordAccessor(true)));
-    }
-    virtual bool needTransform() override
-    {
-        return true;
-    }
-    virtual unsigned getFlags() override
-    {
-        return flags;
-    }
-    virtual void createSegmentMonitors(IIndexReadContext *irc) override
-    {
-        filters.createSegmentMonitors(irc);
-    }
-
-    virtual IOutputMetaData * queryOutputMeta() override
-    {
-        return out;
-    }
-    virtual const char * getFileName() override final
-    {
-        return fileName;
-    }
-    virtual IOutputMetaData * queryDiskRecordSize() override final
-    {
-        return in;
-    }
-    virtual IOutputMetaData * queryProjectedDiskRecordSize() override final
-    {
-        return in;
-    }
-    virtual unsigned getFormatCrc() override
-    {
-        return 0;  // engines should treat 0 as 'ignore'
-    }
-    virtual size32_t transform(ARowBuilder & rowBuilder, const void * src) override
-    {
-        return translator->translate(rowBuilder, (const byte *) src);
-    }
-    virtual unsigned __int64 getChooseNLimit() { return chooseN; }
-    virtual unsigned __int64 getRowLimit() { return rowLimit; }
-    void addFilter(const char *filter)
-    {
-        filters.addFilter(filter);
-        flags |= TDRkeyed;
-    }
-private:
-    StringAttr fileName;
-    unsigned flags = 0;
-    Owned<IOutputMetaData> in;
-    Owned<IOutputMetaData> out;
-    Owned<const IDynamicTransform> translator;
-    FilterSet filters;
-    unsigned __int64 chooseN = I64C(0x7fffffffffffffff); // constant(s) should be commoned up somewhere
-    unsigned __int64 skipN = 0;
-    unsigned __int64 rowLimit = (unsigned __int64) -1;
-};
-
 class ECLRTL_API CDynamicIndexReadArg : public CThorIndexReadArg, implements IDynamicIndexReadArg
 {
 public:
@@ -330,7 +339,7 @@ private:
     Owned<IOutputMetaData> in;
     Owned<IOutputMetaData> out;
     Owned<const IDynamicTransform> translator;
-    FilterSet filters;
+    LegacyFilterSet filters;
     unsigned __int64 chooseN = I64C(0x7fffffffffffffff); // constant(s) should be commoned up somewhere
     unsigned __int64 skipN = 0;
     unsigned __int64 rowLimit = (unsigned __int64) -1;

+ 2 - 2
rtl/eclrtl/eclhelper_dyn.hpp

@@ -23,8 +23,8 @@
 #include "eclhelper.hpp"
 
 extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(MemoryBuffer &mb, bool isGroupedPersist, IThorIndexCallback *callback);
-extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(IPropertyTree &jsonTree, IThorIndexCallback *callback);
-extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(const char *json, IThorIndexCallback *callback);
+extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(IPropertyTree &jsonTree, bool isGroupedPersist, IThorIndexCallback *callback);
+extern ECLRTL_API IOutputMetaData *createTypeInfoOutputMetaData(const char *json, bool isGroupedPersist, IThorIndexCallback *callback);
 
 interface IDynamicIndexReadArg
 {
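
For illustration (the JSON layout here is a trimmed, made-up variant of the one used in the jhtree unit tests further down), all three createTypeInfoOutputMetaData overloads now take the grouped flag explicitly:

    // Sketch: build an IOutputMetaData from serialized JSON type info, ungrouped.
    const char *json = "{"
                       " \"ty1\": { \"fieldType\": 4, \"length\": 7 },"
                       " \"fieldType\": 13, \"length\": 7,"
                       " \"fields\": [ { \"name\": \"f1\", \"type\": \"ty1\", \"flags\": 4 } ]"
                       "}";
    Owned<IOutputMetaData> meta = createTypeInfoOutputMetaData(json, false, nullptr);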

+ 5 - 3
rtl/eclrtl/rtlds.cpp

@@ -1851,9 +1851,10 @@ bool RtlSimpleIterator::next()
 
 byte * MemoryBufferBuilder::ensureCapacity(size32_t required, const char * fieldName)
 {
+    dbgassertex(buffer);
     if (required > reserved)
     {
-        void * next = buffer.reserve(required-reserved);
+        void * next = buffer->reserve(required-reserved);
         self = (byte *)next - reserved;
         reserved = required;
     }
@@ -1862,9 +1863,10 @@ byte * MemoryBufferBuilder::ensureCapacity(size32_t required, const char * field
 
 void MemoryBufferBuilder::finishRow(size32_t length)
 {
+    dbgassertex(buffer);
     assertex(length <= reserved);
-    size32_t newLength = (buffer.length() - reserved) + length;
-    buffer.setLength(newLength);
+    size32_t newLength = (buffer->length() - reserved) + length;
+    buffer->setLength(newLength);
     self = NULL;
     reserved = 0;
 }

+ 13 - 5
rtl/eclrtl/rtlds_imp.hpp

@@ -669,9 +669,17 @@ class ECLRTL_API MemoryBufferBuilder : public RtlRowBuilderBase
 {
 public:
     MemoryBufferBuilder(MemoryBuffer & _buffer, unsigned _minSize)
-        : buffer(_buffer), minSize(_minSize)
+        : buffer(&_buffer), minSize(_minSize)
     {
-        reserved = 0;
+    }
+
+    MemoryBufferBuilder(unsigned _minSize) : minSize(_minSize)
+    {
+    }
+
+    void setBuffer(MemoryBuffer &_buffer)
+    {
+        buffer = &_buffer;
     }
 
     virtual byte * ensureCapacity(size32_t required, const char * fieldName);
@@ -695,9 +703,9 @@ protected:
     }
 
 protected:
-    MemoryBuffer & buffer;
-    size32_t minSize;
-    size32_t reserved;
+    MemoryBuffer * buffer = nullptr;
+    size32_t minSize = 0;
+    size32_t reserved = 0;
 };
 
 class ECLRTL_API CHThorDictHelper : public IHash, public ICompare
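
A short sketch (illustrative only; minRecordSize and actualRowSize are placeholders) of the deferred-buffer pattern the new constructor enables: the builder can now be created before any MemoryBuffer exists and bound to one later via setBuffer():

    MemoryBufferBuilder builder(minRecordSize);   // no buffer bound yet
    MemoryBuffer rows;
    builder.setBuffer(rows);                      // must precede ensureCapacity/finishRow
    byte *row = builder.ensureCapacity(minRecordSize, nullptr);
    // ... populate the row bytes at 'row' ...
    builder.finishRow(actualRowSize);             // commits actualRowSize bytes into 'rows'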

+ 9 - 6
rtl/eclrtl/rtldynfield.cpp

@@ -242,7 +242,6 @@ private:
         {
             StringBuffer filterText;
             filter->serialize(filterText);
-            addProp("filterField", filter->queryFieldIndex());
             addPropType("filterType", &filter->queryType());
             addProp("filter", filterText);
         }
@@ -440,7 +439,6 @@ private:
         const IFieldFilter * filter = type->queryFilter();
         if (filter)
         {
-            out.appendPacked(filter->queryFieldIndex());
             out.appendPacked(queryTypeIdx(&filter->queryType()));
             filter->serialize(out);
         }
@@ -764,9 +762,14 @@ private:
         }
         if (baseType == type_ifblock)
         {
-            unsigned fieldId = type->getPropInt("filterField");
+            //Filter field needs to be deserialized and the type resolved separately outside the deserialize call
+            //because there isn't a RtlTypeInfo available to resolve the field (since we are currently deserializing it!)
+            const char * filterText = type->queryProp("filter");
+            StringBuffer fieldIdText;
+            readFieldFromFieldFilter(fieldIdText, filterText);
+            unsigned fieldId = atoi(fieldIdText);
             const RtlTypeInfo * fieldType = lookupType(type->queryProp("filterType"), all);
-            info.filter = deserializeFieldFilter(fieldId, *fieldType, type->queryProp("filter"));
+            info.filter = deserializeFieldFilter(fieldId, *fieldType, filterText);
         }
 
         const RtlTypeInfo * result = info.createRtlTypeInfo(callback);
@@ -796,11 +799,11 @@ private:
         unsigned baseType = (info.fieldType & RFTMkind);
         if (baseType == type_ifblock)
         {
-            unsigned fieldId;
-            type.readPacked(fieldId);
             unsigned childIdx;
             type.readPacked(childIdx);
             const RtlTypeInfo * fieldType = lookupType(childIdx);
+            unsigned fieldId;
+            type.readPacked(fieldId);
             info.filter = deserializeFieldFilter(fieldId, *fieldType, type);
         }
 
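
The parsing step this relies on, sketched below: the field index is now embedded in the filter text itself (e.g. "0=[4]" rather than a separate filterField property), so it can be recovered before the filter's type has been resolved:

    // Sketch: recover the field id from an ifblock filter string such as "0=[4]".
    const char *filterText = "0=[4]";
    StringBuffer fieldIdText;
    readFieldFromFieldFilter(fieldIdText, filterText);  // reads up to '=', '*' or ':'
    unsigned fieldId = atoi(fieldIdText);               // 0; filterText now points at "=[4]"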

+ 4 - 2
rtl/eclrtl/rtlkey.hpp

@@ -77,6 +77,7 @@ ECLRTL_API int memcmplittleunsigned(const void *l, const void *r, unsigned size)
 ECLRTL_API int memcmplittlesigned(const void *l, const void *r, unsigned size);
 
 class RtlRow;
+class RtlRecord;
 
 interface IKeySegmentMonitor : public IInterface
 {
@@ -142,7 +143,7 @@ public:
     virtual void append(IKeySegmentMonitor *segment) = 0;
     virtual unsigned ordinality() const = 0;
     virtual IKeySegmentMonitor *item(unsigned idx) const = 0;
-    virtual void append(FFoption option, IFieldFilter * filter) = 0;
+    virtual void append(FFoption option, const IFieldFilter * filter) = 0;
 };
 
 ECLRTL_API IStringSet *createRtlStringSet(size32_t size);
@@ -321,8 +322,9 @@ extern ECLRTL_API IFieldFilter * createWildFieldFilter(unsigned fieldId, const R
 extern ECLRTL_API IFieldFilter * createSubStringFieldFilter(unsigned fieldId, size32_t subLength, IValueSet * values);
 
 extern ECLRTL_API IFieldFilter * deserializeFieldFilter(unsigned fieldId, const RtlTypeInfo & type, const char * src);
+extern ECLRTL_API IFieldFilter * deserializeFieldFilter(const RtlRecord & record, const char * src);
 extern ECLRTL_API IFieldFilter * deserializeFieldFilter(unsigned fieldId, const RtlTypeInfo & type, MemoryBuffer & in);
-
+extern ECLRTL_API void readFieldFromFieldFilter(StringBuffer & fieldText, const char * & src);
 
 
 #endif

+ 39 - 12
rtl/eclrtl/rtlnewkey.cpp

@@ -76,6 +76,11 @@ static void readUntilTerminator(StringBuffer & out, const char * & in, const cha
 }
 
 
+void readFieldFromFieldFilter(StringBuffer & fieldText, const char * & src)
+{
+    readUntilTerminator(fieldText, src, "=*:");
+}
+
 void deserializeSet(ISetCreator & creator, const char * filter)
 {
     while (*filter)
@@ -1225,12 +1230,12 @@ unsigned SetFieldFilter::queryScore() const
 
 StringBuffer & SetFieldFilter::serialize(StringBuffer & out) const
 {
-    out.append('=');
+    out.append(field).append('=');
     return values->serialize(out);
 }
 MemoryBuffer & SetFieldFilter::serialize(MemoryBuffer & out) const
 {
-    out.append('=');
+    out.appendPacked(field).append('=');
     return values->serialize(out);
 }
 
@@ -1287,13 +1292,13 @@ protected:
 
 StringBuffer & SubStringFieldFilter::serialize(StringBuffer & out) const
 {
-    out.append(':').append(subLength).append("=");
+    out.append(field).append(':').append(subLength).append("=");
     return values->serialize(out);
 }
 
 MemoryBuffer & SubStringFieldFilter::serialize(MemoryBuffer & out) const
 {
-    out.append(':').append(subLength);
+    out.appendPacked(field).append(':').append(subLength);
     return values->serialize(out);
 }
 
@@ -1510,12 +1515,12 @@ public:
 
     virtual StringBuffer & serialize(StringBuffer & out) const override
     {
-        return out.append('*');
+        return out.append(field).append('*');
     }
 
     virtual MemoryBuffer & serialize(MemoryBuffer & out) const override
     {
-        return out.append('*');
+        return out.appendPacked(field).append('*');
     }
 };
 
@@ -1569,6 +1574,18 @@ IFieldFilter * deserializeFieldFilter(unsigned fieldId, const RtlTypeInfo & type
     UNIMPLEMENTED_X("Unknown Field Filter");
 }
 
+IFieldFilter * deserializeFieldFilter(const RtlRecord & record, const char * src)
+{
+    StringBuffer fieldText;
+    readFieldFromFieldFilter(fieldText, src);
+    unsigned fieldNum;
+    if (isdigit(fieldText.str()[0]))
+        fieldNum = atoi(fieldText.str());
+    else
+        fieldNum = record.getFieldNum(fieldText);
+    return deserializeFieldFilter(fieldNum, *record.queryType(fieldNum), src);
+}
+
 IFieldFilter * deserializeFieldFilter(unsigned fieldId, const RtlTypeInfo & type, MemoryBuffer & in)
 {
     char kind;
@@ -1625,7 +1642,15 @@ static int compareFieldFilters(IInterface * const * left, IInterface * const * r
 
 void RowFilter::addFilter(const IFieldFilter & filter)
 {
-    //assertex(filter.queryField() == filters.ordinality()); //MORE - fill with wild filters and replace existing wild
+    filters.append(filter);
+    unsigned fieldNum = filter.queryFieldIndex();
+    if (fieldNum >= numFieldsRequired)
+        numFieldsRequired = fieldNum+1;
+}
+
+void RowFilter::addFilter(const RtlRecord & record, const char * filterText)
+{
+    IFieldFilter & filter = *deserializeFieldFilter(record, filterText);
     filters.append(filter);
     unsigned fieldNum = filter.queryFieldIndex();
     if (fieldNum >= numFieldsRequired)
@@ -1651,6 +1676,12 @@ void RowFilter::appendFilters(IConstArrayOf<IFieldFilter> & _filters)
     }
 }
 
+void RowFilter::createSegmentMonitors(IIndexReadContext *irc)
+{
+    ForEachItemIn(i, filters)
+        irc->append(FFkeyed, LINK(&filters.item(i)));
+}
+
 void RowFilter::extractKeyFilter(const RtlRecord & record, IConstArrayOf<IFieldFilter> & keyFilters) const
 {
     if (!filters)
@@ -1754,7 +1785,6 @@ void RowFilter::remapField(unsigned filterIdx, unsigned newFieldNum)
     filters.replace(*filters.item(filterIdx).remap(newFieldNum), filterIdx);
 }
 
-
 //---------------------------------------------------------------------------------------------------------------------
 
 bool RowCursor::setRowForward(const byte * row)
@@ -2730,15 +2760,12 @@ protected:
     {
         StringBuffer str1, str2;
         filter->serialize(str1);
-        Owned<IFieldFilter> clone1 = deserializeFieldFilter(filter->queryFieldIndex(), filter->queryType(), str1);
+        Owned<IFieldFilter> clone1 = deserializeFieldFilter(searchRecord, str1);
         clone1->serialize(str2);
 
         MemoryBuffer mem1, mem2;
-        //Should this be part of the serialize?  There are issues with serializing conditions for ifblocks if it is.
-        mem1.appendPacked(filter->queryFieldIndex());
         filter->serialize(mem1);
         Owned<IFieldFilter> clone2 = deserializeFieldFilter(searchRecord, mem1);
-        mem2.appendPacked(filter->queryFieldIndex());
         clone2->serialize(mem2);
 
         if (!streq(str1, str2))
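
Putting the serialization change together, a small sketch (the record, field names and values are made up) of the new textual form, which prefixes each filter with its field index or name so it can be deserialized against a record with no side-channel:

    // Filters now serialize as <field><op><values>, e.g. "0=[200,1000)".
    const RtlRecord &rec = meta->queryRecordAccessor(true);     // 'meta' assumed in scope
    Owned<IFieldFilter> f = deserializeFieldFilter(rec, "0=[200,1000)");
    RowFilter rf;
    rf.addFilter(rec, "age=[18,65)");   // the field may be named instead of numbered
    rf.createSegmentMonitors(irc);      // pushes FFkeyed filters to an IIndexReadContext 'irc'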

+ 3 - 0
rtl/eclrtl/rtlnewkey.hpp

@@ -28,12 +28,15 @@ BITMASK_ENUM(TransitionMask);
  * The RowFilter class represents a multiple-field filter of a row.
  */
 
+//MORE: This should probably have an RtlRecord member.
 class ECLRTL_API RowFilter
 {
 public:
     void addFilter(const IFieldFilter & filter);
+    void addFilter(const RtlRecord & record, const char * filter);
     bool matches(const RtlRow & row) const;
 
+    void createSegmentMonitors(IIndexReadContext *irc);
     void extractKeyFilter(const RtlRecord & record, IConstArrayOf<IFieldFilter> & keyFilters) const;
     void extractMemKeyFilter(const RtlRecord & record, const UnsignedArray &sortOrder, IConstArrayOf<IFieldFilter> & keyFilters) const;
     unsigned numFilterFields() const { return filters.ordinality(); }

+ 1 - 1
rtl/eclrtl/rtlrecord.cpp

@@ -720,7 +720,7 @@ void RtlRow::lazyCalcOffsets(unsigned _numFieldsUsed) const
     assert(row);
     if (_numFieldsUsed > numFieldsUsed)
     {
-        info.calcRowOffsets(variableOffsets, row, _numFieldsUsed); // MORE - could be optimized t oonly calc ones not previously calculated
+        info.calcRowOffsets(variableOffsets, row, _numFieldsUsed); // MORE - could be optimized to only calc ones not previously calculated
         numFieldsUsed = _numFieldsUsed;
     }
 }

+ 4 - 4
system/jhtree/jhtree.cpp

@@ -302,7 +302,7 @@ void SegMonitorList::append(IKeySegmentMonitor *segment)
     segMonitors.append(*segment);
 }
 
-void SegMonitorList::append(FFoption option, IFieldFilter * filter)
+void SegMonitorList::append(FFoption option, const IFieldFilter * filter)
 {
     UNIMPLEMENTED;
 }
@@ -603,7 +603,7 @@ public:
     }
 
 
-    virtual void append(FFoption option, IFieldFilter * filter)
+    virtual void append(FFoption option, const IFieldFilter * filter)
     {
         UNIMPLEMENTED;
     }
@@ -2988,7 +2988,7 @@ class IKeyManagerTest : public CppUnit::TestFixture
                                " { \"name\": \"f1\", \"type\": \"ty1\", \"flags\": 4 }, "
                                " { \"name\": \"f2\", \"type\": \"ty2\", \"flags\": 4 } ] "
                                "}";
-            Owned<IOutputMetaData> meta = createTypeInfoOutputMetaData(json, nullptr);
+            Owned<IOutputMetaData> meta = createTypeInfoOutputMetaData(json, false, nullptr);
             Owned <IKeyManager> tlk1 = createKeyMerger(meta->queryRecordAccessor(true), keyset, 7, NULL);
             Owned<IStringSet> sset1 = createStringSet(7);
             sset1->addRange("0000003", "0000003");
@@ -3180,7 +3180,7 @@ protected:
                 " { \"name\": \"f1\", \"type\": \"ty1\", \"flags\": 4 }, "
                 " ] "
                 "}";
-        Owned<IOutputMetaData> meta = createTypeInfoOutputMetaData(json, nullptr);
+        Owned<IOutputMetaData> meta = createTypeInfoOutputMetaData(json, false, nullptr);
         const RtlRecord &recInfo = meta->queryRecordAccessor(true);
         buildTestKeys(variable);
         {

+ 1 - 1
system/jhtree/jhtree.hpp

@@ -200,7 +200,7 @@ public:
     virtual void append(IKeySegmentMonitor *segment) override;
     virtual unsigned ordinality() const override;
     virtual IKeySegmentMonitor *item(unsigned i) const override;
-    virtual void append(FFoption option, IFieldFilter * filter) override;
+    virtual void append(FFoption option, const IFieldFilter * filter) override;
 };
 
 interface IKeyManager : public IInterface, extends IIndexReadContext

+ 160 - 147
testing/regress/ecl/key/serializeifblocks.xml

@@ -7,9 +7,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[4]&quot;,
+  &quot;filter&quot;: &quot;0=[4]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -42,9 +41,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[4]&quot;,
+  &quot;filter&quot;: &quot;0=[4]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -72,7 +70,10 @@
  <Row><Result_3>true</Result_3></Row>
 </Dataset>
 <Dataset name='Result 4'>
- <Row><Result_4>{
+ <Row><Result_4>true</Result_4></Row>
+</Dataset>
+<Dataset name='Result 5'>
+ <Row><Result_5>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -80,9 +81,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=(4,10),[200,250]&quot;,
+  &quot;filter&quot;: &quot;0=(4,10),[200,250]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -104,10 +104,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_4></Row>
+}</Result_5></Row>
 </Dataset>
-<Dataset name='Result 5'>
- <Row><Result_5>{
+<Dataset name='Result 6'>
+ <Row><Result_6>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -115,9 +115,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=(4,10),[200,250]&quot;,
+  &quot;filter&quot;: &quot;0=(4,10),[200,250]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -139,13 +138,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_5></Row>
-</Dataset>
-<Dataset name='Result 6'>
- <Row><Result_6>true</Result_6></Row>
+}</Result_6></Row>
 </Dataset>
 <Dataset name='Result 7'>
- <Row><Result_7>{
+ <Row><Result_7>true</Result_7></Row>
+</Dataset>
+<Dataset name='Result 8'>
+ <Row><Result_8>true</Result_8></Row>
+</Dataset>
+<Dataset name='Result 9'>
+ <Row><Result_9>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -153,9 +155,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[200,1000)&quot;,
+  &quot;filter&quot;: &quot;0=[200,1000)&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -177,10 +178,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_7></Row>
+}</Result_9></Row>
 </Dataset>
-<Dataset name='Result 8'>
- <Row><Result_8>{
+<Dataset name='Result 10'>
+ <Row><Result_10>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -188,9 +189,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[200,1000)&quot;,
+  &quot;filter&quot;: &quot;0=[200,1000)&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -212,13 +212,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_8></Row>
+}</Result_10></Row>
 </Dataset>
-<Dataset name='Result 9'>
- <Row><Result_9>true</Result_9></Row>
+<Dataset name='Result 11'>
+ <Row><Result_11>true</Result_11></Row>
 </Dataset>
-<Dataset name='Result 10'>
- <Row><Result_10>{
+<Dataset name='Result 12'>
+ <Row><Result_12>true</Result_12></Row>
+</Dataset>
+<Dataset name='Result 13'>
+ <Row><Result_13>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -226,9 +229,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[1],[2],[3],[99],[1000]&quot;,
+  &quot;filter&quot;: &quot;0=[1],[2],[3],[99],[1000]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -250,10 +252,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_10></Row>
+}</Result_13></Row>
 </Dataset>
-<Dataset name='Result 11'>
- <Row><Result_11>{
+<Dataset name='Result 14'>
+ <Row><Result_14>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -261,9 +263,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[1],[2],[3],[99],[1000]&quot;,
+  &quot;filter&quot;: &quot;0=[1],[2],[3],[99],[1000]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -285,13 +286,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_11></Row>
+}</Result_14></Row>
 </Dataset>
-<Dataset name='Result 12'>
- <Row><Result_12>true</Result_12></Row>
+<Dataset name='Result 15'>
+ <Row><Result_15>true</Result_15></Row>
 </Dataset>
-<Dataset name='Result 13'>
- <Row><Result_13>{
+<Dataset name='Result 16'>
+ <Row><Result_16>true</Result_16></Row>
+</Dataset>
+<Dataset name='Result 17'>
+ <Row><Result_17>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -303,9 +307,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;a &apos;],[&apos;b &apos;],[&apos;c &apos;],[&apos;f &apos;,&apos;k &apos;),[&apos;xx&apos;]&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;a &apos;],[&apos;b &apos;],[&apos;c &apos;],[&apos;f &apos;,&apos;k &apos;),[&apos;xx&apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -327,10 +330,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_13></Row>
+}</Result_17></Row>
 </Dataset>
-<Dataset name='Result 14'>
- <Row><Result_14>{
+<Dataset name='Result 18'>
+ <Row><Result_18>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -342,9 +345,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;a &apos;],[&apos;b &apos;],[&apos;c &apos;],[&apos;f &apos;,&apos;k &apos;),[&apos;xx&apos;]&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;a &apos;],[&apos;b &apos;],[&apos;c &apos;],[&apos;f &apos;,&apos;k &apos;),[&apos;xx&apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -366,13 +368,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_14></Row>
+}</Result_18></Row>
 </Dataset>
-<Dataset name='Result 15'>
- <Row><Result_15>true</Result_15></Row>
+<Dataset name='Result 19'>
+ <Row><Result_19>true</Result_19></Row>
 </Dataset>
-<Dataset name='Result 16'>
- <Row><Result_16>{
+<Dataset name='Result 20'>
+ <Row><Result_20>true</Result_20></Row>
+</Dataset>
+<Dataset name='Result 21'>
+ <Row><Result_21>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 1028,
   &quot;length&quot;: 0
@@ -384,9 +389,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;\\&apos;&apos;],[&apos;a&apos;],[&apos;b &apos;],[&apos;bxx&apos;],[&apos;faa&apos;,&apos;kaa&apos;],(&apos;xxa&apos;,&apos;xyz&apos;)&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;\\&apos;&apos;],[&apos;a&apos;],[&apos;b &apos;],[&apos;bxx&apos;],[&apos;faa&apos;,&apos;kaa&apos;],(&apos;xxa&apos;,&apos;xyz&apos;)&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -408,10 +412,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_16></Row>
+}</Result_21></Row>
 </Dataset>
-<Dataset name='Result 17'>
- <Row><Result_17>{
+<Dataset name='Result 22'>
+ <Row><Result_22>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 1028,
   &quot;length&quot;: 0
@@ -423,9 +427,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;\\&apos;&apos;],[&apos;a&apos;],[&apos;b &apos;],[&apos;bxx&apos;],[&apos;faa&apos;,&apos;kaa&apos;],(&apos;xxa&apos;,&apos;xyz&apos;)&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;\\&apos;&apos;],[&apos;a&apos;],[&apos;b &apos;],[&apos;bxx&apos;],[&apos;faa&apos;,&apos;kaa&apos;],(&apos;xxa&apos;,&apos;xyz&apos;)&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -447,13 +450,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_17></Row>
+}</Result_22></Row>
 </Dataset>
-<Dataset name='Result 18'>
- <Row><Result_18>true</Result_18></Row>
+<Dataset name='Result 23'>
+ <Row><Result_23>true</Result_23></Row>
 </Dataset>
-<Dataset name='Result 19'>
- <Row><Result_19>{
+<Dataset name='Result 24'>
+ <Row><Result_24>true</Result_24></Row>
+</Dataset>
+<Dataset name='Result 25'>
+ <Row><Result_25>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -465,9 +471,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;a &apos;],[&apos;b &apos;],(&apos;fa&apos;,&apos;ka&apos;],(&apos;xx&apos;,&apos;xy&apos;]&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;a &apos;],[&apos;b &apos;],(&apos;fa&apos;,&apos;ka&apos;],(&apos;xx&apos;,&apos;xy&apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -489,10 +494,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_19></Row>
+}</Result_25></Row>
 </Dataset>
-<Dataset name='Result 20'>
- <Row><Result_20>{
+<Dataset name='Result 26'>
+ <Row><Result_26>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -504,9 +509,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;a &apos;],[&apos;b &apos;],(&apos;fa&apos;,&apos;ka&apos;],(&apos;xx&apos;,&apos;xy&apos;]&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;a &apos;],[&apos;b &apos;],(&apos;fa&apos;,&apos;ka&apos;],(&apos;xx&apos;,&apos;xy&apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -528,13 +532,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_20></Row>
+}</Result_26></Row>
 </Dataset>
-<Dataset name='Result 21'>
- <Row><Result_21>true</Result_21></Row>
+<Dataset name='Result 27'>
+ <Row><Result_27>true</Result_27></Row>
 </Dataset>
-<Dataset name='Result 22'>
- <Row><Result_22>{
+<Dataset name='Result 28'>
+ <Row><Result_28>true</Result_28></Row>
+</Dataset>
+<Dataset name='Result 29'>
+ <Row><Result_29>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -546,9 +553,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=(&apos;fa&apos;,&apos;ka&apos;]&quot;,
+  &quot;filter&quot;: &quot;0=(&apos;fa&apos;,&apos;ka&apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -570,10 +576,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_22></Row>
+}</Result_29></Row>
 </Dataset>
-<Dataset name='Result 23'>
- <Row><Result_23>{
+<Dataset name='Result 30'>
+ <Row><Result_30>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -585,9 +591,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=(&apos;fa&apos;,&apos;ka&apos;]&quot;,
+  &quot;filter&quot;: &quot;0=(&apos;fa&apos;,&apos;ka&apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -609,13 +614,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_23></Row>
+}</Result_30></Row>
 </Dataset>
-<Dataset name='Result 24'>
- <Row><Result_24>true</Result_24></Row>
+<Dataset name='Result 31'>
+ <Row><Result_31>true</Result_31></Row>
 </Dataset>
-<Dataset name='Result 25'>
- <Row><Result_25>{
+<Dataset name='Result 32'>
+ <Row><Result_32>true</Result_32></Row>
+</Dataset>
+<Dataset name='Result 33'>
+ <Row><Result_33>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -627,9 +635,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;a &apos;],[&apos;b &apos;]&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;a &apos;],[&apos;b &apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -651,10 +658,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_25></Row>
+}</Result_33></Row>
 </Dataset>
-<Dataset name='Result 26'>
- <Row><Result_26>{
+<Dataset name='Result 34'>
+ <Row><Result_34>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -666,9 +673,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;a &apos;],[&apos;b &apos;]&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;a &apos;],[&apos;b &apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -690,13 +696,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_26></Row>
+}</Result_34></Row>
 </Dataset>
-<Dataset name='Result 27'>
- <Row><Result_27>true</Result_27></Row>
+<Dataset name='Result 35'>
+ <Row><Result_35>true</Result_35></Row>
 </Dataset>
-<Dataset name='Result 28'>
- <Row><Result_28>{
+<Dataset name='Result 36'>
+ <Row><Result_36>true</Result_36></Row>
+</Dataset>
+<Dataset name='Result 37'>
+ <Row><Result_37>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -708,9 +717,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=(&apos;ax&apos;,&apos;bx&apos;],(&apos;dx&apos;,&apos;ex&apos;]&quot;,
+  &quot;filter&quot;: &quot;0=(&apos;ax&apos;,&apos;bx&apos;],(&apos;dx&apos;,&apos;ex&apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -732,10 +740,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_28></Row>
+}</Result_37></Row>
 </Dataset>
-<Dataset name='Result 29'>
- <Row><Result_29>{
+<Dataset name='Result 38'>
+ <Row><Result_38>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -747,9 +755,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=(&apos;ax&apos;,&apos;bx&apos;],(&apos;dx&apos;,&apos;ex&apos;]&quot;,
+  &quot;filter&quot;: &quot;0=(&apos;ax&apos;,&apos;bx&apos;],(&apos;dx&apos;,&apos;ex&apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -771,13 +778,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_29></Row>
+}</Result_38></Row>
 </Dataset>
-<Dataset name='Result 30'>
- <Row><Result_30>true</Result_30></Row>
+<Dataset name='Result 39'>
+ <Row><Result_39>true</Result_39></Row>
 </Dataset>
-<Dataset name='Result 31'>
- <Row><Result_31>{
+<Dataset name='Result 40'>
+ <Row><Result_40>true</Result_40></Row>
+</Dataset>
+<Dataset name='Result 41'>
+ <Row><Result_41>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -789,9 +799,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;a &apos;],[&apos;b &apos;]&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;a &apos;],[&apos;b &apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -813,10 +822,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_31></Row>
+}</Result_41></Row>
 </Dataset>
-<Dataset name='Result 32'>
- <Row><Result_32>{
+<Dataset name='Result 42'>
+ <Row><Result_42>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 4,
   &quot;length&quot;: 2
@@ -828,9 +837,8 @@
  &quot;ty3&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[&apos;a &apos;],[&apos;b &apos;]&quot;,
+  &quot;filter&quot;: &quot;0=[&apos;a &apos;],[&apos;b &apos;]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -852,13 +860,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_32></Row>
+}</Result_42></Row>
 </Dataset>
-<Dataset name='Result 33'>
- <Row><Result_33>true</Result_33></Row>
+<Dataset name='Result 43'>
+ <Row><Result_43>true</Result_43></Row>
 </Dataset>
-<Dataset name='Result 34'>
- <Row><Result_34>{
+<Dataset name='Result 44'>
+ <Row><Result_44>true</Result_44></Row>
+</Dataset>
+<Dataset name='Result 45'>
+ <Row><Result_45>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -866,9 +877,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[4]&quot;,
+  &quot;filter&quot;: &quot;0=[4]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -890,10 +900,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_34></Row>
+}</Result_45></Row>
 </Dataset>
-<Dataset name='Result 35'>
- <Row><Result_35>{
+<Dataset name='Result 46'>
+ <Row><Result_46>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -901,9 +911,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=[4]&quot;,
+  &quot;filter&quot;: &quot;0=[4]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -925,13 +934,16 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_35></Row>
+}</Result_46></Row>
 </Dataset>
-<Dataset name='Result 36'>
- <Row><Result_36>true</Result_36></Row>
+<Dataset name='Result 47'>
+ <Row><Result_47>true</Result_47></Row>
 </Dataset>
-<Dataset name='Result 37'>
- <Row><Result_37>{
+<Dataset name='Result 48'>
+ <Row><Result_48>true</Result_48></Row>
+</Dataset>
+<Dataset name='Result 49'>
+ <Row><Result_49>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -939,9 +951,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=(4,10),[200,250]&quot;,
+  &quot;filter&quot;: &quot;0=(4,10),[200,250]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -963,10 +974,10 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_37></Row>
+}</Result_49></Row>
 </Dataset>
-<Dataset name='Result 38'>
- <Row><Result_38>{
+<Dataset name='Result 50'>
+ <Row><Result_50>{
  &quot;ty1&quot;: {
   &quot;fieldType&quot;: 257,
   &quot;length&quot;: 8
@@ -974,9 +985,8 @@
  &quot;ty2&quot;: {
   &quot;fieldType&quot;: 1067,
   &quot;length&quot;: 0,
-  &quot;filterField&quot;: 0,
   &quot;filterType&quot;: &quot;ty1&quot;,
-  &quot;filter&quot;: &quot;=(4,10),[200,250]&quot;,
+  &quot;filter&quot;: &quot;0=(4,10),[200,250]&quot;,
   &quot;fields&quot;: [
    {
     &quot;name&quot;: &quot;extra&quot;,
@@ -998,8 +1008,11 @@
    &quot;flags&quot;: 1067
   }
  ]
-}</Result_38></Row>
+}</Result_50></Row>
 </Dataset>
-<Dataset name='Result 39'>
- <Row><Result_39>true</Result_39></Row>
+<Dataset name='Result 51'>
+ <Row><Result_51>true</Result_51></Row>
+</Dataset>
+<Dataset name='Result 52'>
+ <Row><Result_52>true</Result_52></Row>
 </Dataset>
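Two patterns run through all of the expected-result hunks above. First, the separate "filterField": 0 attribute disappears and the field index is folded into the filter string itself, so "=['a '],['b ']" becomes "0=['a '],['b ']". Second, the result numbers shift (Result 16 becomes 21, 17 becomes 22, and so on, with the gap growing by one per test) because each TEST macro now emits one extra boolean row — see the serializeifblocks.ecl hunk below. A minimal sketch of the new filter text form, assuming a hypothetical serializeFieldFilter() helper rather than the actual dafilesrv serializer:

    // Hedged sketch (not the HPCC implementation): render a field filter with
    // its field index folded into the text, matching the "0=[...]" strings in
    // the updated key file above.
    #include <cstdio>
    #include <string>

    static std::string serializeFieldFilter(unsigned fieldIdx, const std::string &ranges)
    {
        // Old form: index in a separate "filterField" attribute, text "=<ranges>".
        // New form: index prefixed onto the text, "<idx>=<ranges>".
        return std::to_string(fieldIdx) + "=" + ranges;
    }

    int main()
    {
        // Prints "0=['a '],['b ']", as seen in the expected results.
        printf("%s\n", serializeFieldFilter(0, "['a '],['b ']").c_str());
        return 0;
    }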

File diff suppressed because it is too large
+ 8 - 14
testing/regress/ecl/key/serializetypes.xml


+ 1 - 0
testing/regress/ecl/serializeifblocks.ecl

@@ -146,6 +146,7 @@ TEST(REC) := MACRO
 s.dumpRecordType(_empty_(REC)[1]);
 s.dumpRecordTypeNF(_empty_(REC)[1]);
 s.dumpRecordType(_empty_(REC)[1]) = s.dumpRecordTypeNF(_empty_(REC)[1]);
+s.serializeRecordType(_empty_(REC)[1]) = s.serializeRecordTypeNF(_empty_(REC)[1]);
 ENDMACRO;
 
 TEST(r1);
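The added macro line compares the serialized record type with its "NF" counterpart (presumably the no-filter-fields variant, mirroring the existing dumpRecordType/dumpRecordTypeNF pair). In ECL an expression used as an action is implicitly output, so each TEST invocation now produces one extra boolean result row — which is exactly the additional <Result_NN>true</Result_NN> datasets and the renumbering seen in the key-file hunks above.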

+ 1 - 1
thorlcr/activities/diskread/thdiskreadslave.cpp

@@ -93,7 +93,7 @@ public:
         }
     }
 
-    virtual void append(FFoption option, IFieldFilter * filter)
+    virtual void append(FFoption option, const IFieldFilter * filter)
     {
         if (filter->isWild())
             filter->Release();
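This hunk only const-qualifies the filter parameter to match the const IFieldFilter interfaces used elsewhere in the commit; the ownership contract is unchanged — append() takes a linked filter and releases it immediately when it is wild, since a match-everything filter adds no selectivity. A simplified sketch of that contract, using stand-in types rather than the HPCC classes (jlib-style reference counting is const, so Release() through a const pointer compiles):

    // Hedged sketch of the ownership contract implied by the hunk above.
    #include <vector>

    struct IFieldFilter
    {
        virtual ~IFieldFilter() {}
        virtual bool isWild() const = 0;
        virtual void Release() const = 0;   // const ref-counting, jlib style
    };

    struct FilterList
    {
        std::vector<const IFieldFilter *> filters;

        void append(const IFieldFilter *filter)
        {
            if (filter->isWild())
                filter->Release();          // discard: filtering on it is pointless
            else
                filters.push_back(filter);  // keep the link we were given
        }
    };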

+ 2 - 2
tools/testsocket/testsocket.cpp

@@ -358,7 +358,7 @@ int readResults(ISocket * socket, bool readBlocked, bool useHTTP, StringBuffer &
                 throw MakeStringException(0, "Unknown output format: %s", outputFmtStr);
             unsigned cursorHandle;
             if (responseTree)
-                cursorHandle = responseTree->getPropInt("cursor");
+                cursorHandle = responseTree->getPropInt("handle");
             else
                 mb.read(cursorHandle);
             bool retrySend = false;
@@ -398,7 +398,7 @@ int readResults(ISocket * socket, bool readBlocked, bool useHTTP, StringBuffer &
                     cursorHandle = NotFound; // fake that it's a handle dafilesrv doesn't know about
 
                 Owned<IPropertyTree> requestTree = createPTree();
-                requestTree->setPropInt("cursor", cursorHandle);
+                requestTree->setPropInt("handle", cursorHandle);
 
                 // Only the handle is needed for continuation, but this tests the behaviour of some clients which may send cursor per request (e.g. to refresh)
                 if (remoteStreamSendCursor)
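Both testsocket hunks are the same rename: the continuation property in the request/response trees changes from "cursor" to "handle". The client echoes back the handle dafilesrv returned so the server can resume the stream from its cached cursor. A minimal sketch of that round trip, using only the jlib calls visible in the hunks above; sendRequest() is a hypothetical stand-in for the socket write:

    // Hedged sketch of the continuation handshake exercised by testsocket.
    #include "jptree.hpp"

    void continueRead(IPropertyTree *responseTree)
    {
        unsigned cursorHandle = responseTree->getPropInt("handle");

        Owned<IPropertyTree> requestTree = createPTree();
        requestTree->setPropInt("handle", cursorHandle); // resume server-side cursor
        // sendRequest(requestTree); // hypothetical: serialize and write to dafilesrv
    }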