
Merge remote-tracking branch 'origin/closedown-5.0.x'

Signed-off-by: Richard Chapman <rchapman@hpccsystems.com>
Richard Chapman 11 years ago
parent
commit 42ca965ffb
49 changed files with 2975 additions and 464 deletions
  1. common/thorhelper/roxiehelper.cpp  +1 -1
  2. common/thorhelper/roxiehelper.hpp  +1 -1
  3. common/workunit/workunit.cpp  +16 -7
  4. common/workunit/workunit.hpp  +1 -1
  5. dali/base/dadfs.cpp  +29 -29
  6. dali/base/dadfs.hpp  +2 -2
  7. docs/Installing_and_RunningTheHPCCPlatform/Installing_and_RunningTheHPCCPlatform.xml  +4 -0
  8. ecl/eclccserver/eclccserver.cpp  +3 -1
  9. ecl/hql/hqlerror.cpp  +19 -11
  10. ecl/hql/hqlerror.hpp  +4 -4
  11. ecl/hql/hqlgram2.cpp  +8 -3
  12. esp/build.sh  +11 -10
  13. esp/eclwatch/ws_XSLT/dfu.xslt  +18 -18
  14. esp/files/req_array.js  +3 -0
  15. esp/scm/ws_dfu.ecm  +7 -4
  16. esp/scm/ws_workunits.ecm  +1 -0
  17. esp/services/ws_dfu/ws_dfuService.cpp  +77 -94
  18. esp/services/ws_dfu/ws_dfuService.hpp  +1 -1
  19. esp/services/ws_ecl/ws_ecl_service.cpp  +1 -1
  20. esp/services/ws_fs/CMakeLists.txt  +2 -0
  21. esp/services/ws_fs/ws_fsBinding.cpp  +15 -1
  22. esp/services/ws_fs/ws_fsService.cpp  +38 -34
  23. esp/services/ws_workunits/ws_workunitsHelpers.cpp  +4 -4
  24. esp/services/ws_workunits/ws_workunitsHelpers.hpp  +11 -3
  25. esp/services/ws_workunits/ws_workunitsQuerySets.cpp  +41 -5
  26. esp/services/ws_workunits/ws_workunitsService.cpp  +203 -189
  27. esp/services/ws_workunits/ws_workunitsService.hpp  +1 -0
  28. esp/src/eclwatch/QuerySetSuperFilesWidget.js  +1 -1
  29. esp/src/eclwatch/dojoConfig.js  +2 -1
  30. esp/src/eclwatch/nls/bs/hpcc.js  +3 -1
  31. esp/src/eclwatch/nls/es/hpcc.js  +12 -8
  32. esp/src/eclwatch/nls/hpcc.js  +1 -1
  33. esp/src/eclwatch/nls/hr/hpcc.js  +3 -1
  34. esp/src/eclwatch/nls/hu/hpcc.js  +9 -7
  35. esp/src/eclwatch/nls/sr/hpcc.js  +3 -1
  36. esp/src/eclwatch/nls/zh/hpcc.js  +3 -1
  37. esp/src/eclwatch/package.js  +2 -0
  38. esp/src/eclwatch/templates/DFUQueryWidget.html  +2 -2
  39. esp/xslt/wsecl3_form.xsl  +5 -2
  40. initfiles/bash/etc/init.d/hpcc-init.in  +22 -1
  41. roxie/ccd/ccdlistener.cpp  +1 -1
  42. system/jlib/jfile.cpp  +1 -1
  43. system/jlib/jptree.cpp  +9 -8
  44. system/jlib/jptree.hpp  +1 -0
  45. system/jlib/jthread.cpp  +1 -1
  46. testing/regress/ecl-test  +1 -2
  47. testing/regress/hpcc/util/argparse.py  +2361 -0
  48. testing/regress/hpcc/util/ecl/command.py  +7 -0
  49. testing/regress/hpcc/util/ecl/file.py  +3 -0

+ 1 - 1
common/thorhelper/roxiehelper.cpp

@@ -1102,7 +1102,7 @@ void FlushingJsonBuffer::encodeXML(const char *x, unsigned flags, unsigned len,
     appendJSONStringValue(s, NULL, len, x, true);
 }
 
-void FlushingJsonBuffer::startDataset(const char *elementName, const char *resultName, unsigned sequence, bool _extend)
+void FlushingJsonBuffer::startDataset(const char *elementName, const char *resultName, unsigned sequence, bool _extend, const IProperties *xmlns)
 {
     CriticalBlock b(crit);
     extend = _extend;

+ 1 - 1
common/thorhelper/roxiehelper.hpp

@@ -174,7 +174,7 @@ public:
     }
 
     void encodeXML(const char *x, unsigned flags=0, unsigned len=(unsigned)-1, bool utf8=false);
-    void startDataset(const char *elementName, const char *resultName, unsigned sequence, bool _extend = false);
+    void startDataset(const char *elementName, const char *resultName, unsigned sequence, bool _extend = false, const IProperties *xmlns=NULL);
     void startScalar(const char *resultName, unsigned sequence);
 };
 

+ 16 - 7
common/workunit/workunit.cpp

@@ -2450,12 +2450,13 @@ public:
     }
 
     IConstQuerySetQueryIterator* getQuerySetQueriesSorted( WUQuerySortField *sortorder, // list of fields to sort by (terminated by WUSFterm)
-                                                WUQuerySortField *filters,   // NULL or list of fields to folteron (terminated by WUSFterm)
+                                                WUQuerySortField *filters,   // NULL or list of fields to filter on (terminated by WUSFterm)
                                                 const void *filterbuf,  // (appended) string values for filters
                                                 unsigned startoffset,
                                                 unsigned maxnum,
                                                 __int64 *cachehint,
-                                                unsigned *total)
+                                                unsigned *total,
+                                                const MapStringTo<bool> *_subset)
     {
         struct PostFilters
         {
@@ -2475,6 +2476,7 @@ public:
             StringAttr sortOrder;
             PostFilters postFilters;
             StringArray unknownAttributes;
+            const MapStringTo<bool> *subset;
 
             void populateQueryTree(IPropertyTree* queryRegistry, const char* querySetId, IPropertyTree* querySetTree, const char *xPath, IPropertyTree* queryTree)
             {
@@ -2487,6 +2489,12 @@ public:
                     const char* queryId = query.queryProp("@id");
                     if (queryId && *queryId)
                     {
+                        if (subset)
+                        {
+                            VStringBuffer match("%s/%s", querySetId, queryId);
+                            if (!subset->getValue(match))
+                                continue;
+                        }
                         VStringBuffer xPath("Alias[@id='%s']", queryId);
                         IPropertyTree *alias = queryRegistry->queryPropTree(xPath.str());
                         if (alias)
@@ -2538,8 +2546,8 @@ public:
         public:
             IMPLEMENT_IINTERFACE_USING(CSimpleInterface);
 
-            CQuerySetQueriesPager(const char* _querySet, const char* _xPath, const char *_sortOrder, PostFilters& _postFilters, StringArray& _unknownAttributes)
-                : querySet(_querySet), xPath(_xPath), sortOrder(_sortOrder)
+            CQuerySetQueriesPager(const char* _querySet, const char* _xPath, const char *_sortOrder, PostFilters& _postFilters, StringArray& _unknownAttributes, const MapStringTo<bool> *_subset)
+                : querySet(_querySet), xPath(_xPath), sortOrder(_sortOrder), subset(_subset)
             {
                 postFilters.activatedFilter = _postFilters.activatedFilter;
                 postFilters.suspendedByUserFilter = _postFilters.suspendedByUserFilter;
@@ -2611,7 +2619,7 @@ public:
             }
         }
         IArrayOf<IPropertyTree> results;
-        Owned<IElementsPager> elementsPager = new CQuerySetQueriesPager(querySet.get(), xPath.str(), so.length()?so.str():NULL, postFilters, unknownAttributes);
+        Owned<IElementsPager> elementsPager = new CQuerySetQueriesPager(querySet.get(), xPath.str(), so.length()?so.str():NULL, postFilters, unknownAttributes, _subset);
         Owned<IRemoteConnection> conn=getElementsPaged(elementsPager,startoffset,maxnum,NULL,"",cachehint,results,total);
         return new CConstQuerySetQueryIterator(results);
     }
@@ -2887,9 +2895,10 @@ public:
                                                 unsigned startoffset,
                                                 unsigned maxnum,
                                                 __int64 *cachehint,
-                                                unsigned *total)
+                                                unsigned *total,
+                                                const MapStringTo<bool> *subset)
     {
-        return factory->getQuerySetQueriesSorted(sortorder,filters,filterbuf,startoffset,maxnum,cachehint,total);
+        return factory->getQuerySetQueriesSorted(sortorder,filters,filterbuf,startoffset,maxnum,cachehint,total, subset);
     }
 
     virtual unsigned numWorkUnits()
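
Note on the workunit.cpp change above: the new optional subset argument lets getQuerySetQueriesSorted return only queries whose "querySetId/queryId" key appears in a caller-supplied map. A minimal sketch of that membership test, using std::unordered_set as a stand-in for jlib's MapStringTo<bool> (names here are illustrative, not the HPCC API):

    #include <string>
    #include <unordered_set>

    // Keep a query only if "querySetId/queryId" is in the requested subset,
    // or if no subset filter was supplied at all.
    static bool keepQuery(const std::unordered_set<std::string> *subset,
                          const std::string &querySetId,
                          const std::string &queryId)
    {
        if (!subset)                 // no filter: keep everything
            return true;
        return subset->count(querySetId + "/" + queryId) != 0;
    }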

+ 1 - 1
common/workunit/workunit.hpp

@@ -1186,7 +1186,7 @@ interface IWorkUnitFactory : extends IInterface
     virtual unsigned numWorkUnitsFiltered(WUSortField * filters, const void * filterbuf) = 0;
     virtual void descheduleAllWorkUnits() = 0;
     virtual bool deleteWorkUnitEx(const char * wuid) = 0;
-    virtual IConstQuerySetQueryIterator * getQuerySetQueriesSorted(WUQuerySortField *sortorder, WUQuerySortField *filters, const void *filterbuf, unsigned startoffset, unsigned maxnum, __int64 *cachehint, unsigned *total) = 0;
+    virtual IConstQuerySetQueryIterator * getQuerySetQueriesSorted(WUQuerySortField *sortorder, WUQuerySortField *filters, const void *filterbuf, unsigned startoffset, unsigned maxnum, __int64 *cachehint, unsigned *total, const MapStringTo<bool> *subset) = 0;
 };
 
 

+ 29 - 29
dali/base/dadfs.cpp

@@ -11741,34 +11741,34 @@ IPropertyTreeIterator *deserializeFileAttrIterator(MemoryBuffer& mb, DFUQResultF
     class CFileAttrIterator: public CInterface, implements IPropertyTreeIterator
     {
         Owned<IPropertyTree> cur;
-        StringArray fileClusterGroups;
+        StringArray fileNodeGroups;
 
-        void setFileCluster(IPropertyTree *attr, const char* group, StringArray& clusterFilter)
+        void setFileNodeGroup(IPropertyTree *attr, const char* group, StringArray& nodeGroupFilter)
         {
             if (!group || !*group)
                 return;
 
             //The group may contain multiple clusters and some of them may match with the clusterFilter.
-            if (clusterFilter.length() == 1)
-                attr->setProp(getDFUQResultFieldName(DFUQRFcluster), clusterFilter.item(0));//Filter has been handled on server side.
+            if (nodeGroupFilter.length() == 1)
+                attr->setProp(getDFUQResultFieldName(DFUQRFnodegroup), nodeGroupFilter.item(0));//Filter has been handled on server side.
             else
             {
-                StringArray clusters;
-                clusters.appendListUniq(group, ",");
-                ForEachItemIn(i,clusters)
+                StringArray groups;
+                groups.appendListUniq(group, ",");
+                ForEachItemIn(i,groups)
                 {
-                    //Add a cluster if no cluster filter or the cluster matchs with cluster filter
-                    const char* cluster = clusters.item(i);
-                    if (cluster && *cluster && ((!clusterFilter.length()) || (clusterFilter.find(cluster) != NotFound)))
-                        fileClusterGroups.append(cluster);
+                    //Add a group if no group filter or the group matches with group filter
+                    const char* node = groups.item(i);
+                    if (node && *node && ((!nodeGroupFilter.length()) || (nodeGroupFilter.find(node) != NotFound)))
+                        fileNodeGroups.append(node);
                 }
-                if (fileClusterGroups.length())
+                if (fileNodeGroups.length())
                 {
-                    //if this file exists on multiple clusters, set one of the clusters as the "@DFUSFcluster" prop for
-                    //this attr, leaving the rest inside the fileClusterGroups array. Those clusters will be used by the
-                    //duplicateFileAttrOnOtherClusterGroup() to duplicate this file attr on other clusters.
-                    attr->setProp(getDFUQResultFieldName(DFUQRFcluster), fileClusterGroups.item(fileClusterGroups.length() -1));
-                    fileClusterGroups.pop();
+                    //if this file exists on multiple groups, set one of the groups as the "@DFUSFnodegroup" prop for
+                    //this attr, leaving the rest inside the fileNodeGroups array. Those groups will be used by the
+                    //duplicateFileAttrOnOtherNodeGroup() to duplicate this file attr on other groups.
+                    attr->setProp(getDFUQResultFieldName(DFUQRFnodegroup), fileNodeGroups.item(fileNodeGroups.length() -1));
+                    fileNodeGroups.pop();
                 }
             }
         }
@@ -11791,7 +11791,7 @@ IPropertyTreeIterator *deserializeFileAttrIterator(MemoryBuffer& mb, DFUQResultF
             return;
         }
 
-        IPropertyTree *deserializeFileAttr(MemoryBuffer &mb, StringArray& clusterFilter)
+        IPropertyTree *deserializeFileAttr(MemoryBuffer &mb, StringArray& nodeGroupFilter)
         {
             IPropertyTree *attr = getEmptyAttr();
             StringAttr val;
@@ -11823,22 +11823,22 @@ IPropertyTreeIterator *deserializeFileAttrIterator(MemoryBuffer& mb, DFUQResultF
                 mb.read(at);
                 mb.read(val);
                 attr->setProp(at.get(),val.get());
-                if (strieq(at.get(), getDFUQResultFieldName(DFUQRFgroup)))
-                    setFileCluster(attr, val.get(), clusterFilter);
+                if (strieq(at.get(), getDFUQResultFieldName(DFUQRFnodegroups)))
+                    setFileNodeGroup(attr, val.get(), nodeGroupFilter);
             }
             attr->setPropInt64(getDFUQResultFieldName(DFUQRFsize), attr->getPropInt64(getDFUQResultFieldName(DFUQRForigsize), -1));//Sort the files with empty size to front
             setRecordCount(attr);
             return attr;
         }
 
-        IPropertyTree *duplicateFileAttrOnOtherClusterGroup(IPropertyTree *previousAttr)
+        IPropertyTree *duplicateFileAttrOnOtherNodeGroup(IPropertyTree *previousAttr)
         {
             IPropertyTree *attr = getEmptyAttr();
             Owned<IAttributeIterator> ai = previousAttr->getAttributes();
             ForEach(*ai)
                 attr->setProp(ai->queryName(),ai->queryValue());
-            attr->setProp(getDFUQResultFieldName(DFUQRFcluster), fileClusterGroups.item(fileClusterGroups.length()-1));
-            fileClusterGroups.pop();
+            attr->setProp(getDFUQResultFieldName(DFUQRFnodegroup), fileNodeGroups.item(fileNodeGroups.length()-1));
+            fileNodeGroups.pop();
             return attr;
         }
 
@@ -11846,7 +11846,7 @@ IPropertyTreeIterator *deserializeFileAttrIterator(MemoryBuffer& mb, DFUQResultF
         IMPLEMENT_IINTERFACE;
         MemoryBuffer mb;
         unsigned numfiles;
-        StringArray clusterFilter;
+        StringArray nodeGroupFilter;
 
         bool first()
         {
@@ -11858,9 +11858,9 @@ IPropertyTreeIterator *deserializeFileAttrIterator(MemoryBuffer& mb, DFUQResultF
 
         bool next()
         {
-            if (fileClusterGroups.length())
+            if (fileNodeGroups.length())
             {
-                IPropertyTree *attr = duplicateFileAttrOnOtherClusterGroup(cur);
+                IPropertyTree *attr = duplicateFileAttrOnOtherNodeGroup(cur);
                 cur.clear();
                 cur.setown(attr);
                 return true;
@@ -11868,7 +11868,7 @@ IPropertyTreeIterator *deserializeFileAttrIterator(MemoryBuffer& mb, DFUQResultF
             cur.clear();
             if (mb.getPos()>=mb.length())
                 return false;
-            cur.setown(deserializeFileAttr(mb, clusterFilter));
+            cur.setown(deserializeFileAttr(mb, nodeGroupFilter));
             return true;
         }
 
@@ -11892,8 +11892,8 @@ IPropertyTreeIterator *deserializeFileAttrIterator(MemoryBuffer& mb, DFUQResultF
             {
                 int fmt = localFilters[i];
                 int subfmt = (fmt&0xff);
-                if ((subfmt==DFUQRFcluster) && fv && *fv)
-                    clusterFilter.appendListUniq(fv, ",");
+                if ((subfmt==DFUQRFnodegroup) && fv && *fv)
+                    nodeGroupFilter.appendListUniq(fv, ",");
                 //Add more if needed
                 fv = fv + strlen(fv)+1;
             }
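
Note on the dadfs.cpp change above: the renamed iterator keeps the same one-to-many expansion, returning one attribute tree per node group that survives the filter when a file's @group attribute lists several groups. A rough sketch of that expansion with standard containers (illustrative only, not the jlib iterator API):

    #include <algorithm>
    #include <string>
    #include <vector>

    struct FileRec { std::string name; std::string nodeGroup; };

    // Emit one record per matching node group, mirroring how CFileAttrIterator
    // pops names off fileNodeGroups before moving to the next serialized file.
    static std::vector<FileRec> expandByNodeGroup(const std::string &name,
                                                  const std::vector<std::string> &groups,
                                                  const std::vector<std::string> &filter)
    {
        std::vector<FileRec> out;
        for (const std::string &g : groups)
        {
            if (filter.empty() || std::find(filter.begin(), filter.end(), g) != filter.end())
                out.push_back({name, g});
        }
        return out;
    }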

+ 2 - 2
dali/base/dadfs.hpp

@@ -240,7 +240,7 @@ enum DFUQResultField
 {
     DFUQRFname = 0,
     DFUQRFdescription = 1,
-    DFUQRFgroup = 2,
+    DFUQRFnodegroups = 2,
     DFUQRFkind = 3,
     DFUQRFtimemodified = 4,
     DFUQRFjob = 5,
@@ -251,7 +251,7 @@ enum DFUQResultField
     DFUQRFsize = 10,
     DFUQRForigsize = 11,
     DFUQRFworkunit = 12,
-    DFUQRFcluster = 13,
+    DFUQRFnodegroup = 13,
     DFUQRFnumsubfiles = 14,
     DFUQRFaccessed = 15,
     DFUQRFnumparts = 16,

+ 4 - 0
docs/Installing_and_RunningTheHPCCPlatform/Installing_and_RunningTheHPCCPlatform.xml

@@ -1601,7 +1601,11 @@ sudo -u hpcc cp /etc/HPCCSystems/source/NewEnvironment.xml /etc/HPCCSystems/envi
 </programlisting>See the appendix for more information on using this
             script.</para>
           </listitem>
+        </orderedlist>
+
+        <?hard-pagebreak ?>
 
+        <orderedlist continuation="continues">
           <listitem>
             <para>Restart the HPCC system on <emphasis
             role="bold">every</emphasis> node. The following command starts

+ 3 - 1
ecl/eclccserver/eclccserver.cpp

@@ -231,7 +231,9 @@ class EclccCompileThread : public CInterface, implements IPooledThread, implemen
                     err->setExceptionFileName(file);
                     err->setExceptionLineNo(atoi(line));
                     err->setExceptionColumn(atoi(col));
-                    if (stricmp(errClass, "warning")==0)
+                    if (stricmp(errClass, "info")==0)
+                        err->setSeverity(ExceptionSeverityInformation);
+                    else if (stricmp(errClass, "warning")==0)
                         err->setSeverity(ExceptionSeverityWarning);
                     else
                         err->setSeverity(ExceptionSeverityError);

+ 19 - 11
ecl/hql/hqlerror.cpp

@@ -242,8 +242,24 @@ public:
         ErrorReceiverSink::report(error);
 
         ErrorSeverity severity = error->getSeverity();
-        if (severity <= SeverityInfo)
+        const char * severityText;
+        switch (severity)
+        {
+        case SeverityIgnore:
             return;
+        case SeverityInfo:
+            severityText = "info";
+            break;
+        case SeverityWarning:
+            severityText = "warning";
+            break;
+        case SeverityError:
+        case SeverityFatal:
+            severityText = "error";
+            break;
+        default:
+            throwUnexpected();
+        }
 
         unsigned code = error->errorCode();
         const char * filename = error->getFilename();
@@ -253,16 +269,8 @@ public:
 
         StringBuffer msg;
         error->errorMessage(msg);
-        if (isError(severity))
-        {
-            if (!filename) filename = "";
-            fprintf(f, "%s(%d,%d): error C%04d: %s\n", filename, line, column, code, msg.str());
-        }
-        else
-        {
-            if (!filename) filename = *unknownAtom;
-            fprintf(f, "%s(%d,%d): warning C%04d: %s\n", filename, line, column, code, msg.str());
-        }
+        if (!filename) filename = isError(severity) ? "" : *unknownAtom;
+        fprintf(f, "%s(%d,%d): %s C%04d: %s\n", filename, line, column, severityText, code, msg.str());
     }
 
 protected:
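
Note on the hqlerror.cpp change above: every reported severity is now mapped to a lowercase keyword and printed in one uniform "file(line,col): <severity> C<code>: message" line, the same keywords that the eclccserver change earlier in this commit parses ("info", "warning", anything else treated as error). A hedged sketch of the mapping, with an illustrative enum rather than the real ErrorSeverity values:

    // Illustrative severity-to-keyword mapping; the real code switches on
    // ErrorSeverity and returns early for ignored messages.
    enum class Sev { Ignore, Info, Warning, Error, Fatal };

    static const char *severityText(Sev s)
    {
        switch (s)
        {
        case Sev::Info:    return "info";
        case Sev::Warning: return "warning";
        case Sev::Error:
        case Sev::Fatal:   return "error";
        default:           return nullptr;   // Ignore: caller skips the message
        }
    }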

+ 4 - 4
ecl/hql/hqlerror.hpp

@@ -157,10 +157,10 @@ private:
 
 //---------------------------------------------------------------------------------------------------------------------
 
-ErrorSeverity queryDefaultSeverity(WarnErrorCategory category);
-WarnErrorCategory getCategory(const char * category);
-ErrorSeverity getSeverity(IAtom * name);
-ErrorSeverity getCheckSeverity(IAtom * name);
+extern HQL_API ErrorSeverity queryDefaultSeverity(WarnErrorCategory category);
+extern HQL_API WarnErrorCategory getCategory(const char * category);
+extern HQL_API ErrorSeverity getSeverity(IAtom * name);
+extern HQL_API ErrorSeverity getCheckSeverity(IAtom * name);
 
 //---------------------------------------------------------------------------------------------------------------------
 

+ 8 - 3
ecl/hql/hqlgram2.cpp

@@ -9156,9 +9156,14 @@ void HqlGram::defineSymbolProduction(attribute & nameattr, attribute & paramattr
     ITypeInfo *etype = expr->queryType();
     if (isSaved(failure) && !type)
     {
-        if ((etype->getSize() == 0) && (etype->isScalar()))
-            reportError(ERR_ZEROLENSTORED, nameattr, "Saved definition has zero length - missing type?");
-        else if ((etype->getTypeCode() == type_set) && etype->queryChildType() == NULL) 
+        size32_t exprTypeSize = etype->getSize();
+        if (queryOperatorInList(no_stored, failure) && (exprTypeSize != UNKNOWN_LENGTH))
+        {
+            if (isStringType(etype) || isUnicodeType(etype))
+                type.setown(getStretchedType(UNKNOWN_LENGTH, etype));
+        }
+
+        if ((etype->getTypeCode() == type_set) && etype->queryChildType() == NULL)
             reportError(ERR_ZEROLENSTORED, nameattr, "Type must be specified for this stored list");
     }
 

+ 11 - 10
esp/build.sh

@@ -39,6 +39,14 @@ fi
 mkdir -p "$DISTDIR"
 cp -r "$SRCDIR/CodeMirror2" "$DISTDIR/CodeMirror2"
 
+# Copy & minify stub.htm to dist
+cat "$SRCDIR/stub.htm" | tr '\n' ' ' | \
+perl -pe "
+  s/<\!--.*?-->//g;                          # Strip comments
+#  s/isDebug: *1/deps:['$LOADERMID']/;        # Remove isDebug, add deps
+#  s/<script src=\"$LOADERMID.*?\/script>//;  # Remove script eclwatch/run
+  s/\s+/ /g;                                 # Collapse white-space" > "$DISTDIR/stub.htm"
+
 cd "$TOOLSDIR"
 
 if which node >/dev/null; then
@@ -50,19 +58,12 @@ else
     exit 1
 fi
 
+echo "Build complete"
+
 cd "$BASEDIR"
 
 LOADERMID=${LOADERMID//\//\\\/}
 
-# Copy & minify stub.htm to dist
-cat "$SRCDIR/stub.htm" | tr '\n' ' ' | \
-perl -pe "
-  s/<\!--.*?-->//g;                          # Strip comments
-#  s/isDebug: *1/deps:['$LOADERMID']/;        # Remove isDebug, add deps
-#  s/<script src=\"$LOADERMID.*?\/script>//;  # Remove script eclwatch/run
-  s/\s+/ /g;                                 # Collapse white-space" > "$DISTDIR/stub.htm"
-
-
 for dojodir in dojo dojox dijit
 do
   for f in  $(find ${DISTDIR}/${dojo_dir} -type f -perm /a+x ! -name "*.sh" \
@@ -72,4 +73,4 @@ do
   done
 done
 
-echo "Build complete"
+echo "Post process complete"

+ 18 - 18
esp/eclwatch/ws_XSLT/dfu.xslt

@@ -20,7 +20,7 @@
     <xsl:output method="html"/>
     <xsl:variable name="owner" select="/DFUQueryResponse/Owner"/>
     <xsl:variable name="cluster" select="/DFUQueryResponse/Prefix"/>
-    <xsl:variable name="clustername" select="/DFUQueryResponse/ClusterName"/>
+    <xsl:variable name="nodegroup" select="/DFUQueryResponse/NodeGroup"/>
     
     <xsl:variable name="logicalname" select="/DFUQueryResponse/LogicalName"/>
     <xsl:variable name="descriptionfilter" select="/DFUQueryResponse/Description"/>
@@ -64,7 +64,7 @@
           <script language="JavaScript1.2" id="menuhandlers">
                     var owner = '<xsl:value-of select="$owner"/>';;
                     var cluster = '<xsl:value-of select="$cluster"/>';;
-                    var clusterName = '<xsl:value-of select="$clustername"/>';;
+                    var nodeGroup = '<xsl:value-of select="$nodegroup"/>';;
                     var logicalName = '<xsl:value-of select="$logicalname"/>';;
                     var descriptionFilter = '<xsl:value-of select="$descriptionfilter"/>';;
                     var startDate = '<xsl:value-of select="$startdate"/>';;
@@ -312,11 +312,11 @@
                                     }
                                     if (type != 3)
                                     {
-                                        if (clusterName)
+                                        if (nodeGroup)
                                         {
                                             if (numParam > 0)
                                                 url += '&';
-                                            url += 'ClusterName=' + clusterName;
+                                            url += 'NodeGroup=' + nodeGroup;
                                             numParam++;
                                         }
                                     }
@@ -324,7 +324,7 @@
                                     {
                                         if (numParam > 0)
                                             url += '&';
-                                        url += 'ClusterName=' + value;
+                                        url += 'NodeGroup=' + value;
                                         numParam++;
                                     }
                                     document.location.href=url;
@@ -570,14 +570,14 @@
                        </xsl:otherwise>
                    </xsl:choose>
                    <xsl:choose>
-                       <xsl:when test="$sortby='Cluster' and $descending &lt; 1">
-                          <th align="center" style="cursor:pointer" onmouseover="bgColor='#FFFFFF'" onmouseout="bgColor='#CCCCCC'" onclick="headerClicked('Cluster', 1)">Cluster<img src="/esp/files/img/upsimple.png" width="10" height="10"></img></th>
+                       <xsl:when test="$sortby='NodeGroup' and $descending &lt; 1">
+                          <th align="center" style="cursor:pointer" onmouseover="bgColor='#FFFFFF'" onmouseout="bgColor='#CCCCCC'" onclick="headerClicked('NodeGroup', 1)">NodeGroup<img src="/esp/files/img/upsimple.png" width="10" height="10"></img></th>
                        </xsl:when>
-                       <xsl:when test="$sortby='Cluster'">
-                          <th align="center" style="cursor:pointer" onmouseover="bgColor='#FFFFFF'" onmouseout="bgColor='#CCCCCC'" onclick="headerClicked('Cluster', 0)">Cluster<img src="/esp/files/img/downsimple.png" width="10" height="10"></img></th>
+                       <xsl:when test="$sortby='NodeGroup'">
+                          <th align="center" style="cursor:pointer" onmouseover="bgColor='#FFFFFF'" onmouseout="bgColor='#CCCCCC'" onclick="headerClicked('NodeGroup', 0)">NodeGroup<img src="/esp/files/img/downsimple.png" width="10" height="10"></img></th>
                        </xsl:when>
                        <xsl:otherwise>
-                          <th align="center" style="cursor:pointer" onmouseover="bgColor='#FFFFFF'" onmouseout="bgColor='#CCCCCC'" onclick="headerClicked('Cluster', 0)">Cluster</th>
+                          <th align="center" style="cursor:pointer" onmouseover="bgColor='#FFFFFF'" onmouseout="bgColor='#CCCCCC'" onclick="headerClicked('NodeGroup', 0)">NodeGroup</th>
                        </xsl:otherwise>
                    </xsl:choose>
                    <xsl:choose>
@@ -647,15 +647,15 @@
             <xsl:variable name="info_query">
                 <xsl:value-of select="Name"/>
                 <xsl:choose>
-                    <xsl:when test="string-length(ClusterName)">&amp;Cluster=<xsl:value-of select="ClusterName"/></xsl:when>
+                    <xsl:when test="string-length(NodeGroup)">&amp;NodeGroup=<xsl:value-of select="NodeGroup"/></xsl:when>
                 </xsl:choose>
             </xsl:variable>
       <td>
         <xsl:if test="FromRoxieCluster=1">
-          <input type="hidden" id="{Name}@{ClusterName}"/>
+          <input type="hidden" id="{Name}@{NodeGroup}"/>
         </xsl:if>
-        <input type="checkbox" name="LogicalFiles_i{position()}" value="{Name}@{ClusterName}" onclick="return clicked(this, event)"/>
-          <xsl:variable name="popup">return DFUFilePopup('<xsl:value-of select="$info_query"/>', '<xsl:value-of select="Name"/>', '<xsl:value-of select="ClusterName"/>', '<xsl:value-of select="Replicate"/>', '<xsl:value-of select="FromRoxieCluster"/>', '<xsl:value-of select="BrowseData"/>', '<xsl:value-of select="position()"/>')</xsl:variable>
+        <input type="checkbox" name="LogicalFiles_i{position()}" value="{Name}@{NodeGroup}" onclick="return clicked(this, event)"/>
+          <xsl:variable name="popup">return DFUFilePopup('<xsl:value-of select="$info_query"/>', '<xsl:value-of select="Name"/>', '<xsl:value-of select="NodeGroup"/>', '<xsl:value-of select="Replicate"/>', '<xsl:value-of select="FromRoxieCluster"/>', '<xsl:value-of select="BrowseData"/>', '<xsl:value-of select="position()"/>')</xsl:variable>
           <xsl:variable name="oncontextmenu">
             <xsl:value-of select="$popup"/>
           </xsl:variable>
@@ -731,13 +731,13 @@
             </td>
             <td>
                 <xsl:choose>
-                    <xsl:when test="string-length(ClusterName) and not(string-length($clustername))">
-                        <a href="javascript:doQuery(3, '{ClusterName}')">
-                            <xsl:value-of select="ClusterName"/>
+                    <xsl:when test="string-length(NodeGroup) and not(string-length($nodegroup))">
+                        <a href="javascript:doQuery(3, '{NodeGroup}')">
+                            <xsl:value-of select="NodeGroup"/>
                         </a>
                     </xsl:when>
                     <xsl:otherwise>
-                        <xsl:value-of select="ClusterName"/>
+                        <xsl:value-of select="NodeGroup"/>
                     </xsl:otherwise>
                 </xsl:choose>
             </td>

+ 3 - 0
esp/files/req_array.js

@@ -234,16 +234,19 @@ function onClickTriButton(btn, clicks)
         { 
             btn.value='true'; 
             btn.style.color='green';
+            btn.name=btn.id;
         } 
         else if (btn.value=='true') 
         {
             btn.value='false'; 
             btn.style.color='red';
+            btn.name=btn.id;
         } 
         else 
         {
             btn.value='default'; 
             btn.style.color='gray';
+            btn.name='';
         }
     }
 }

+ 7 - 4
esp/scm/ws_dfu.ecm

@@ -33,7 +33,8 @@ ESPStruct SpaceItem
 ESPStruct DFULogicalFile
 {
     string Prefix;      
-    string ClusterName; 
+    [depr_ver("1.26")] string ClusterName;
+    [min_ver("1.26")] string NodeGroup;
     string Directory;
     string Description;
     string Parts;
@@ -135,7 +136,8 @@ ESPStruct DFUSpaceItem
 ESPrequest [nil_remove] DFUQueryRequest
 {
     string Prefix;
-    string ClusterName;
+    [depr_ver("1.26")] string ClusterName;
+    [min_ver("1.26")] string NodeGroup;
     string LogicalName;
     string Description;
     string Owner;
@@ -166,7 +168,8 @@ DFUQueryResponse
     ESParray<ESPstruct DFULogicalFile> DFULogicalFiles;
 
     string Prefix;
-    string ClusterName;
+    [depr_ver("1.26")] string ClusterName;
+    [min_ver("1.26")] string NodeGroup;
     string LogicalName;
     string Description;
     string Owner;
@@ -638,7 +641,7 @@ ESPresponse [exceptions_inline, nil_remove, http_encode(0)] DFUSearchDataRespons
 
 //  ===========================================================================
 ESPservice [
-    version("1.25"), default_client_version("1.25"),
+    version("1.26"), default_client_version("1.26"),
     noforms, 
     exceptions_inline("./smc_xslt/exceptions.xslt")] WsDfu
 {

+ 1 - 0
esp/scm/ws_workunits.ecm

@@ -1257,6 +1257,7 @@ ESPrequest [nil_remove] WUListQueriesRequest
     string Sortby;
     bool Descending(false);
     int64 CacheHint;
+    string FileName;
 };
 
 ESPresponse [exceptions_inline] WUListQueriesResponse

+ 77 - 94
esp/services/ws_dfu/ws_dfuService.cpp

@@ -2044,22 +2044,22 @@ void CWsDfuEx::getLogicalFileAndDirectory(IEspContext &context, IUserDescriptor*
                     pref.append(logicalName);
 
                 const char* owner=attr.queryProp("@owner");
-#if 0
-                char* clusterName=(char*)attr.queryProp("@group");   
-#else //Handling for multiple clusters
-                StringArray clusters;
-                if (getFileGroups(&attr,clusters)==0) 
+                StringArray groups;
+                if (getFileGroups(&attr,groups)==0)
                 {
-                    clusters.append("");
+                    groups.append("");
                 }
-#endif
-                ForEachItemIn(i, clusters)
+
+                ForEachItemIn(i, groups)
                 {
-                    const char* clusterName = clusters.item(i);
-                Owned<IEspDFULogicalFile> File = createDFULogicalFile("","");
+                    const char* groupName = groups.item(i);
+                    Owned<IEspDFULogicalFile> File = createDFULogicalFile("","");
 
                     File->setPrefix(pref);
-                    File->setClusterName(clusterName);
+                    if (version < 1.26)
+                        File->setClusterName(groupName);
+                    else
+                        File->setNodeGroup(groupName);
                     File->setName(logicalName);
                     File->setOwner(owner);
                     File->setReplicate(true);
@@ -2067,7 +2067,7 @@ void CWsDfuEx::getLogicalFileAndDirectory(IEspContext &context, IUserDescriptor*
                     ForEachItemIn(j, roxieClusterNames)
                     {
                         const char* roxieClusterName = roxieClusterNames.item(j);
-                        if (roxieClusterName && clusterName && !stricmp(roxieClusterName, clusterName))
+                        if (roxieClusterName && groupName && strieq(roxieClusterName, groupName))
                         {
                             File->setFromRoxieCluster(true);
                             break;
@@ -2292,9 +2292,9 @@ __int64 CWsDfuEx::findPositionByName(const char *name, bool descend, IArrayOf<IE
     return addToPos;
 }
 
-__int64 CWsDfuEx::findPositionByCluster(const char *cluster, bool descend, IArrayOf<IEspDFULogicalFile>& LogicalFiles)
+__int64 CWsDfuEx::findPositionByNodeGroup(double version, const char *node, bool descend, IArrayOf<IEspDFULogicalFile>& LogicalFiles)
 {
-    if (!cluster || (strlen(cluster) < 1))
+    if (!node || !*node)
     {
         if (descend)
             return -1;
@@ -2306,16 +2306,20 @@ __int64 CWsDfuEx::findPositionByCluster(const char *cluster, bool descend, IArra
     ForEachItemIn(i, LogicalFiles)
     {
         IEspDFULogicalFile& File = LogicalFiles.item(i);
-        const char *ClusterName = File.getClusterName();
-        if (!ClusterName)
+        const char *nodeGroup = NULL;
+        if (version < 1.26)
+            nodeGroup = File.getClusterName();
+        else
+            nodeGroup = File.getNodeGroup();
+        if (!nodeGroup)
             continue;
 
-        if (descend && strcmp(cluster, ClusterName)>0)
+        if (descend && strcmp(node, nodeGroup)>0)
         {
             addToPos = i;
             break;
         }
-        if (!descend && strcmp(cluster, ClusterName)<0)
+        if (!descend && strcmp(node, nodeGroup)<0)
         {
             addToPos = i;
             break;
@@ -2595,30 +2599,10 @@ void CWsDfuEx::getAPageOfSortedLogicalFile(IEspContext &context, IUserDescriptor
         roxieClusterNames.append(sName.str());
     }
 
-    StringArray clustersReq;
-    const char* clustersReq0 = req.getClusterName();
-    if (clustersReq0 && *clustersReq0)
-    {
-        char* pStr = (char*) clustersReq0;
-        while (pStr)
-        {
-            char clusterName[256];
-            char* ppStr = strchr(pStr, ',');
-            if (!ppStr)
-            {
-                strcpy(clusterName, pStr);
-                pStr = NULL;
-            }
-            else
-            {
-                strncpy(clusterName, pStr, ppStr - pStr );
-                clusterName[ppStr - pStr] = 0;
-                pStr = ppStr+1;
-            }
-
-            clustersReq.append(clusterName);
-        }
-    }
+    StringArray nodeGroupsReq;
+    const char* nodeGroupsReqString = req.getNodeGroup();
+    if (nodeGroupsReqString && *nodeGroupsReqString)
+        nodeGroupsReq.appendListUniq(nodeGroupsReqString, ",");
 
     StringBuffer size;
     __int64 totalFiles = 0;
@@ -2646,49 +2630,33 @@ void CWsDfuEx::getAPageOfSortedLogicalFile(IEspContext &context, IUserDescriptor
                 if (!owner || stricmp(owner, req.getOwner()))
                     continue;
             }
-            StringArray clusters;
-            StringArray clusters1;
-            if (getFileGroups(&attr,clusters1)==0)
+            StringArray nodeGroups;
+            StringArray fileNodeGroups;
+            if (getFileGroups(&attr,fileNodeGroups)==0)
             {
-                if (clustersReq.length() < 1)
-                {
-                    clusters.append("");
-                }
+                if (!nodeGroupsReq.length())
+                    nodeGroups.append("");
             }
-            else
+            else if (nodeGroupsReq.length() > 0) // check specified cluster name in list
             {
-                // check specified cluster name in list
-                if (clustersReq.length() > 0)
-                {
-                    ForEachItemIn(ii,clustersReq)
-                    {
-                        StringBuffer clusterFound;
-
-                        const char * cluster0 = clustersReq.item(ii);
-                        ForEachItemIn(i,clusters1)
-                        {
-                            if (!stricmp(clusters1.item(i), cluster0))
-                            {
-                                clusterFound.append(cluster0);
-                                break;
-                            }
-                        }
-                        if (clusterFound.length() > 0)
-                            clusters.append(clusterFound);
-                    }
-                }
-                else
+                ForEachItemIn(ii,nodeGroupsReq)
                 {
-                    if (clusters1.length() > 0)
+                    const char* nodeGroupReq = nodeGroupsReq.item(ii);
+                    ForEachItemIn(i,fileNodeGroups)
                     {
-                        ForEachItemIn(i,clusters1)
+                        if (strieq(fileNodeGroups.item(i), nodeGroupReq))
                         {
-                            const char * cluster0 = clusters1.item(i);
-                            clusters.append(cluster0);
+                            nodeGroups.append(nodeGroupReq);
+                            break;
                         }
                     }
                 }
             }
+            else if (fileNodeGroups.length())
+            {
+                ForEachItemIn(i,fileNodeGroups)
+                    nodeGroups.append(fileNodeGroups.item(i));
+            }
 
             const char* desc = attr.queryProp("@description");
             if(req.getDescription() && *req.getDescription())
@@ -2749,9 +2717,9 @@ void CWsDfuEx::getAPageOfSortedLogicalFile(IEspContext &context, IUserDescriptor
                 }
             }
 
-            ForEachItemIn(i, clusters)
+            ForEachItemIn(i, nodeGroups)
             {
-                const char* clusterName = clusters.item(i);
+                const char* nodeGroup = nodeGroups.item(i);
                 __int64 addToPos = -1; //Add to tail
                 if (stricmp(sortBy, "FileSize")==0)
                 {
@@ -2765,9 +2733,9 @@ void CWsDfuEx::getAPageOfSortedLogicalFile(IEspContext &context, IUserDescriptor
                 {
                     addToPos = findPositionByOwner(owner, descending, LogicalFileList);
                 }
-                else if (stricmp(sortBy, "Cluster")==0)
+                else if (stricmp(sortBy, "NodeGroup")==0)
                 {
-                    addToPos = findPositionByCluster(clusterName, descending, LogicalFileList);
+                    addToPos = findPositionByNodeGroup(version, nodeGroup, descending, LogicalFileList);
                 }
                 else if (stricmp(sortBy, "Records")==0)
                 {
@@ -2793,7 +2761,10 @@ void CWsDfuEx::getAPageOfSortedLogicalFile(IEspContext &context, IUserDescriptor
                 Owned<IEspDFULogicalFile> File = createDFULogicalFile("","");
 
                 File->setPrefix(pref);
-                File->setClusterName(clusterName);
+                if (version < 1.26)
+                    File->setClusterName(nodeGroup);
+                else
+                    File->setNodeGroup(nodeGroup);
                 File->setName(logicalName);
                 File->setOwner(owner);
                 File->setDescription(description);
@@ -2803,7 +2774,7 @@ void CWsDfuEx::getAPageOfSortedLogicalFile(IEspContext &context, IUserDescriptor
                 ForEachItemIn(j, roxieClusterNames)
                 {
                     const char* roxieClusterName = roxieClusterNames.item(j);
-                    if (roxieClusterName && clusterName && !stricmp(roxieClusterName, clusterName))
+                    if (roxieClusterName && nodeGroup && strieq(roxieClusterName, nodeGroup))
                     {
                         File->setFromRoxieCluster(true);
                         break;
@@ -2911,10 +2882,13 @@ void CWsDfuEx::getAPageOfSortedLogicalFile(IEspContext &context, IUserDescriptor
     }
 
     StringBuffer basicQuery;
-    if (req.getClusterName() && *req.getClusterName())
+    if (req.getNodeGroup() && *req.getNodeGroup())
     {
-        resp.setClusterName(req.getClusterName());
-        addToQueryString(basicQuery, "ClusterName", req.getClusterName());
+        if (version < 1.26)
+            resp.setClusterName(req.getNodeGroup());
+        else
+            resp.setNodeGroup(req.getNodeGroup());
+        addToQueryString(basicQuery, "NodeGroup", req.getNodeGroup());
     }
     if (req.getOwner() && *req.getOwner())
     {
@@ -3103,7 +3077,7 @@ void CWsDfuEx::setDFUQueryFilters(IEspDFUQueryRequest& req, StringBuffer& filter
     setFileTypeFilter(req.getFileType(), filterBuf);
     appendDFUQueryFilter(getDFUQFilterFieldName(DFUQFFdescription), DFUQFTwildcardMatch, req.getDescription(), filterBuf);
     appendDFUQueryFilter(getDFUQFilterFieldName(DFUQFFattrowner), DFUQFTwildcardMatch, req.getOwner(), filterBuf);
-    appendDFUQueryFilter(getDFUQFilterFieldName(DFUQFFgroup), DFUQFTcontainString, req.getClusterName(), ",", filterBuf);
+    appendDFUQueryFilter(getDFUQFilterFieldName(DFUQFFgroup), DFUQFTcontainString, req.getNodeGroup(), ",", filterBuf);
 
     __int64 sizeFrom = req.getFileSizeFrom();
     __int64 sizeTo = req.getFileSizeTo();
@@ -3164,8 +3138,8 @@ void CWsDfuEx::setDFUQuerySortOrder(IEspDFUQueryRequest& req, StringBuffer& sort
         sortOrder[0] = (DFUQResultField) (DFUQRFrecordcount | DFUQRFnumeric);
     else if (strieq(sortByPtr, "Owner"))
         sortOrder[0] = DFUQRFowner;
-    else if (strieq(sortByPtr, "Cluster"))
-        sortOrder[0] = DFUQRFcluster;
+    else if (strieq(sortByPtr, "NodeGroup"))
+        sortOrder[0] = DFUQRFnodegroup;
     else if (strieq(sortByPtr, "Modified"))
         sortOrder[0] = DFUQRFtimemodified;
     else if (strieq(sortByPtr, "Description"))
@@ -3224,9 +3198,14 @@ bool CWsDfuEx::addToLogicalFileList(IPropertyTree& file, double version, IArrayO
         lFile->setDescription(getShortDescription(file.queryProp(getDFUQResultFieldName(DFUQRFdescription)), buf.clear()));
         lFile->setTotalsize((buf.clear()<<comma(file.getPropInt64(getDFUQResultFieldName(DFUQRForigsize),-1))).str());
 
-        const char* clusterName = file.queryProp(getDFUQResultFieldName(DFUQRFcluster));
-        if (clusterName && *clusterName)
-            lFile->setClusterName(clusterName);
+        const char* nodeGroup = file.queryProp(getDFUQResultFieldName(DFUQRFnodegroup));
+        if (nodeGroup && *nodeGroup)
+        {
+            if (version < 1.26)
+                lFile->setClusterName(nodeGroup);
+            else
+                lFile->setNodeGroup(nodeGroup);
+        }
 
         int numSubFiles = file.hasProp(getDFUQResultFieldName(DFUQRFnumsubfiles));
         if(numSubFiles)
@@ -3287,6 +3266,7 @@ void CWsDfuEx::setDFUQueryResponse(IEspContext &context, unsigned totalFiles, St
                                    IEspDFUQueryRequest& req, IEspDFUQueryResponse& resp)
 {
     //for legacy
+    double version = context.getClientVersion();
     unsigned pageEnd = pageStart + pageSize;
     if (pageEnd > totalFiles)
         pageEnd = totalFiles;
@@ -3305,10 +3285,13 @@ void CWsDfuEx::setDFUQueryResponse(IEspContext &context, unsigned totalFiles, St
     }
 
     StringBuffer queryReq;
-    if (req.getClusterName() && *req.getClusterName())
+    if (req.getNodeGroup() && *req.getNodeGroup())
     {
-        resp.setClusterName(req.getClusterName());
-        addToQueryString(queryReq, "ClusterName", req.getClusterName());
+        if (version < 1.26)
+            resp.setClusterName(req.getNodeGroup());
+        else
+            resp.setNodeGroup(req.getNodeGroup());
+        addToQueryString(queryReq, "NodeGroup", req.getNodeGroup());
     }
     if (req.getOwner() && *req.getOwner())
     {
@@ -3403,7 +3386,7 @@ bool CWsDfuEx::doLogicalFileSearch(IEspContext &context, IUserDescriptor* udesc,
     unsigned short localFilterCount = 0;
     DFUQResultField localFilters[8];
     MemoryBuffer localFilterBuf;
-    addDFUQueryFilter(localFilters, localFilterCount, localFilterBuf, req.getClusterName(), DFUQRFcluster);
+    addDFUQueryFilter(localFilters, localFilterCount, localFilterBuf, req.getNodeGroup(), DFUQRFnodegroup);
     localFilters[localFilterCount] = DFUQRFterm;
 
     StringBuffer sortBy;
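
Note on the ws_dfuService.cpp change above: the response fields are version-gated, so clients older than WsDfu 1.26 still receive the deprecated ClusterName while newer clients get NodeGroup. A minimal sketch of that gate, using a hypothetical plain struct in place of the generated ESP response object:

    #include <string>

    // Hypothetical stand-in for the generated IEspDFULogicalFile object.
    struct LogicalFileResp
    {
        std::string clusterName;   // deprecated field, kept for old clients
        std::string nodeGroup;     // replacement field from interface 1.26 on
    };

    static void setGroupField(double clientVersion, LogicalFileResp &file,
                              const std::string &nodeGroup)
    {
        if (clientVersion < 1.26)
            file.clusterName = nodeGroup;   // pre-1.26 clients only know ClusterName
        else
            file.nodeGroup = nodeGroup;
    }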

+ 1 - 1
esp/services/ws_dfu/ws_dfuService.hpp

@@ -100,7 +100,7 @@ private:
     __int64 findPositionByRecords(const __int64 records, bool decsend, IArrayOf<IEspDFULogicalFile>& LogicalFiles);
     __int64 findPositionByName(const char *name, bool descend, IArrayOf<IEspDFULogicalFile>& LogicalFiles);
     __int64 findPositionByOwner(const char *owner, bool descend, IArrayOf<IEspDFULogicalFile>& LogicalFiles);
-    __int64 findPositionByCluster(const char *Cluster, bool descend, IArrayOf<IEspDFULogicalFile>& LogicalFiles);
+    __int64 findPositionByNodeGroup(double version, const char *nodeGroup, bool descend, IArrayOf<IEspDFULogicalFile>& LogicalFiles);
     __int64 findPositionByDate(const char *datetime, bool descend, IArrayOf<IEspDFULogicalFile>& LogicalFiles);
     __int64 findPositionByDescription(const char *description, bool descend, IArrayOf<IEspDFULogicalFile>& LogicalFiles);
     bool checkDescription(const char *description, const char *descriptionFilter);

+ 1 - 1
esp/services/ws_ecl/ws_ecl_service.cpp

@@ -1421,7 +1421,7 @@ void appendEclInputXsds(StringBuffer &content, IPropertyTree *xsd, BoolHash &add
 
 void CWsEclBinding::SOAPSectionToXsd(WsEclWuInfo &wsinfo, const char *parmXml, StringBuffer &schema, bool isRequest, IPropertyTree *xsdtree)
 {
-    Owned<IPropertyTree> tree = createPTreeFromXMLString(parmXml, ipt_none, (PTreeReaderOptions)(ptr_ignoreWhiteSpace|ptr_noRoot));
+    Owned<IPropertyTree> tree = createPTreeFromXMLString(parmXml);
 
     schema.appendf("<xsd:element name=\"%s%s\">", wsinfo.queryname.sget(), isRequest ? "Request" : "Response");
     schema.append("<xsd:complexType>");

+ 2 - 0
esp/services/ws_fs/CMakeLists.txt

@@ -58,6 +58,7 @@ include_directories (
          ./../../smc/SMCLib 
          ./../../bindings/SOAP/xpp 
          ./../../../common/remote 
+         ./../../../common/workunit
     )
 
 ADD_DEFINITIONS( -D_USRDLL )
@@ -70,6 +71,7 @@ target_link_libraries ( ws_fs
          esphttp 
          dalibase 
          environment 
+         workunit
          SMCLib 
          dfuwu 
     )

+ 15 - 1
esp/services/ws_fs/ws_fsBinding.cpp

@@ -303,9 +303,23 @@ IPropertyTree* CFileSpraySoapBindingEx::createPTreeForXslt(const char* method, c
             }
         }
 
+        //For Spray files on Thor Cluster, fetch all the group names for all the thor instances (and dedup them)
+        BoolHash uniqueThorClusterGroupNames;
         it.setown(pEnvSoftware->getElements("ThorCluster"));
         ForEach(*it)
-            pSoftware->addPropTree("ThorCluster", &it->get());
+        {
+            StringBuffer thorClusterGroupName;
+            IPropertyTree& cluster = it->query();
+            getClusterGroupName(cluster, thorClusterGroupName);
+            if (!thorClusterGroupName.length())
+                continue;
+            if (uniqueThorClusterGroupNames.getValue(thorClusterGroupName.str()))
+                continue;
+
+            uniqueThorClusterGroupNames.setValue(thorClusterGroupName.str(), true);
+            IPropertyTree* newClusterTree = pSoftware->addPropTree("ThorCluster", &it->get());
+            newClusterTree->setProp("@name", thorClusterGroupName.str()); //set group name into @name for spray target
+        }
 
         it.setown(pEnvSoftware->getElements("EclAgentProcess"));
         ForEach(*it)
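
Note on the ws_fsBinding.cpp change above: Thor clusters that share a node group are collapsed so each group appears only once in the spray-target list. A sketch of that dedup step, with std::unordered_set standing in for jlib's BoolHash (illustrative only):

    #include <string>
    #include <unordered_set>
    #include <vector>

    // Collect each Thor cluster group name once, skipping empty names,
    // so spray targets are listed per node group rather than per Thor instance.
    static std::vector<std::string> uniqueGroupNames(const std::vector<std::string> &thorGroupNames)
    {
        std::unordered_set<std::string> seen;
        std::vector<std::string> unique;
        for (const std::string &g : thorGroupNames)
        {
            if (g.empty() || !seen.insert(g).second)
                continue;          // already listed, or no group name resolved
            unique.push_back(g);
        }
        return unique;
    }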

+ 38 - 34
esp/services/ws_fs/ws_fsService.cpp

@@ -26,6 +26,7 @@
 #include "daclient.hpp"
 #include "wshelpers.hpp"
 #include "dfuwu.hpp"
+#include "workunit.hpp"
 #include "ws_fsService.hpp"
 #ifdef _WIN32
 #include "windows.h"
@@ -600,8 +601,12 @@ void setRoxieClusterPartDiskMapping(const char *clusterName, const char *default
     wuOptions->setReplicate(replicate);
 }
 
-void getClusterFromLFN(const char* lfn, StringBuffer& cluster, const char* username, const char* passwd)
+StringBuffer& getNodeGroupFromLFN(StringBuffer& nodeGroup, const char* lfn, const char* username, const char* passwd)
 {
+    Owned<IRemoteConnection> conn = querySDS().connect("Environment", myProcessSession(), RTM_LOCK_READ, SDS_LOCK_TIMEOUT);
+    if (!conn)
+        return nodeGroup;
+
     Owned<IUserDescriptor> udesc;
     if(username != NULL && *username != '\0')
     {
@@ -609,8 +614,10 @@ void getClusterFromLFN(const char* lfn, StringBuffer& cluster, const char* usern
         udesc->set(username, passwd);
     }
 
+    StringBuffer clusterName;
     LogicFileWrapper lfw;
-    lfw.FindClusterName(lfn, cluster, udesc);
+    lfw.FindClusterName(lfn, clusterName, udesc);
+    return getClusterThorGroupName(nodeGroup, clusterName.str());
 }
 
 StringBuffer& constructFileMask(const char* filename, StringBuffer& filemask)
@@ -1825,9 +1832,9 @@ bool CFileSprayEx::onSprayFixed(IEspContext &context, IEspSprayFixed &req, IEspS
 
         StringBuffer destFolder, destTitle, defaultFolder, defaultReplicateFolder;
 
-        const char* destCluster = req.getDestGroup();
-        if(destCluster == NULL || *destCluster == '\0')
-            throw MakeStringException(ECLWATCH_INVALID_INPUT, "Destination cluster/group not specified.");
+        const char* destNodeGroup = req.getDestGroup();
+        if(destNodeGroup == NULL || *destNodeGroup == '\0')
+            throw MakeStringException(ECLWATCH_INVALID_INPUT, "Destination node group not specified.");
 
         MemoryBuffer& srcxml = (MemoryBuffer&)req.getSrcxml();
         const char* srcip = req.getSourceIP();
@@ -1849,19 +1856,19 @@ bool CFileSprayEx::onSprayFixed(IEspContext &context, IEspSprayFixed &req, IEspS
         destname = lfn.get();
 
         StringBuffer gName, ipAddr;
-        const char *pTr = strchr(destCluster, ' ');
+        const char *pTr = strchr(destNodeGroup, ' ');
         if (pTr)
         {
-            gName.append(pTr - destCluster, destCluster);
+            gName.append(pTr - destNodeGroup, destNodeGroup);
             ipAddr.append(pTr+1);
         }
         else
-            gName.append(destCluster);
+            gName.append(destNodeGroup);
 
         if (ipAddr.length() > 0)
             ParseLogicalPath(destname, ipAddr.str(), destFolder, destTitle, defaultFolder, defaultReplicateFolder);
         else
-            ParseLogicalPath(destname, destCluster, destFolder, destTitle, defaultFolder, defaultReplicateFolder);
+            ParseLogicalPath(destname, destNodeGroup, destFolder, destTitle, defaultFolder, defaultReplicateFolder);
 
         Owned<IDFUWorkUnitFactory> factory = getDFUWorkUnitFactory();
         Owned<IDFUWorkUnit> wu = factory->createWorkUnit();
@@ -1996,19 +2003,19 @@ bool CFileSprayEx::onSprayVariable(IEspContext &context, IEspSprayVariable &req,
 
         StringBuffer destFolder, destTitle, defaultFolder, defaultReplicateFolder;
 
-        const char* destCluster = req.getDestGroup();
-        if(destCluster == NULL || *destCluster == '\0')
-            throw MakeStringException(ECLWATCH_INVALID_INPUT, "Destination cluster/group not specified.");
+        const char* destNodeGroup = req.getDestGroup();
+        if(destNodeGroup == NULL || *destNodeGroup == '\0')
+            throw MakeStringException(ECLWATCH_INVALID_INPUT, "Destination node group not specified.");
 
         StringBuffer gName, ipAddr;
-        const char *pTr = strchr(destCluster, ' ');
+        const char *pTr = strchr(destNodeGroup, ' ');
         if (pTr)
         {
-            gName.append(pTr - destCluster, destCluster);
+            gName.append(pTr - destNodeGroup, destNodeGroup);
             ipAddr.append(pTr+1);
         }
         else
-            gName.append(destCluster);
+            gName.append(destNodeGroup);
 
         MemoryBuffer& srcxml = (MemoryBuffer&)req.getSrcxml();
         const char* srcip = req.getSourceIP();
@@ -2032,7 +2039,7 @@ bool CFileSprayEx::onSprayVariable(IEspContext &context, IEspSprayVariable &req,
         if (ipAddr.length() > 0)
             ParseLogicalPath(destname, ipAddr.str(), destFolder, destTitle, defaultFolder, defaultReplicateFolder);
         else
-            ParseLogicalPath(destname, destCluster, destFolder, destTitle, defaultFolder, defaultReplicateFolder);
+            ParseLogicalPath(destname, destNodeGroup, destFolder, destTitle, defaultFolder, defaultReplicateFolder);
 
         Owned<IDFUWorkUnitFactory> factory = getDFUWorkUnitFactory();
         Owned<IDFUWorkUnit> wu = factory->createWorkUnit();
@@ -2332,22 +2339,19 @@ bool CFileSprayEx::onCopy(IEspContext &context, IEspCopy &req, IEspCopyResponse
             throw MakeStringException(ECLWATCH_INVALID_INPUT, "Destination logical file not specified.");
 
         StringBuffer destFolder, destTitle, defaultFolder, defaultReplicateFolder;
-        StringBuffer srcCluster, destCluster, destClusterName;
+        StringBuffer srcNodeGroup, destNodeGroup;
         bool bRoxie = false;
-        const char* destCluster0 = req.getDestGroup();
-        if(destCluster0 == NULL || *destCluster0 == '\0')
+        const char* destNodeGroupReq = req.getDestGroup();
+        if(!destNodeGroupReq || !*destNodeGroupReq)
         {
-            getClusterFromLFN(srcname, srcCluster, context.queryUserId(), context.queryPassword());
-            DBGLOG("Destination cluster/group not specified, using source cluster %s", srcCluster.str());
-            destCluster = srcCluster.str();
-            destClusterName = srcCluster.str();
+            getNodeGroupFromLFN(destNodeGroup, srcname, context.queryUserId(), context.queryPassword());
+            DBGLOG("Destination node group not specified, using source node group %s", destNodeGroup.str());
         }
         else
         {
-            destCluster = destCluster0;
-            destClusterName = destCluster0;
-            const char* destClusterRoxie = req.getDestGroupRoxie();
-            if (destClusterRoxie && !stricmp(destClusterRoxie, "Yes"))
+            destNodeGroup = destNodeGroupReq;
+            const char* destRoxie = req.getDestGroupRoxie();
+            if (destRoxie && !stricmp(destRoxie, "Yes"))
             {
                 bRoxie = true;
             }
@@ -2361,7 +2365,7 @@ bool CFileSprayEx::onCopy(IEspContext &context, IEspCopy &req, IEspCopyResponse
             dstname = lfn.get();
         }
 
-        ParseLogicalPath(dstname, destCluster.str(), destFolder, destTitle, defaultFolder, defaultReplicateFolder);
+        ParseLogicalPath(dstname, destNodeGroup.str(), destFolder, destTitle, defaultFolder, defaultReplicateFolder);
 
         StringBuffer fileMask; 
         constructFileMask(destTitle.str(), fileMask);
@@ -2400,8 +2404,8 @@ bool CFileSprayEx::onCopy(IEspContext &context, IEspCopy &req, IEspCopyResponse
         StringBuffer user, passwd;
         wu->setUser(context.getUserID(user).str());
         wu->setPassword(context.getPassword(passwd).str());
-        if(destCluster.length() > 0)
-            wu->setClusterName(destCluster.str());
+        if(destNodeGroup.length() > 0)
+            wu->setClusterName(destNodeGroup.str());
         if (supercopy)
             wu->setCommand(DFUcmd_supercopy);
         else
@@ -2429,7 +2433,7 @@ bool CFileSprayEx::onCopy(IEspContext &context, IEspCopy &req, IEspCopyResponse
 
         if (bRoxie)
         {
-            setRoxieClusterPartDiskMapping(destCluster.str(), defaultFolder.str(), defaultReplicateFolder.str(), supercopy, wuFSpecDest, wuOptions);
+            setRoxieClusterPartDiskMapping(destNodeGroup.str(), defaultFolder.str(), defaultReplicateFolder.str(), supercopy, wuFSpecDest, wuOptions);
             wuFSpecDest->setWrap(true);                             // roxie always wraps
             if(req.getCompress())
                 wuFSpecDest->setCompressed(true);
@@ -2445,7 +2449,7 @@ bool CFileSprayEx::onCopy(IEspContext &context, IEspCopy &req, IEspCopyResponse
             if (destDiffKeyName&&*destDiffKeyName)
                 wuFSpecDest->setDiffKey(destDiffKeyName);
             wuFSpecDest->setDirectory(destFolder.str());
-            wuFSpecDest->setGroupName(destCluster.str());
+            wuFSpecDest->setGroupName(destNodeGroup.str());
             wuFSpecDest->setWrap(req.getWrap());
             const char * encryptkey = req.getEncrypt();
             if(req.getCompress()||(encryptkey&&*encryptkey))
@@ -2473,10 +2477,10 @@ bool CFileSprayEx::onCopy(IEspContext &context, IEspCopy &req, IEspCopyResponse
                 wuOptions->setIfNewer(true);
 
             ClusterPartDiskMapSpec mspec;
-            wuFSpecDest->getClusterPartDiskMapSpec(destCluster.str(), mspec);
+            wuFSpecDest->getClusterPartDiskMapSpec(destNodeGroup.str(), mspec);
             mspec.setDefaultBaseDir(defaultFolder.str());
             mspec.setDefaultReplicateDir(defaultReplicateFolder.str());
-            wuFSpecDest->setClusterPartDiskMapSpec(destCluster.str(), mspec);
+            wuFSpecDest->setClusterPartDiskMapSpec(destNodeGroup.str(), mspec);
         }
 
         resp.setResult(wu->queryId());
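
For clarity, a standalone sketch of the fallback this hunk introduces: when the request omits a destination group, the group is derived from the source logical file, and that single value then feeds ParseLogicalPath, setClusterName, setGroupName and the disk-map spec. Plain std::string stands in for StringBuffer, and nodeGroupFromLFN is a hypothetical stand-in for getNodeGroupFromLFN; this is an illustration, not the service code.

#include <iostream>
#include <string>

// Hypothetical stand-in for getNodeGroupFromLFN(): look up the group owning the source file.
static std::string nodeGroupFromLFN(const std::string &srcLFN)
{
    return srcLFN.empty() ? std::string() : std::string("thor_group");
}

// Mirrors the onCopy logic above: prefer the requested group, otherwise fall back to the source's.
static std::string resolveDestNodeGroup(const std::string &requested, const std::string &srcLFN)
{
    if (!requested.empty())
        return requested;
    std::string derived = nodeGroupFromLFN(srcLFN);
    std::cout << "Destination node group not specified, using source node group " << derived << "\n";
    return derived;   // one value reused downstream instead of separate destCluster/destClusterName copies
}

int main()
{
    resolveDestNodeGroup("", "somescope::somefile");        // falls back to the source's node group
    resolveDestNodeGroup("roxie", "somescope::somefile");   // explicit group wins
    return 0;
}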

+ 4 - 4
esp/services/ws_workunits/ws_workunitsHelpers.cpp

@@ -2107,7 +2107,7 @@ DataCacheElement* DataCache::lookup(IEspContext &context, const char* filter, un
         if (list_iter == cache.end())
             break;
 
-        DataCacheElement* awu = list_iter->getLink();
+        DataCacheElement* awu = list_iter->get();
         if (!awu || (awu->m_timeCached > timeNow))
             break;
 
@@ -2174,7 +2174,7 @@ ArchivedWuCacheElement* ArchivedWuCache::lookup(IEspContext &context, const char
         if (list_iter == cache.end())
             break;
 
-        ArchivedWuCacheElement* awu = list_iter->getLink();
+        ArchivedWuCacheElement* awu = list_iter->get();
         if (awu && !stricmp(sashaUpdatedWhen, awu->m_sashaUpdatedWhen.c_str()) && (awu->m_timeCached > timeNow))
             break;
 
@@ -2192,12 +2192,12 @@ ArchivedWuCacheElement* ArchivedWuCache::lookup(IEspContext &context, const char
     return NULL;
 }
 
-void ArchivedWuCache::add(const char* filter, const char* sashaUpdatedWhen, bool hasNextPage, IArrayOf<IEspECLWorkunit>& wus)
+void ArchivedWuCache::add(const char* filter, const char* sashaUpdatedWhen, bool hasNextPage, unsigned numWUsReturned, IArrayOf<IEspECLWorkunit>& wus)
 {
     CriticalBlock block(crit);
 
     //Save new data
-    Owned<ArchivedWuCacheElement> e=new ArchivedWuCacheElement(filter, sashaUpdatedWhen, hasNextPage, /*data.str(),*/ wus);
+    Owned<ArchivedWuCacheElement> e=new ArchivedWuCacheElement(filter, sashaUpdatedWhen, hasNextPage, numWUsReturned, wus);
     if (cacheSize > 0)
     {
         if (cache.size() >= cacheSize)

+ 11 - 3
esp/services/ws_workunits/ws_workunitsHelpers.hpp

@@ -261,11 +261,18 @@ struct DataCache: public CInterface, implements IInterface
     size32_t cacheSize;
 };
 
+interface IArchivedWUsReader : extends IInterface
+{
+    virtual void getArchivedWUs(IArrayOf<IEspECLWorkunit>& results) = 0;
+    virtual bool getHasMoreWU() = 0;
+    virtual unsigned getNumberOfWUsReturned() = 0;
+};
+
 struct ArchivedWuCacheElement: public CInterface, implements IInterface
 {
     IMPLEMENT_IINTERFACE;
-    ArchivedWuCacheElement(const char* filter, const char* sashaUpdatedWhen, bool hasNextPage, /*const char* data,*/ IArrayOf<IEspECLWorkunit>& wus):m_filter(filter),
-        m_sashaUpdatedWhen(sashaUpdatedWhen), m_hasNextPage(hasNextPage)/*, m_data(data)*/
+    ArchivedWuCacheElement(const char* filter, const char* sashaUpdatedWhen, bool hasNextPage, unsigned _numWUsReturned, IArrayOf<IEspECLWorkunit>& wus):m_filter(filter),
+        m_sashaUpdatedWhen(sashaUpdatedWhen), m_hasNextPage(hasNextPage), numWUsReturned(_numWUsReturned)
     {
         m_timeCached.setNow();
         if (wus.length() > 0)
@@ -283,6 +290,7 @@ struct ArchivedWuCacheElement: public CInterface, implements IInterface
     std::string m_filter;
     std::string m_sashaUpdatedWhen;
     bool m_hasNextPage;
+    unsigned numWUsReturned;
 
     CDateTime m_timeCached;
     IArrayOf<IEspECLWorkunit> m_results;
@@ -295,7 +303,7 @@ struct ArchivedWuCache: public CInterface, implements IInterface
     ArchivedWuCache(size32_t _cacheSize=0): cacheSize(_cacheSize){}
     ArchivedWuCacheElement* lookup(IEspContext &context, const char* filter, const char* sashaUpdatedWhen, unsigned timeOutMin);
 
-    void add(const char* filter, const char* sashaUpdatedWhen, bool hasNextPage, IArrayOf<IEspECLWorkunit>& wus);
+    void add(const char* filter, const char* sashaUpdatedWhen, bool hasNextPage, unsigned numWUsReturned, IArrayOf<IEspECLWorkunit>& wus);
 
     std::list<Linked<ArchivedWuCacheElement> > cache;
     CriticalSection crit;
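
A minimal sketch (std containers standing in for the jlib cache classes) of what the new numWUsReturned member buys: a cache hit can rebuild the paging fields of the response without re-querying the archive server. The filter-string key and WUID are illustrative values only.

#include <map>
#include <string>
#include <vector>

struct CachedPage               // stand-in for ArchivedWuCacheElement
{
    bool hasNextPage;
    unsigned numWUsReturned;    // the field added above
    std::vector<std::string> wuids;
};

int main()
{
    std::map<std::string, CachedPage> cache;    // stand-in for ArchivedWuCache
    cache["owner=jsmith;pageStart=0;pageSize=500"] = CachedPage{true, 500, {"W20140601-120000"}};

    auto hit = cache.find("owner=jsmith;pageStart=0;pageSize=500");
    if (hit != cache.end())
    {
        // Both paging facts come straight from the cached entry.
        bool more = hit->second.hasNextPage;
        unsigned returned = hit->second.numWUsReturned;
        (void)more; (void)returned;
    }
    return 0;
}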

+ 41 - 5
esp/services/ws_workunits/ws_workunitsQuerySets.cpp

@@ -335,6 +335,7 @@ void QueryFilesInUse::loadTarget(IPropertyTree *t, const char *target, unsigned
             continue;
 
         queryTree = targetTree->addPropTree("Query", createPTree("Query"));
+        queryTree->setProp("@target", target); //for reference when searching across targets
         queryTree->setProp("@id", queryid);
         if (pkgid && *pkgid)
             queryTree->setProp("@pkgid", pkgid);
@@ -384,12 +385,25 @@ void QueryFilesInUse::loadTargets(IPropertyTree *t, unsigned flags)
     }
 }
 
+IPropertyTreeIterator *QueryFilesInUse::findAllQueriesUsingFile(const char *lfn)
+{
+    CriticalBlock b(crit);
+
+    if (!lfn || !*lfn)
+        return NULL;
+
+    VStringBuffer xpath("*/Query[File/@lfn='%s']", lfn);
+    return tree->getElements(xpath);
+}
+
 IPropertyTreeIterator *QueryFilesInUse::findQueriesUsingFile(const char *target, const char *lfn)
 {
     CriticalBlock b(crit);
 
-    if (!target || !*target || !lfn || !*lfn)
+    if (!lfn || !*lfn)
         return NULL;
+    if (!target || !*target)
+        return findAllQueriesUsingFile(lfn);
     IPropertyTree *targetTree = tree->getPropTree(target);
     if (!targetTree)
         return NULL;
@@ -1244,8 +1258,27 @@ bool CWsWorkunitsEx::onWUListQueries(IEspContext &context, IEspWUListQueriesRequ
     if (!req.getCacheHint_isNull())
         cacheHint = req.getCacheHint();
 
+    Owned<MapStringTo<bool> > queriesUsingFileMap;
+    const char *lfn = req.getFileName();
+    if (lfn && *lfn)
+    {
+        queriesUsingFileMap.setown(new MapStringTo<bool>());
+        Owned<IPropertyTreeIterator> queriesUsingFile = filesInUse.findQueriesUsingFile(clusterReq, lfn);
+        ForEach (*queriesUsingFile)
+        {
+            IPropertyTree &queryUsingFile = queriesUsingFile->query();
+            const char *queryTarget = queryUsingFile.queryProp("@target");
+            const char *queryId = queryUsingFile.queryProp("@id");
+            if (queryTarget && *queryTarget && queryId && *queryId)
+            {
+                VStringBuffer targetQuery("%s/%s", queryTarget, queryId);
+                queriesUsingFileMap->setValue(targetQuery, true);
+            }
+        }
+    }
+
     Owned<IWorkUnitFactory> factory = getWorkUnitFactory(context.querySecManager(), context.queryUser());
-    Owned<IConstQuerySetQueryIterator> it = factory->getQuerySetQueriesSorted(sortOrder, filters, filterBuf.bufferBase(), pageStartFrom, pageSize, &cacheHint, &numberOfQueries);
+    Owned<IConstQuerySetQueryIterator> it = factory->getQuerySetQueriesSorted(sortOrder, filters, filterBuf.bufferBase(), pageStartFrom, pageSize, &cacheHint, &numberOfQueries, queriesUsingFileMap);
     resp.setCacheHint(cacheHint);
 
     IArrayOf<IEspQuerySetQuery> queries;
@@ -1253,10 +1286,13 @@ bool CWsWorkunitsEx::onWUListQueries(IEspContext &context, IEspWUListQueriesRequ
     ForEach(*it)
     {
         IPropertyTree &query=it->query();
+        const char *queryId = query.queryProp("@id");
+        const char *queryTarget = query.queryProp("@querySetId");
+
         Owned<IEspQuerySetQuery> q = createQuerySetQuery();
-        q->setId(query.queryProp("@id"));
+        q->setId(queryId);
+        q->setQuerySetId(queryTarget);
         q->setName(query.queryProp("@name"));
-        q->setQuerySetId(query.queryProp("@querySetId"));
         q->setDll(query.queryProp("@dll"));
         q->setWuid(query.queryProp("@wuid"));
         q->setActivated(query.getPropBool("@activated", false));
@@ -1304,7 +1340,7 @@ bool CWsWorkunitsEx::onWUListQueries(IEspContext &context, IEspWUListQueriesRequ
         queries.append(*q.getClear());
     }
     resp.setQuerysetQueries(queries);
-    resp.setNumberOfQueries(numberOfQueries);
+    resp.setNumberOfQueries(queries.length());
 
     return true;
 }
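
A self-contained sketch of the filename filter added here: queries using the LFN are collected into a set keyed "target/queryId" (which is why @target is now stored on each Query node), and that set is then used to restrict the listed queries. std::set stands in for MapStringTo<bool>; the query names are made up.

#include <iostream>
#include <set>
#include <string>
#include <vector>

struct QueryRef { std::string target, id; };

int main()
{
    // What findQueriesUsingFile(nullptr, lfn) would yield across all targets.
    std::vector<QueryRef> usingFile = { {"roxie", "myquery.1"}, {"thor", "otherquery.3"} };

    std::set<std::string> queriesUsingFileMap;
    for (const QueryRef &q : usingFile)
        queriesUsingFileMap.insert(q.target + "/" + q.id);

    // While walking the sorted query list, keep only keys present in the set.
    std::vector<QueryRef> page = { {"roxie", "myquery.1"}, {"roxie", "unrelated.2"} };
    for (const QueryRef &q : page)
        if (queriesUsingFileMap.count(q.target + "/" + q.id))
            std::cout << q.target << "/" << q.id << " uses the file\n";
    return 0;
}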

+ 203 - 189
esp/services/ws_workunits/ws_workunitsService.cpp

@@ -1886,122 +1886,183 @@ void doWUQueryWithSort(IEspContext &context, IEspWUQueryRequest & req, IEspWUQue
 }
 
 void doWUQueryFromArchive(IEspContext &context, const char* sashaServerIP, unsigned sashaServerPort,
-       ArchivedWuCache &archivedWuCache, int cacheTime, IEspWUQueryRequest & req, IEspWUQueryResponse & resp)
-{
-    SecAccessFlags accessOwn;
-    SecAccessFlags accessOthers;
-    getUserWuAccessFlags(context, accessOwn, accessOthers, true);
+       ArchivedWuCache &archivedWuCache, unsigned cacheMinutes, IEspWUQueryRequest & req, IEspWUQueryResponse & resp)
+{
+    class CArchivedWUsReader : public CInterface, implements IArchivedWUsReader
+    {
+        IEspContext& context;
+        IEspWUQueryRequest& req;
+        unsigned pageFrom, pageSize;
+        StringAttr sashaServerIP;
+        unsigned sashaServerPort;
+        unsigned cacheMinutes;
+        StringBuffer filterStr;
+        ArchivedWuCache& archivedWuCache;
+        unsigned numberOfWUsReturned;
+        bool hasMoreWU;
+
+        void readDateFilters(StringBuffer& from, StringBuffer& to)
+        {
+            CDateTime timeFrom, timeTo;
+            if(notEmpty(req.getEndDate()))
+                timeTo.setString(req.getEndDate(), NULL, true);
+            else
+                timeTo.setNow();
 
-    __int64 pageSize = req.getPageSize();
-    if(pageSize < 1)
-        pageSize=100;
-    __int64 displayStart = req.getPageStartFrom();
-    __int64 displayEnd = displayStart + pageSize;
-    unsigned dateLimit = 0;
-    bool hasNextPage = true;
+            unsigned year, month, day, hour, minute, second, nano;
+            timeTo.getDate(year, month, day, true);
+            timeTo.getTime(hour, minute, second, nano, true);
+            to.setf("%4d%02d%02d%02d%02d", year, month, day, hour, minute);
 
-    SocketEndpoint ep;
-    if (sashaServerIP && *sashaServerIP)
-        ep.set(sashaServerIP, sashaServerPort);
-    else
-        getSashaNode(ep);
+            if(!notEmpty(req.getStartDate()))
+                return;
 
-    Owned<INode> sashaserver = createINode(ep);
+            timeFrom.setString(req.getStartDate(), NULL, true);
+            if (timeFrom >= timeTo)
+                return;
 
-    CDateTime wuTimeFrom, wuTimeTo;
-    if(notEmpty(req.getEndDate()))
-        wuTimeTo.setString(req.getEndDate(), NULL, true);
-    else
-        wuTimeTo.setNow();
+            unsigned year0, month0, day0, hour0, minute0, second0, nano0;
+            timeFrom.getDate(year0, month0, day0, true);
+            timeFrom.getTime(hour0, minute0, second0, nano0, true);
+            from.setf("%4d%02d%02d%02d%02d", year0, month0, day0, hour0, minute0);
 
-    if(notEmpty(req.getStartDate()))
-    {
-        wuTimeFrom.setString(req.getStartDate(), NULL, true);
-        dateLimit = 1;
-    }
+            return;
+        }
 
-    IArrayOf<IEspECLWorkunit> results;
+        bool addToFilterString(const char *name, const char *value)
+        {
+            if (isEmpty(name) || isEmpty(value))
+                return false;
+            if (filterStr.length())
+                filterStr.append(';');
+            filterStr.append(name).append("=").append(value);
+            return true;
+        }
 
-    StringBuffer filter;
-    addToQueryString(filter, "cluster", req.getCluster(), ';');
-    addToQueryString(filter, "owner", req.getOwner(), ';');
-    addToQueryString(filter, "jobName", req.getJobname(), ';');
-    addToQueryString(filter, "state", req.getState(), ';');
-    StringBuffer s;
-    if (!req.getLastNDays_isNull() && req.getLastNDays()>0)
-        addToQueryString(filter, "LastNDays", s.clear().append(req.getLastNDays()).str(), ';');
-    else
-    {
-        addToQueryString(filter, "wuTimeFrom", req.getStartDate(), ';');
-        addToQueryString(filter, "wuTimeTo", req.getEndDate(), ';');
-    }
-    addToQueryString(filter, "displayStart", s.append(displayStart).str(), ';');
-    addToQueryString(filter, "pageSize", s.clear().append(pageSize).str(), ';');
+        bool addToFilterString(const char *name, unsigned value)
+        {
+            if (isEmpty(name))
+                return false;
+            if (filterStr.length())
+                filterStr.append(';');
+            filterStr.append(name).append("=").append(value);
+            return true;
+        }
 
-    Owned<ArchivedWuCacheElement> found = archivedWuCache.lookup(context, filter, "AddWhenAvailable", cacheTime);
-    if (found)
-    {
-        hasNextPage = found->m_hasNextPage;
-        if (found->m_results.length())
+        void setFilterString()
         {
-            ForEachItemIn(ai, found->m_results)
+            addToFilterString("cluster", req.getCluster());
+            addToFilterString("owner", req.getOwner());
+            addToFilterString("jobName", req.getJobname());
+            addToFilterString("state", req.getState());
+            addToFilterString("timeFrom", req.getStartDate());
+            addToFilterString("timeTo", req.getEndDate());
+            addToFilterString("pageStart", pageFrom);
+            addToFilterString("pageSize", pageSize);
+            if (sashaServerIP && *sashaServerIP)
             {
-                Owned<IEspECLWorkunit> info= createECLWorkunit("","");
-                info->copy(found->m_results.item(ai));
-                results.append(*info.getClear());
+                addToFilterString("sashaServerIP", sashaServerIP.get());
+                addToFilterString("sashaServerPort", sashaServerPort);
             }
         }
-    }
-    else
-    {
-        IArrayOf<IEspECLWorkunit> resultList;
 
-        CDateTime timeTo = wuTimeTo;
-        __int64 totalWus = 0;
-        bool complete = false;
-        while (!complete)
-        {
-            CDateTime timeFrom = timeTo;
-            timeFrom.adjustTime(-1439); //one day earlier
-            if (dateLimit > 0 && wuTimeFrom > timeFrom)
-                timeFrom = wuTimeFrom;
+        void setSashaCommand(INode* sashaserver, ISashaCommand* cmd)
+        {
+            cmd->setAction(SCA_LIST);
+            cmd->setOutputFormat("owner,jobname,cluster,state");
+            cmd->setOnline(false);
+            cmd->setArchived(true);
+            cmd->setStart(pageFrom);
+            cmd->setLimit(pageSize+1); //read an extra WU to check hasMoreWU
+            if (notEmpty(req.getCluster()))
+                cmd->setCluster(req.getCluster());
+            if (notEmpty(req.getOwner()))
+                cmd->setOwner(req.getOwner());
+            if (notEmpty(req.getJobname()))
+                cmd->setJobName(req.getJobname());
+            if (notEmpty(req.getState()))
+                cmd->setState(req.getState());
+
+            StringBuffer timeFrom, timeTo;
+            readDateFilters(timeFrom, timeTo);
+            if (timeFrom.length())
+                cmd->setAfter(timeFrom.str());
+            if (timeTo.length())
+                cmd->setBefore(timeTo.str());
+
+            return;
+        }
+
+        void addArchivedWU(IArrayOf<IEspECLWorkunit>& archivedWUs, StringArray& wuDataArray, bool canAccess)
+        {
+            Owned<IEspECLWorkunit> info= createECLWorkunit("","");
+            const char* wuid = wuDataArray.item(0);
+            info->setWuid(wuid);
+            if (!canAccess)
+                info->setState("<Hidden>");
+            else
+            {
+                if (notEmpty(wuDataArray.item(1)))
+                    info->setOwner(wuDataArray.item(1));
+                if (notEmpty(wuDataArray.item(2)))
+                    info->setJobname(wuDataArray.item(2));
+                if (notEmpty(wuDataArray.item(3)))
+                    info->setCluster(wuDataArray.item(3));
+                if (notEmpty(wuDataArray.item(4)))
+                    info->setState(wuDataArray.item(4));
+            }
 
-            unsigned year0, month0, day0, hour0, minute0, second0, nano0;
-            timeFrom.getDate(year0, month0, day0, true);
-            timeFrom.getTime(hour0, minute0, second0, nano0, true);
-            VStringBuffer wuFrom("%4d%02d%02d%02d%02d", year0, month0, day0, hour0, minute0);
+            //Sort WUs by WUID
+            ForEachItemIn(i, archivedWUs)
+            {
+                IEspECLWorkunit& w = archivedWUs.item(i);
+                if (!isEmpty(w.getWuid()) && strcmp(wuid, w.getWuid())>0)
+                {
+                    archivedWUs.add(*info.getClear(), (aindex_t) i);
+                    return;
+                }
+            }
+            archivedWUs.append(*info.getClear());
+            return;
+        }
 
-            unsigned year, month, day, hour, minute, second, nano;
-            timeTo.getDate(year, month, day, true);
-            timeTo.getTime(hour, minute, second, nano, true);
-            VStringBuffer wuTo("%4d%02d%02d%02d%02d", year, month, day, hour, minute);
+    public:
+        IMPLEMENT_IINTERFACE_USING(CInterface);
 
-            __int64 begin = 0;
-            unsigned limit = 1000;
-            bool continueSashaLoop = true;
-            while (continueSashaLoop)
-            {
-                Owned<ISashaCommand> cmd = createSashaCommand();
+        CArchivedWUsReader(IEspContext& _context, const char* _sashaServerIP, unsigned _sashaServerPort, ArchivedWuCache& _archivedWuCache,
+            unsigned _cacheMinutes, unsigned _pageFrom, unsigned _pageSize, IEspWUQueryRequest& _req)
+            : context(_context), sashaServerIP(_sashaServerIP), sashaServerPort(_sashaServerPort),
+            archivedWuCache(_archivedWuCache), cacheMinutes(_cacheMinutes), pageFrom(_pageFrom), pageSize(_pageSize), req(_req)
+        {
+            hasMoreWU = false;
+            numberOfWUsReturned = 0;
+        };
 
-                cmd->setAction(SCA_LIST);
-                cmd->setOnline(false);
-                cmd->setArchived(true);
-                cmd->setAfter(wuFrom.str());
-                cmd->setBefore(wuTo.str());
-                cmd->setStart((unsigned)begin);
-                cmd->setLimit(limit);
-
-                if (notEmpty(req.getCluster()))
-                    cmd->setCluster(req.getCluster());
-                if (notEmpty(req.getOwner()))
-                    cmd->setOwner(req.getOwner());
-                if (notEmpty(req.getJobname()))
-                    cmd->setJobName(req.getJobname());
-                if (notEmpty(req.getState()))
-                    cmd->setState(req.getState());
-
-                cmd->setOutputFormat("owner,jobname,cluster,state");
+        void getArchivedWUs(IArrayOf<IEspECLWorkunit>& archivedWUs)
+        {
+            setFilterString();
+            Owned<ArchivedWuCacheElement> cachedResults = archivedWuCache.lookup(context, filterStr, "AddWhenAvailable", cacheMinutes);
+            if (cachedResults)
+            {
+                hasMoreWU = cachedResults->m_hasNextPage;
+                numberOfWUsReturned = cachedResults->numWUsReturned;
+                if (cachedResults->m_results.length())
+                {
+                    ForEachItemIn(ai, cachedResults->m_results)
+                        archivedWUs.append(*LINK(&cachedResults->m_results.item(ai)));
+                }
+            }
+            else
+            {
+                SocketEndpoint ep;
+                if (sashaServerIP && *sashaServerIP)
+                    ep.set(sashaServerIP, sashaServerPort);
+                else
+                    getSashaNode(ep);
+                Owned<INode> sashaserver = createINode(ep);
 
+                Owned<ISashaCommand> cmd = createSashaCommand();
+                setSashaCommand(sashaserver, cmd);
                 if (!cmd->send(sashaserver))
                 {
                     StringBuffer msg("Cannot connect to archive server at ");
@@ -2009,115 +2070,68 @@ void doWUQueryFromArchive(IEspContext &context, const char* sashaServerIP, unsig
                     throw MakeStringException(ECLWATCH_CANNOT_CONNECT_ARCHIVE_SERVER, "%s", msg.str());
                 }
 
-                unsigned actualCount = cmd->numIds();
-                if (actualCount < 1)
-                    break;
-
-                totalWus += actualCount;
-
-                if (actualCount < limit)
-                    continueSashaLoop = false;
+                numberOfWUsReturned = cmd->numIds();
+                hasMoreWU = (numberOfWUsReturned > pageSize);
+                if (hasMoreWU)
+                    numberOfWUsReturned--;
 
-                for (unsigned ii=0; ii<actualCount; ii++)
+                if (numberOfWUsReturned > 0)
                 {
-                    const char *csline = cmd->queryId(ii);
-                    if (!csline)
-                        continue;
+                    SecAccessFlags accessOwn, accessOthers;
+                    getUserWuAccessFlags(context, accessOwn, accessOthers, true);
 
-                    StringArray wuidArray;
-                    wuidArray.appendList(csline, ",");
-
-                    if (chooseWuAccessFlagsByOwnership(context.queryUserId(), cmd->queryOwner(), accessOwn, accessOthers) < SecAccess_Read)
-                        continue;
-
-                    const char* wuid = wuidArray.item(0);
-                    if (isEmpty(wuid))
-                        continue;
-
-                    __int64 addToPos = -1;
-                    ForEachItemIn(ridx, resultList)
+                    for (unsigned i=0; i<numberOfWUsReturned; i++)
                     {
-                        IEspECLWorkunit& w = resultList.item(ridx);
-                        if (isEmpty(w.getWuid()))
+                        const char *csline = cmd->queryId(i);
+                        if (!csline || !*csline)
                             continue;
 
-                        if (strcmp(wuid, w.getWuid())>0)
+                        StringArray wuDataArray;
+                        wuDataArray.appendList(csline, ",");
+
+                        const char* wuid = wuDataArray.item(0);
+                        if (isEmpty(wuid))
                         {
-                            addToPos = ridx;
-                            break;
+                            WARNLOG("Empty WUID in SCA_LIST response");
+                            continue;
                         }
+
+                        addArchivedWU(archivedWUs, wuDataArray, chooseWuAccessFlagsByOwnership(context.queryUserId(), wuDataArray.item(1), accessOwn, accessOthers) >= SecAccess_Read);
                     }
 
-                    if (addToPos < 0 && (ridx > displayEnd))
-                        continue;
-
-                    Owned<IEspECLWorkunit> info= createECLWorkunit("","");
-                    info->setWuid(wuid);
-                    info->setArchived(true);
-                    if (notEmpty(wuidArray.item(1)))
-                          info->setOwner(wuidArray.item(1));
-                    if (notEmpty(wuidArray.item(2)))
-                        info->setJobname(wuidArray.item(2));
-                    if (notEmpty(wuidArray.item(3)))
-                          info->setCluster(wuidArray.item(3));
-                    if (notEmpty(wuidArray.item(4)))
-                          info->setState(wuidArray.item(4));
-
-                    if (addToPos < 0)
-                        resultList.append(*info.getClear());
-                    else
-                        resultList.add(*info.getClear(), (aindex_t) addToPos);
-                    if (resultList.length() > displayEnd)
-                        resultList.pop();
+                    archivedWuCache.add(filterStr, "AddWhenAvailable", hasMoreWU, numberOfWUsReturned, archivedWUs);
                 }
-
-                begin += limit;
-            }
-
-            timeTo.adjustTime(-1440);//one day earlier
-            if (dateLimit > 0 && wuTimeFrom > timeTo) //we reach the date limit
-            {
-                if (totalWus <= displayEnd)
-                    hasNextPage = false;
-                complete = true;
             }
-            else if ( resultList.length() >= displayEnd) //we have all we need
-                complete = true;
-        }
-
-        if (displayEnd > resultList.length())
-            displayEnd = resultList.length();
-
-        for (aindex_t i = (aindex_t)displayStart; i < (aindex_t)displayEnd; i++)
-        {
-            Owned<IEspECLWorkunit> info = createECLWorkunit("","");
-            info->copy(resultList.item(i));
-            results.append(*info.getClear());
-        }
+            return;
+        };
 
-        archivedWuCache.add(filter, "AddWhenAvailable", hasNextPage, results);
-    }
+        bool getHasMoreWU() { return hasMoreWU; };
+        unsigned getNumberOfWUsReturned() { return numberOfWUsReturned; };
+    };
 
-    resp.setPageStartFrom(displayStart+1);
-    resp.setPageEndAt(displayEnd);
+    unsigned pageStart = (unsigned) req.getPageStartFrom();
+    unsigned pageSize = (unsigned) req.getPageSize();
+    if(pageSize < 1)
+        pageSize=500;
+    IArrayOf<IEspECLWorkunit> archivedWUs;
+    Owned<IArchivedWUsReader> archiveWUsReader = new CArchivedWUsReader(context, sashaServerIP, sashaServerPort, archivedWuCache,
+        cacheMinutes, pageStart, pageSize, req);
+    archiveWUsReader->getArchivedWUs(archivedWUs);
 
-    if(dateLimit < 1 || hasNextPage)
-        resp.setNextPage(displayStart + pageSize);
-    else
-        resp.setNextPage(-1);
+    resp.setWorkunits(archivedWUs);
+    resp.setNumWUs(archiveWUsReader->getNumberOfWUsReturned());
 
-    if(displayStart > 0)
-    {
+    resp.setType("archived only");
+    resp.setPageSize(pageSize);
+    resp.setPageStartFrom(pageStart+1);
+    resp.setPageEndAt(pageStart + archiveWUsReader->getNumberOfWUsReturned());
+    if(pageStart > 0)
+    { //This is not the first page;
         resp.setFirst(false);
-        if (displayStart - pageSize > 0)
-            resp.setPrevPage(displayStart - pageSize);
-        else
-            resp.setPrevPage(0);
+        resp.setPrevPage((pageStart > pageSize) ? pageStart - pageSize: 0);
     }
-
-    resp.setPageSize(pageSize);
-    resp.setWorkunits(results);
-    resp.setType("archived only");
+    if (archiveWUsReader->getHasMoreWU())
+        resp.setNextPage(pageStart + pageSize);
     return;
 }
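
The paging scheme above in isolation: the Sasha command is given a limit of pageSize+1, an extra row coming back means another page exists, and the surplus row is dropped from the count. fetchPage() below is a hypothetical stand-in for the SCA_LIST round trip; everything else is plain C++.

#include <algorithm>
#include <iostream>
#include <vector>

// Hypothetical stand-in for the SCA_LIST call (cmd->setStart/setLimit, then cmd->numIds()).
static std::vector<unsigned> fetchPage(unsigned start, unsigned limit)
{
    std::vector<unsigned> archive(1234);
    for (unsigned i = 0; i < archive.size(); ++i)
        archive[i] = i;
    unsigned begin = std::min<unsigned>(start, (unsigned)archive.size());
    unsigned end = std::min<unsigned>(start + limit, (unsigned)archive.size());
    return std::vector<unsigned>(archive.begin() + begin, archive.begin() + end);
}

int main()
{
    unsigned pageStart = 0, pageSize = 500;
    std::vector<unsigned> rows = fetchPage(pageStart, pageSize + 1);   // read one extra WU
    bool hasMoreWU = rows.size() > pageSize;
    if (hasMoreWU)
        rows.pop_back();                                               // numberOfWUsReturned--
    std::cout << "returned=" << rows.size() << " hasMoreWU=" << std::boolalpha << hasMoreWU << "\n";
    return 0;
}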
 

+ 1 - 0
esp/services/ws_workunits/ws_workunitsService.hpp

@@ -134,6 +134,7 @@ public:
         CriticalBlock b(crit);
     }
     IPropertyTreeIterator *findQueriesUsingFile(const char *target, const char *lfn);
+    IPropertyTreeIterator *findAllQueriesUsingFile(const char *lfn);
     StringBuffer &toStr(StringBuffer &s)
     {
         CriticalBlock b(crit);

+ 1 - 1
esp/src/eclwatch/QuerySetSuperFilesWidget.js

@@ -67,7 +67,7 @@ define([
                 columns: {
                     col1: selector({
                         width: 27,
-                        selectorType: 'checkbox',
+                        selectorType: 'checkbox'
                     }),
                     __hpcc_display: tree({
                         label: this.i18n.SuperFiles,

+ 2 - 1
esp/src/eclwatch/dojoConfig.js

@@ -39,7 +39,8 @@ var dojoConfig = (function () {
         },
         packages: [{
             name: "d3",
-            location: urlInfo.basePath + "/d3"
+            location: urlInfo.basePath + "/d3",
+            main:"d3"
         }, {
             name: "topojson",
             location: urlInfo.basePath + "/topojson"

+ 3 - 1
esp/src/eclwatch/nls/bs/hpcc.js

@@ -47,6 +47,7 @@
     ClusterProcesses: "Procesi Na Klasteru",
     Code: "Kod",
     Col: "Kolona",
+    CollapseAll: "Suzite sve",
     Command: "Komanda",
     Comment: "Komentar",
     Completed: "Kompletiran",
@@ -127,6 +128,7 @@
     EventText: "Opis Događaja",
     EventTextPH: "Tekst O Događaju",
     Exception: "Neočekivani Problem",
+    ExpandAll: "Proširite sve",
     FailIfNoSourceFile: "Neuspjeh Ukoliko Datoteka Ne Postoji",
     FetchingData: "U Procesu Dobavljnja Podataka...",
     fetchingresults: "dobijeni resultati",
@@ -170,7 +172,6 @@
     History: "Istorija",
     HPCCSystems: "HPCC Systems",
     ID: "Identifikator",
-    Import: "Uvoz",
     Inactive: "Neaktivan",
     Index: "Indeks",
     Info: "Informacija",
@@ -264,6 +265,7 @@
     OpenInNewPageNoFrame: "Otvorite na Novoj Stranici",
     OpenLegacyECLWatch: "Otvorite Stari Prevaziđeni ECL Watch",
     OpenSafeMode: "Otvorite (sigurni mod)",
+    OpenTreeMode: "Otvorite (kao drvo)",
     OpenSource: "Otvoreni Kod",
     Operations: "Operacije",
     Options: "Opcije",

+ 12 - 8
esp/src/eclwatch/nls/es/hpcc.js

@@ -37,7 +37,7 @@ define(
     Bottom: "Abajo",
     Cancel: "Cancelar",
     Clear: "Borrar",
-    Clear: "Clear",
+    Clear: "Despejar",
     ClearPermissionsCache: "Despejar Los Permisos Del Cache",
     ClearPermissionsCacheConfirm: "Esta seguro que desea despejar los permisos del Cache de DALI y ESP? Workunits en executo pueden demorarse asta que los Caches se actualicen",
     Clone: "Clonar",
@@ -48,6 +48,7 @@ define(
     ClusterProcesses: "Procesos de sistema",
     Code: "Código",
     Col: "Columna",
+    CollapseAll: "Colapsar Todo",
     Command: "Comando",
     Comment: "Comentario",
     Completed: "Completado",
@@ -126,7 +127,8 @@ define(
     EventScheduler: "Planificador de eventos",
     EventText: "Texto del Evento",
     EventTextPH: "Texto del evento",
-    Exception: "Exception",
+    Exception: "Excepción",
+    ExpandAll: "Expandir Todo",
     FailIfNoSourceFile: "Falla si no hay archivo de origen",
     FetchingData: "Obteniendo Data...",
     fetchingresults: "obteniendo resultados",
@@ -170,7 +172,6 @@ define(
     History: "Historia",
     HPCCSystems: "HPCC Systems",
     ID: "ID",
-    Import: "Importar",
     Inactive: "Inactivo",
     Index: "Indice",
     Info: "información ",
@@ -264,6 +265,7 @@ define(
     OpenLegacyECLWatch: "Abrir ECL Watch viejo",
     OpenSafeMode: "Abrir (modo seguro)",
     OpenSource: "Código Abierto",
+    OpenTreeMode: "Abrir (vista en arbol)",
     Operations: "Operaciones",
     Options: "Opciones",
     Outputs: "Salidas",
@@ -296,12 +298,13 @@ define(
     PlaceholderFirstName: "John",
     PlaceholderLastName: "Smith",
     Playground: "Patio de juegos",
-    Port: "Port",
+    Port: "Puerto",
     Prefix: "Prefijo",
     PrefixPlaceholder: "filename{:length}, filesize{:[B|L][1-8]}",
     Preview: "Presentación Preliminar",
     Priority: "Prioridad",
     Process: "Proceso",
+    ProcessFilter: "Filtro de Procesos",
     ProgressMessage: "Mensaje de progreso",
     Properties: "Propiedades",
     Property: "Propiedad",
@@ -326,6 +329,7 @@ define(
     ReleaseNotes: "Notas de la versión",
     Reload: "Recargar",
     Remaining: "Restante",
+    RemoteCopy: "Copia Remota",
     RemoteDali: "Dali remoto",
     RemoteDaliIP: "Remote&nbsp;Dali&nbsp;IP&nbsp;Address",
     Remove: "Remover",
@@ -384,14 +388,14 @@ define(
     Started: "Comenzado",
     State: "Estado",
     Stopped: "Detenido",
-    Subgraph: "Subgraph",
+    Subgraph: "Subgrafo",
     Subgraphs: "Sub-gráficos",
     Submit: "Enviar",
-    Subtype: "Subtype",
+    Subtype: "Subtipo",
     Summary: "Resumen",
     SummaryMessage: "Mensaje resumido",
-    SuperFile: "Super File",
     Superfile: "Superfile",
+    SuperFile: "Super File",
     SuperFiles: "Superfiles",
     SuperfilesOnly: "Solo Superfiles",
     Suspend: "Suspender",
@@ -477,8 +481,8 @@ define(
     User: "Usuario",
     UserDetails: "Detalles del Usuario",
     UserID: "ID del Usuarion",
-    Username: "NombreDeUsario",
     UserName: "Nombre de Usario",
+    Username: "NombreDeUsario",
     UserPermissions: "Permisos del Usuario",
     Users: "Usuarios",
     UseSingleConnection: "Use una sola conexión",

+ 1 - 1
esp/src/eclwatch/nls/hpcc.js

@@ -174,7 +174,6 @@ define({root:
     History: "History",
     HPCCSystems: "HPCC Systems",
     ID: "ID",
-    Import: "Import",
     Inactive: "Inactive",
     Index: "Index",
     Info: "Info",
@@ -336,6 +335,7 @@ define({root:
     ReleaseNotes: "Release Notes",
     Reload: "Reload",
     Remaining: "Remaining",
+    RemoteCopy: "Remote Copy",
     RemoteDali: "Remote Dali",
     RemoteDaliIP: "Remote Dali IP Address",
     Remove: "Remove",

+ 3 - 1
esp/src/eclwatch/nls/hr/hpcc.js

@@ -47,6 +47,7 @@
     ClusterProcesses: "Procesi Na Klasteru",
     Code: "Kod",
     Col: "Kolona",
+    CollapseAll: "Suzite sve",
     Command: "Komanda",
     Comment: "Komentar",
     Completed: "Kompletiran",
@@ -127,6 +128,7 @@
     EventText: "Opis Događaja",
     EventTextPH: "Tekst O Događaju",
     Exception: "Neočekivani Problem",
+    ExpandAll: "Proširite sve",
     FailIfNoSourceFile: "Neuspjeh Ukoliko Datoteka Ne Postoji",
     FetchingData: "U Procesu Dobavljnja Podataka...",
     fetchingresults: "dobijeni resultati",
@@ -170,7 +172,6 @@
     History: "Istorija",
     HPCCSystems: "HPCC Systems",
     ID: "Identifikator",
-    Import: "Uvoz",
     Inactive: "Neaktivan",
     Index: "Indeks",
     Info: "Informacija",
@@ -264,6 +265,7 @@
     OpenInNewPageNoFrame: "Otvorite na Novoj Stranici",
     OpenLegacyECLWatch: "Otvorite Stari Prevaziđeni ECL Watch",
     OpenSafeMode: "Otvorite (sigurni mod)",
+    OpenTreeMode: "Otvorite (kao stablo)",
     OpenSource: "Otvoreni Kod",
     Operations: "Operacije",
     Options: "Opcije",

+ 9 - 7
esp/src/eclwatch/nls/hu/hpcc.js

@@ -47,6 +47,7 @@ define(
     ClusterProcesses: "Klaszter feldolgozások",
     Code: "Kód",
     Col: "Oszlop",
+    CollapseAll: "Összes bezárása",
     Command: "Parancs",
     Comment: "Megjegyzés",
     Completed: "Kész",
@@ -118,7 +119,7 @@ define(
     Errors: "Hibák",
     ErrorsStatus: "Hibák/Figyelmeztetések",
     ErrorWarnings: "Hiba/Figyelmeztetés",
-    Escape: "Escape",
+    Escape: "Megszakít",
     ESPBuildVersion: "ESP Build verzió",
     ESPNetworkAddress: "ESP hálózati címe",
     EventName: "Esemény neve",
@@ -127,6 +128,7 @@ define(
     EventText: "Esemény szövege",
     EventTextPH: "Esemény szövege",
     Exception: "Kivétel",
+    ExpandAll: "Összes kinyitása",
     FailIfNoSourceFile: "Hibajelzés, ha nincs bemeneti file",
     FetchingData: "Adat lekérdezés...",
     fetchingresults: "eredemény lekérdezés",
@@ -170,7 +172,6 @@ define(
     History: "Történet",
     HPCCSystems: "HPCC Systems",
     ID: "Azonosító",
-    Import: "Import",
     Inactive: "Inaktív",
     Index: "Index",
     Info: "Információ",
@@ -182,8 +183,8 @@ define(
     IsLibrary: "Könyvtár?",
     Jobname: "Munka azonosító",
     JobName: "Munka azonosító",
-    jsmi: "jsmi*",
-    JSmith: "JSmit*",
+    jsmi: "jkis*",
+    JSmith: "JKis*",
     JSON: "JSON",
     Label: "Címke",
     LandingZone: "Lerakat",
@@ -213,7 +214,7 @@ define(
     LogicalFilesOnly: "Csak a logikai fájlokat",
     LogicalFileType: "Logikai fájl típusa",
     LogicalName: "Logikai név",
-    log_analysis_1: "log_analysis_1*",
+    log_analysis_1: "log_elemzés_1*",
     Low: "Alacsony",
     ManualCopy: "Használja a Ctr+C-t",
     Mappings: "Leképezések",
@@ -266,6 +267,7 @@ define(
     OpenLegacyECLWatch: "Legacy ECL Watch megnyitása",
     OpenSafeMode: "Megnyitás (biztonságos módban)",
     OpenSource: "Nyílt forráskódú",
+    OpenTreeMode: "Nyitva (fa nézetben)",
     Operations: "Műveletek",
     Options: "Opciók",
     Outputs: "Kimenet",
@@ -294,8 +296,8 @@ define(
     PermissionName: "Hozzáférési jog megnevezése",
     Permissions: "Jogosultságok",
     PlaceholderFindText: "Wuid, felhasználók, egyebek...",
-    PlaceholderFirstName: "John",
-    PlaceholderLastName: "Smith",
+    PlaceholderFirstName: "József",
+    PlaceholderLastName: "Kiss",
     Playground: "ECL teszt labor",
     Port: "Port",
     Prefix: "Előtag",

+ 3 - 1
esp/src/eclwatch/nls/sr/hpcc.js

@@ -48,6 +48,7 @@
     ClusterProcesses: "Процеси На Кластеру",
     Code: "Код",
     Col: "Колона",
+    CollapseAll: "Сузите све",
     Command: "Команда",
     Comment: "Коментар",
     Completed: "Комплетиран",
@@ -127,6 +128,7 @@
     EventText: "Опис Догађаја",
     EventTextPH: "Текст О Догађају",
     Exception: "Неочекивани Проблем",
+    ExpandAll: "Проширите све",
     FailIfNoSourceFile: "Неуспех Уколико Датотека Не Постоји",
     FetchingData: "У Процесу Добављња Података...",
     fetchingresults: "добијени ресултати",
@@ -170,7 +172,6 @@
     History: "Историја",
     HPCCSystems: "ХПЦЦ Системс",
     ID: "Идентификатор",
-    Import: "Увоз",
     Inactive: "Неактиван",
     Index: "Индекс",
     Info: "Информација",
@@ -264,6 +265,7 @@
     OpenInNewPageNoFrame: "Отворите на Новој Страници",
     OpenLegacyECLWatch: "Отворите Стари Превазиђени ЕЦЛ Вoч",
     OpenSafeMode: "Отворите (сигурни мод)",
+    OpenTreeMode: "Отворите (као дрво)",
     OpenSource: "Отворени Код",
     Operations: "Операције",
     Options: "Опције",

+ 3 - 1
esp/src/eclwatch/nls/zh/hpcc.js

@@ -47,6 +47,7 @@
     ClusterProcesses: "计算机集群进程",
     Code: "代码",
     Col: "列",
+    CollapseAll: "隐藏",
     Command: "指令",
     Comment: "注释",
     Completed: "完成",
@@ -126,6 +127,7 @@
     EventText: "事件内容",
     EventTextPH: "事件文字",
     Exception: "异常",
+    ExpandAll: "打开",
     FailIfNoSourceFile: "在无源文件时失效",
     FetchingData: "提取数据...",
     fetchingresults: "提取结果",
@@ -169,7 +171,6 @@
     History: "历史",
     HPCCSystems: "HPCC系统",
     ID: "标识",
-    Import: "输入",
     Inactive: "未激活",
     Index: "索引",
     Info: "信息",
@@ -263,6 +264,7 @@
     OpenInNewPageNoFrame: "在无框的新页面里打开",
     OpenLegacyECLWatch: "打开旧版本的ECLWatch",
     OpenSafeMode: "打开 (安全模式)",
+    OpenTreeMode: "打开 (树状模式)",
     OpenSource: "开源",
     Operations: "操作",
     Options: "选项",

+ 2 - 0
esp/src/eclwatch/package.js

@@ -4,6 +4,8 @@ var profile = (function(){
             "hpcc/eclwatch.profile": true,
             "hpcc/eclwatch.json": true,
             "hpcc/dojoConfig": true,
+            "hpcc/viz/DojoD3WordCloud": true,
+            "hpcc/viz/d3-cloud/d3.layout.cloud": true,
             "hpcc/viz/map/us.json": true,
             "hpcc/viz/map/us_counties.json": true
         };

+ 2 - 2
esp/src/eclwatch/templates/DFUQueryWidget.html

@@ -9,7 +9,7 @@
                     <div id="${id}Delete" data-dojo-attach-event="onClick:_onDelete" data-dojo-type="dijit.form.Button">${i18n.Delete}</div>
                     <span data-dojo-type="dijit.ToolbarSeparator"></span>
                     <div id="${id}ImportDropDown" data-dojo-type="dijit.form.DropDownButton">
-                        <span>${i18n.Import}</span>
+                        <span>${i18n.RemoteCopy}</span>
                         <div data-dojo-type="dijit.TooltipDialog">
                             <div id="${id}ImportForm" style="width: 460px;" onsubmit="return false;" data-dojo-props="region: 'bottom'" data-dojo-type="dijit.form.Form">
                                 <div data-dojo-type="dijit.Fieldset">
@@ -40,7 +40,7 @@
                                     </div>
                                 </div>
                                 <div class="dijitDialogPaneActionBar">
-                                    <button type="submit" data-dojo-attach-event="onClick:_onImportOk" data-dojo-type="dijit.form.Button">${i18n.Import}</button>
+                                    <button type="submit" data-dojo-attach-event="onClick:_onImportOk" data-dojo-type="dijit.form.Button">${i18n.Submit}</button>
                                 </div>
                             </div>
                         </div>

+ 5 - 2
esp/xslt/wsecl3_form.xsl

@@ -1235,8 +1235,11 @@ function switchInputForm()
             </xsl:variable>
             <!-- use tristate true/false/default -->
 
-            <xsl:text disable-output-escaping="yes"><![CDATA[<input class='tributton' type='text' readonly='1' size='6' onkeypress='onTriButtonKeyPress(this)' onClick='onClickTriButton(this, 1)' name=']]></xsl:text>
-            <xsl:value-of select="$fieldId"/>
+            <xsl:text disable-output-escaping="yes"><![CDATA[<input class='tributton' type='text' readonly='1' size='6' onkeypress='onTriButtonKeyPress(this)' onClick='onClickTriButton(this, 1)']]></xsl:text>
+            <xsl:if test="$checkval!='default'">
+              <xsl:text disable-output-escaping="yes"><![CDATA[ name=']]></xsl:text>
+              <xsl:value-of select="$fieldId"/>
+            </xsl:if>
             <xsl:text disable-output-escaping="yes"><![CDATA[' id=']]></xsl:text>
             <xsl:value-of select="$fieldId"/>
             <xsl:text disable-output-escaping="yes"><![CDATA[' value=']]></xsl:text>

+ 22 - 1
initfiles/bash/etc/init.d/hpcc-init.in

@@ -45,7 +45,6 @@
 ###<REPLACE>###
 
 
-
 function print_usage {
     echo "Usage: $0 [-c component] {start|stop|restart|status|setup}
        $0 [--componentlist] Display node component name list.
@@ -312,6 +311,8 @@ if [ ! -z "${compDafilesrv}" ];then
             /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
             if [ $? -ne 0 ];then
               /etc/init.d/dafilesrv $1 2>/dev/null
+            else
+              echo "Dependent service dafilesrv, ${compDafilesrv} is already running."
             fi
             ;;
         status|setup)
@@ -360,6 +361,11 @@ if [ ${cmd} = "restart" ] && [ "${isComp}" -eq 0 ]; then
     echo "Starting the entire System"
     echo "***************************************************"
     cmd=start
+    /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
+    if [ $? -ne 0 ];then
+        /etc/init.d/dafilesrv $1 2>/dev/null
+    fi
+
     statForStart=0
     for (( i=0; i<=${compListLen}; i++ ));do
         if [ -z "${compList[$i]}" ];then
@@ -416,4 +422,19 @@ for C in ${component} ; do
         STATUS=$(( $STATUS == 0 ? $statForEach : $STATUS))
     fi
 done 
+
+
+if [ "$cmd" = "stop" ] && [ -n "$compDafilesrv" ]
+then
+     echo 
+    /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
+    if [ $? -ne 0 ];then
+       echo "Service dafilesrv, ${compDafilesrv} is already stopped.".
+    else
+       echo "Service dafilesrv, ${compDafilesrv} is still running".
+       echo "To stop it, run \"service dafilesrv stop\"."
+    fi
+    echo 
+
+fi
 exit ${STATUS}

+ 1 - 1
roxie/ccd/ccdlistener.cpp

@@ -627,7 +627,7 @@ public:
         afor.For(activeChildren.ordinality()+(isMaster ? 0 : 1), 10);
         activeChildren.kill();
         if (mergedReply)
-            toXML(mergedReply, reply);
+            toXML(mergedReply, reply, 0, (mergeType == CascadeMergeQueries) ? XML_Format | XML_NewlinesOnly : XML_Format);
         if (logctx.queryTraceLevel() > 5)
             logctx.CTXLOG("doControlQuery (%d) finished: %.80s", isMaster, queryText);
     }
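
A hedged sketch of the new flag in use, mirroring this hunk: merged control-query replies are serialised with XML_NewlinesOnly (introduced in jptree.hpp further down) added to XML_Format; judging by the name this gives newline-separated but unindented output, though that effect is inferred, not verified here. The wrapper function is illustrative and assumes the usual jlib headers.

#include "jptree.hpp"    // toXML(), XML_Format, XML_NewlinesOnly
#include "jstring.hpp"   // StringBuffer

// Illustrative wrapper: same call shape as doControlQuery() above.
static void serialiseMergedReply(const IPropertyTree *mergedReply, StringBuffer &reply, bool mergeQueries)
{
    byte flags = mergeQueries ? (XML_Format | XML_NewlinesOnly) : XML_Format;
    toXML(mergedReply, reply, 0, flags);
}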

+ 1 - 1
system/jlib/jfile.cpp

@@ -2847,7 +2847,7 @@ void copyFile(const char *target, const char *source, size32_t buffersize, ICopy
 
 void copyFile(IFile * target, IFile * source, size32_t buffersize, ICopyFileProgress *progress, CFflags copyFlags)
 {
-    source->copyTo(target, buffersize, progress, copyFlags);
+    source->copyTo(target, buffersize, progress, false, copyFlags);
 }
 
 void doCopyFile(IFile * target, IFile * source, size32_t buffersize, ICopyFileProgress *progress, ICopyFileIntercept *copyintercept, bool usetmp, CFflags copyFlags)

+ 9 - 8
system/jlib/jptree.cpp

@@ -5243,7 +5243,7 @@ static void _toXML(const IPropertyTree *tree, IIOStream &out, unsigned indent, b
             {
                 if (first)
                 {
-                    if (flags & XML_Format) inlinebody = false;
+                    if (flags & XML_Format|XML_NewlinesOnly) inlinebody = false;
                     first = false;
                     writeCharToStream(out, ' ');
                 }
@@ -5291,7 +5291,7 @@ static void _toXML(const IPropertyTree *tree, IIOStream &out, unsigned indent, b
     bool empty;
     if (isBinary)
     {
-        if (flags & XML_Format) inlinebody = false;
+        if (flags & XML_Format|XML_NewlinesOnly) inlinebody = false;
         writeStringToStream(out, " xsi:type=\"SOAP-ENC:base64\"");
         empty = (!tree->getPropBin(NULL, thislevelbin))||(thislevelbin.length()==0);
     }
@@ -5308,11 +5308,11 @@ static void _toXML(const IPropertyTree *tree, IIOStream &out, unsigned indent, b
     }
     if (sub->first())
     {
-        if (flags & XML_Format) inlinebody = false;
+        if (flags & XML_Format|XML_NewlinesOnly) inlinebody = false;
     }
     else if (empty && !(flags & XML_Sanitize))
     {
-        if (flags & XML_Format)
+        if (flags & XML_Format|XML_NewlinesOnly)
             writeStringToStream(out, "/>\n");
         else
             writeStringToStream(out, "/>");
@@ -5367,7 +5367,8 @@ static void _toXML(const IPropertyTree *tree, IIOStream &out, unsigned indent, b
             else
                 JBASE64_Encode(thislevelbin.toByteArray(), thislevelbin.length(), out);
         }
-        else {
+        else
+        {
             if (flags & XML_NoEncode)
             {
                 writeStringToStream(out, thislevel);
@@ -5388,7 +5389,7 @@ static void _toXML(const IPropertyTree *tree, IIOStream &out, unsigned indent, b
                     encodeXML(s, out, ENCODE_WHITESPACE, (unsigned)-1, true);
                 }
             }
-            
+
             if (!inlinebody)
                 writeStringToStream(out, "\n");
         }
@@ -5398,7 +5399,7 @@ static void _toXML(const IPropertyTree *tree, IIOStream &out, unsigned indent, b
 
     writeStringToStream(out, "</");
     writeStringToStream(out, name);
-    if (flags & XML_Format)
+    if (flags & XML_Format|XML_NewlinesOnly)
         writeStringToStream(out, ">\n");
     else
         writeCharToStream(out, '>');
@@ -5585,7 +5586,7 @@ static void _toJSON(const IPropertyTree *tree, IIOStream &out, unsigned indent,
             isNull = (NULL == (thislevel = tree->queryProp(NULL)));
     }
 
-    if (isNull && !complex)
+    if (isNull && !root && !complex)
     {
         writeJSONValueToStream(out, NULL, delimit);
         return;
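
One reading note on the widened format tests: in C++, & binds tighter than |, so "flags & XML_Format|XML_NewlinesOnly" parses as "(flags & XML_Format) | XML_NewlinesOnly", which is non-zero whenever XML_NewlinesOnly is non-zero. The standalone sketch below contrasts that with an explicit mask over both bits; apart from XML_NewlinesOnly (0x80, defined in jptree.hpp below), the flag value is illustrative.

#include <cstdio>

int main()
{
    const unsigned XML_Format = 0x01;        // illustrative value, not taken from jptree.hpp
    const unsigned XML_NewlinesOnly = 0x80;  // value added in jptree.hpp below
    unsigned flags = 0;                      // neither formatting bit requested

    bool asWritten = flags & XML_Format | XML_NewlinesOnly;    // (flags & XML_Format) | 0x80 -> true
    bool withMask  = flags & (XML_Format | XML_NewlinesOnly);  // false: no bit of the mask is set

    std::printf("asWritten=%d withMask=%d\n", asWritten, withMask);
    return 0;
}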

+ 1 - 0
system/jlib/jptree.hpp

@@ -213,6 +213,7 @@ jlib_decl IPropertyTree *createPTreeFromJSONString(unsigned len, const char *jso
 #define XML_SanitizeAttributeValues 0x10
 #define XML_SingleQuoteAttributeValues 0x20
 #define XML_NoBinaryEncode64 0x40
+#define XML_NewlinesOnly 0x80
 
 jlib_decl StringBuffer &toXML(const IPropertyTree *tree, StringBuffer &ret, unsigned indent = 0, byte flags=XML_Format);
 jlib_decl void toXML(const IPropertyTree *tree, IIOStream &out, unsigned indent = 0, byte flags=XML_Format);

+ 1 - 1
system/jlib/jthread.cpp

@@ -1290,7 +1290,7 @@ public:
         }
     }
 
-    bool run(const char *_title,const char *prog,const char *dir,bool hasinput,bool hasoutput,bool haserror, size32_t stderrbufsize)
+    bool run(const char *_title,const char *prog,const char *dir,bool hasinput,bool hasoutput,bool haserror, size32_t stderrbufsize,bool newProcessGroup)
     {
         // size32_t stderrbufsize ignored as not required (I think)
         CriticalBlock block(sect);

+ 1 - 2
testing/regress/ecl-test

@@ -18,8 +18,6 @@
     limitations under the License.
 ############################################################################ */
 '''
-
-import argparse
 import logging
 import os
 import platform
@@ -27,6 +25,7 @@ import atexit
 import traceback
 import glob
 
+from hpcc.util import argparse
 from hpcc.regression.regress import Regression
 from hpcc.util.ecl.file import ECLFile
 from hpcc.util.util import checkPqParam,  getVersionNumbers,  checkXParam

Diff file is too large to display
+ 2361 - 0
testing/regress/hpcc/util/argparse.py


+ 7 - 0
testing/regress/hpcc/util/ecl/command.py

@@ -50,6 +50,13 @@ class ECLcmd(Shell):
 
         if cmd == 'publish':
             args.append(eclfile.getArchive())
+
+            name = kwargs.pop('name', False)
+            if not name:
+                name = eclfile.getBaseEclName()
+
+            args.append("--name=" + name)
+
         else:
             args.append('--noroot')
             server = kwargs.pop('server', False)

+ 3 - 0
testing/regress/hpcc/util/ecl/file.py

@@ -126,6 +126,9 @@ class ECLFile:
     def getBaseEcl(self):
         return self.ecl
 
+    def getBaseEclName(self):
+        return self.basename
+
     def getWuid(self):
         return self.wuid