
Merge branch 'candidate-5.4.0'

Signed-off-by: Richard Chapman <rchapman@hpccsystems.com>
Richard Chapman 10 years ago
parent
commit
1345c32095
41 changed files with 1,426 additions and 449 deletions
  1. CMakeLists.txt (+10 -2)
  2. dali/base/dasds.cpp (+2 -9)
  3. dali/base/dasds.ipp (+3 -15)
  4. docs/ECLLanguageReference/ECLR_mods/RecordStructure.xml (+37 -10)
  5. docs/ECLLanguageReference/ECLR_mods/Recrd-DATASET.xml (+122 -2)
  6. ecl/eclcc/eclcc.cpp (+2 -3)
  7. ecl/eclcc/reservedwords.cpp (+3 -1)
  8. ecl/hql/hqlexpr.cpp (+39 -0)
  9. ecl/hqlcpp/hqlsource.cpp (+8 -4)
  10. esp/src/eclwatch/WUDetailsWidget.js (+38 -3)
  11. esp/src/eclwatch/nls/bs/hpcc.js (+48 -1)
  12. esp/src/eclwatch/nls/hr/hpcc.js (+48 -1)
  13. esp/src/eclwatch/nls/sr/hpcc.js (+46 -0)
  14. esp/src/eclwatch/templates/WUDetailsWidget.html (+1 -0)
  15. initfiles/bash/etc/init.d/hpcc-init.in (+68 -56)
  16. initfiles/bash/etc/init.d/hpcc_common.in (+53 -111)
  17. initfiles/bash/etc/init.d/init-functions (+16 -0)
  18. initfiles/bash/etc/init.d/lock.sh (+8 -16)
  19. initfiles/bash/etc/init.d/pid.sh (+15 -6)
  20. initfiles/bin/init_configesp (+15 -1)
  21. initfiles/bin/init_dafilesrv.in (+22 -5)
  22. initfiles/bin/init_dali (+16 -0)
  23. initfiles/bin/init_dfuserver (+15 -1)
  24. initfiles/bin/init_eclagent.in (+16 -1)
  25. initfiles/bin/init_eclccserver (+9 -0)
  26. initfiles/bin/init_eclscheduler (+15 -0)
  27. initfiles/bin/init_esp (+11 -1)
  28. initfiles/bin/init_ftslave (+1 -1)
  29. initfiles/bin/init_roxie (+26 -5)
  30. initfiles/bin/init_sasha (+14 -0)
  31. initfiles/bin/init_thor (+43 -41)
  32. initfiles/bin/init_thorslave (+17 -15)
  33. initfiles/componentfiles/thor/start_backupnode.in (+30 -32)
  34. roxie/ccd/ccdserver.cpp (+41 -18)
  35. rtl/eclrtl/eclrtl.cpp (+25 -10)
  36. system/jlib/jptree.ipp (+23 -29)
  37. system/jlib/jthread.cpp (+44 -49)
  38. testing/regress/ecl/issue13588.ecl (+40 -0)
  39. testing/regress/ecl/key/issue13588.xml (+6 -0)
  40. testing/regress/ecl/key/realformat.xml (+378 -0)
  41. testing/regress/ecl/realformat.ecl (+52 -0)

+ 10 - 2
CMakeLists.txt

@@ -138,9 +138,15 @@ else()
     set(CPACK_PACKAGE_NAME "hpccsystems-clienttools-${majorver}.${minorver}")
     set(PACKAGE_FILE_NAME_PREFIX  "hpccsystems-clienttools-${projname}")
 endif()
+
+set (VER_SEPARATOR "-")
+if ("${stagever}" MATCHES "^rc[0-9]+$")
+    set (VER_SEPARATOR "~")
+endif()
+
 SET(CPACK_PACKAGE_VERSION_MAJOR ${majorver})
 SET(CPACK_PACKAGE_VERSION_MINOR ${minorver})
-SET(CPACK_PACKAGE_VERSION_PATCH ${point}-${stagever})
+SET(CPACK_PACKAGE_VERSION_PATCH ${point}${VER_SEPARATOR}${stagever})
 set ( CPACK_PACKAGE_CONTACT "HPCCSystems <ossdevelopment@lexisnexis.com>" )
 set( CPACK_SOURCE_GENERATOR TGZ )
 
@@ -158,6 +164,8 @@ set(CPACK_SYSTEM_NAME "${CMAKE_SYSTEM_NAME}-${CPACK_RPM_PACKAGE_ARCHITECTURE}")
 if ("${CMAKE_BUILD_TYPE}" STREQUAL "Release")
     set(CPACK_STRIP_FILES TRUE)
 endif()
+
+
 if ( APPLE )
 elseif ( UNIX )
     EXECUTE_PROCESS (
@@ -195,7 +203,7 @@ elseif ( UNIX )
     endif ()
 endif ()
 MESSAGE ( "-- Current release version is ${CPACK_PACKAGE_FILE_NAME}" )
-set( CPACK_SOURCE_PACKAGE_FILE_NAME "${PACKAGE_FILE_NAME_PREFIX}_${CPACK_RPM_PACKAGE_VERSION}-${version}" )
+set( CPACK_SOURCE_PACKAGE_FILE_NAME "${PACKAGE_FILE_NAME_PREFIX}_${CPACK_RPM_PACKAGE_VERSION}-${stagever}" )
 set( CPACK_SOURCE_GENERATOR TGZ )
 set( CPACK_SOURCE_IGNORE_FILES
         "~$"

+ 2 - 9
dali/base/dasds.cpp

@@ -2353,15 +2353,13 @@ CRemoteTreeBase *CRemoteTreeBase::createChild(int pos, const char *childName)
 static CheckedCriticalSection suppressedOrphanUnlockCrit; // to temporarily suppress unlockall
 static bool suppressedOrphanUnlock=false;
 
-#ifdef __64BIT__
-#pragma pack(push,1)    // 64bit pack CServerRemoteTree's    (could do for 32bit also)
-#endif
-
 #if defined(new)
 #define __old_new new
 #undef new
 #endif
 
+//Do not override the packing for this class - otherwise the fixed size allocator will allocate
+//misaligned objects, which can cause problems on some architectures (especially for atomic operations)
 class CServerRemoteTree : public CRemoteTreeBase
 {
     DECL_NAMEDCOUNT;
@@ -2819,11 +2817,6 @@ public:
 #endif
 
 
-#ifdef __64BIT__
-#pragma pack(pop)   // 64bit pack CServerRemoteTree's    (could do for 32bit also)
-#endif
-
-
 void populateWithServerIds(IPropertyTree *matchParent, CRemoteTreeBase *parent)
 {
     matchParent->setPropInt64("@serverId", parent->queryServerId());
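
The comment added above explains why the pack pragmas are removed: the fixed-size allocator sizes and places blocks from the class layout, and a 1-byte-packed layout leaves pointer and 64-bit members on odd offsets. As a rough standalone illustration (a minimal sketch with hypothetical fields, not HPCC code), compare a packed and a naturally aligned layout:

// Minimal sketch, hypothetical fields -- not HPCC code.
#include <cstddef>
#include <cstdio>

#pragma pack(push, 1)
struct PackedNode
{
    char tag;              // 1 byte
    void *owner;           // lands at offset 1: a misaligned pointer
    long long serverId;    // also misaligned
};
#pragma pack(pop)

struct NaturalNode         // default layout: the compiler pads for alignment
{
    char tag;
    void *owner;
    long long serverId;
};

int main()
{
    std::printf("packed : size=%zu align=%zu owner offset=%zu\n",
                sizeof(PackedNode), alignof(PackedNode),
                offsetof(PackedNode, owner));
    std::printf("natural: size=%zu align=%zu owner offset=%zu\n",
                sizeof(NaturalNode), alignof(NaturalNode),
                offsetof(NaturalNode, owner));
    return 0;
}

With GCC or Clang the packed variant reports alignment 1 and places the pointer at offset 1; atomic or interlocked operations on such members can fault or lose atomicity on some architectures, which is exactly the hazard the new comment describes.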

+ 3 - 15
dali/base/dasds.ipp

@@ -105,12 +105,10 @@ interface ITrackChanges
     virtual void registerPropAppend(size32_t l) = 0;
 };
 
-class ChangeInfo : public CInterface, implements IInterface
+class ChangeInfo : public CInterfaceOf<IInterface>
 {
     DECL_NAMEDCOUNT;
 public:
-    IMPLEMENT_IINTERFACE;
-
     ChangeInfo(IPropertyTree &_owner) : owner(&_owner) { INIT_NAMEDCOUNT; tree.setown(createPTree(RESERVED_CHANGE_NODE)); }
     const IPropertyTree *queryOwner() const { return owner; }
     const void *queryFindParam() const { return &owner; }
@@ -193,7 +191,7 @@ private: // data
 
 ///////////////////
 
-class CPTStack : public CInterface, public IArrayOf<PTree>
+class CPTStack : public IArrayOf<PTree>
 {
     bool _fill(IPropertyTree &root, const char *xpath, IPropertyTree &tail);
 public:
@@ -227,10 +225,6 @@ class CBranchChange;
 ///////////////////
 class CSubscriberContainerList;
 
-#ifdef __64BIT__
-#pragma pack(push,1)    // 64bit pack CRemoteTree's  (could probably do for 32bit also)
-#endif
-
 class CRemoteTreeBase : public PTree
 {
 public:
@@ -267,10 +261,6 @@ protected: // data
     __int64 serverId;
 };
 
-#ifdef __64BIT__
-#pragma pack(pop)   
-#endif
-
 class CTrackChanges
 {
 public:
@@ -581,7 +571,7 @@ public:
     virtual unsigned queryConnections() { return connections.ordinality(); }
 };
 
-class CXPathIterator : public CInterface, implements IPropertyTreeIterator
+class CXPathIterator : public CInterfaceOf<IPropertyTreeIterator>
 {
     DECL_NAMEDCOUNT;
     IPropertyTree *root;
@@ -593,8 +583,6 @@ class CXPathIterator : public CInterface, implements IPropertyTreeIterator
     IPTIteratorCodes flags;
     bool validateServerIds;
 public:
-    IMPLEMENT_IINTERFACE;
-
     CXPathIterator(IPropertyTree *_root, IPropertyTree *_matchTree, IPTIteratorCodes _flags) : root(_root), matchTree(_matchTree), flags(_flags)
     {
         INIT_NAMEDCOUNT;
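
Several classes in this header (ChangeInfo, CXPathIterator) drop the separate CInterface base and the IMPLEMENT_IINTERFACE macro in favour of CInterfaceOf<>. As a rough sketch of the pattern (illustrative only, with hypothetical names; not the actual jlib definition), a template base of this shape folds the reference counting and the interface implementation into one place, so each class body no longer needs the macro:

// Illustrative sketch only -- hypothetical names following the pattern, not jlib's code.
#include <atomic>

struct ISketch                         // stand-in for IInterface
{
    virtual void Link() const = 0;
    virtual bool Release() const = 0;
protected:
    virtual ~ISketch() {}
};

template <class INTERFACE>
class CInterfaceOfSketch : public INTERFACE
{
    mutable std::atomic<unsigned> refs{1};
public:
    void Link() const override { refs.fetch_add(1, std::memory_order_relaxed); }
    bool Release() const override
    {
        if (refs.fetch_sub(1, std::memory_order_acq_rel) == 1)
        {
            delete this;
            return true;
        }
        return false;
    }
protected:
    virtual ~CInterfaceOfSketch() {}
};

// A class now just derives; Link()/Release() come from the base,
// so no IMPLEMENT_IINTERFACE-style macro is needed in the body.
class ChangeInfoSketch : public CInterfaceOfSketch<ISketch>
{
};

The exact jlib implementation may differ in detail, but the net effect in this diff is the same: ChangeInfo and CXPathIterator no longer carry their own IMPLEMENT_IINTERFACE expansion.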

+ 37 - 10
docs/ECLLanguageReference/ECLR_mods/RecordStructure.xml

@@ -82,7 +82,7 @@
           <emphasis>#OPTION(maxLength,####)</emphasis> to change the default).
           The maximum record size should be set as conservatively as possible,
           and is better set on a per-field basis (see the <emphasis
-          role="bold">Field Modifiers</emphasis>section below).</entry>
+          role="bold">Field Modifiers </emphasis>section below).</entry>
         </row>
 
         <row>
@@ -598,11 +598,11 @@ END;</programlisting>
             XPATH(</emphasis>'<emphasis>tag</emphasis>'<emphasis role="bold">)
             }</emphasis></entry>
 
-            <entry>Specifies the XML <emphasis>tag</emphasis> that contains
-            the data, in a RECORD structure that defines XML data. This
-            overrides the default <emphasis>tag</emphasis> name (the lowercase
-            field <emphasis>identifier</emphasis>). See the <emphasis
-            role="bold">XPATH Support</emphasis> section below for
+            <entry>Specifies the XML or JSON <emphasis>tag</emphasis> that
+            contains the data, in a RECORD structure that defines XML or JSON
+            data. This overrides the default <emphasis>tag</emphasis> name
+            (the lowercase field <emphasis>identifier</emphasis>). See the
+            <emphasis role="bold">XPATH Support</emphasis> section below for
             details.</entry>
           </row>
 
@@ -739,7 +739,7 @@ END;</programlisting>
     multiple times, you must use the ordinal operation (for example,
     /foo[1]/bar) to explicit select the first occurrence.</para>
 
-    <para>For XML DATASET reading and processing results of the
+    <para>For XML or JSON DATASETs reading and processing results of the
     SOAPCALL<indexterm>
         <primary>SOAPCALL</primary>
       </indexterm> function, the following XPATH syntax is specifically
@@ -819,12 +819,12 @@ SET OF STRING Npeople{xpath('Name')};
 SET OF STRING Xpeople{xpath('/Name/@id')};
     //matches: &lt;Name id='Kevin'/&gt;&lt;Name id='Richard'/&gt;</programlisting>
 
-    <para>For writing XML files using OUTPUT, the rules are similar with the
-    following exceptions:</para>
+    <para>For writing XML or JSON files using OUTPUT, the rules are similar
+    with the following exceptions:</para>
 
     <itemizedlist>
       <listitem>
-        <para>For scalar fields, simple tag names and XML attributes are
+        <para>For scalar fields, simple tag names and XML/JSON attributes are
         supported.</para>
       </listitem>
 
@@ -1017,6 +1017,33 @@ OUTPUT(ds,,'~RTTEST::XMLtest2',
      &lt;/RECORDS&gt;
  */</programlisting>
 
+    <para>XPATH can also be used to define a JSON file</para>
+
+    <programlisting>/* a JSON  file called "MyBooks.json" contains this data:
+[
+  {
+    "id" : "978-0641723445",
+    "name" : "The Lightning Thief",
+    "author" : "Rick Riordan"
+  }
+,
+  {
+    "id" : "978-1423103349",
+    "name" : "The Sea of Monsters",
+    "author" : "Rick Riordan"
+  }
+]
+*/
+
+BookRec := RECORD
+  STRING ID {XPATH('id')}; //data from id tag -- renames field to uppercase
+  STRING title {XPATH('name')}; //data from name tag, renaming the field
+  STRING author; //data from author tag, tag name is lowercase and matches field name  
+END;
+
+books := DATASET('~jd::mybooks.json',BookRec,JSON('/'));
+OUTPUT(books);</programlisting>
+
     <para>See Also: <link linkend="DATASET">DATASET</link>, <link
     linkend="DICTIONARY">DICTIONARY</link>, <link
     linkend="INDEX_record_structure">INDEX</link>, <link

+ 122 - 2
docs/ECLLanguageReference/ECLR_mods/Recrd-DATASET.xml

@@ -96,7 +96,7 @@
 
           <entry>One of the following keywords, optionally followed by
           relevant options for that specific type of file: THOR /FLAT, CSV,
-          XML, PIPE. Each of these is discussed in its own section,
+          XML, JSON, PIPE. Each of these is discussed in its own section,
           below.</entry>
         </row>
 
@@ -302,7 +302,7 @@
 
   <para>The first two forms are alternatives to each other and either may be
   used with any of the <emphasis>filetypes</emphasis> described below
-  (<emphasis role="bold">THOR/FLAT, CSV, XML, PIPE</emphasis>).</para>
+  (<emphasis role="bold">THOR/FLAT, CSV, XML, JSON, PIPE</emphasis>).</para>
 
   <para>The third form defines the result of an OUTPUT with the NAMED option
   within the same workunit or the workunit specified by the
@@ -819,6 +819,126 @@ END;
 books := DATASET('MyFile',rform,XML('library/book'));</programlisting>
   </sect2>
 
+  <sect2 id="JSON_Files">
+    <title>JSON Files</title>
+
+    <para><emphasis> attr</emphasis><emphasis role="bold"> :=
+    DATASET(</emphasis><emphasis> file, struct, </emphasis><emphasis
+    role="bold">JSON<indexterm>
+        <primary>JSON</primary>
+      </indexterm>( </emphasis><emphasis>xpath</emphasis><emphasis
+    role="bold"> [, NOROOT<indexterm>
+        <primary>NOROOT</primary>
+      </indexterm> ] ) [,ENCRYPT<indexterm>
+        <primary>ENCRYPT</primary>
+      </indexterm>(</emphasis><emphasis>key</emphasis><emphasis role="bold">)
+    ]);</emphasis></para>
+
+    <informaltable colsep="1" frame="all" rowsep="1">
+      <tgroup cols="2">
+        <colspec align="left" colwidth="122.40pt" />
+
+        <colspec />
+
+        <tbody>
+          <row>
+            <entry><emphasis role="bold">JSON</emphasis></entry>
+
+            <entry>Specifies the <emphasis>file</emphasis> is a JSON
+            file.</entry>
+          </row>
+
+          <row>
+            <entry><emphasis>xpath</emphasis></entry>
+
+            <entry>A string constant containing the full XPATH to the tag that
+            delimits the records in the <emphasis>file</emphasis>.</entry>
+          </row>
+
+          <row>
+            <entry><emphasis role="bold">NOROOT</emphasis></entry>
+
+            <entry>Specifies the <emphasis>file</emphasis> is a JSON file with
+            no root level markup, only a collection of objects.</entry>
+          </row>
+
+          <row>
+            <entry><emphasis role="bold"><emphasis
+            role="bold">ENCRYPT</emphasis></emphasis></entry>
+
+            <entry>Optional. Specifies the <emphasis>file</emphasis> was
+            created by OUTPUT with the ENCRYPT option.</entry>
+          </row>
+
+          <row>
+            <entry><emphasis>key</emphasis></entry>
+
+            <entry>A string constant containing the encryption key used to
+            create the file.</entry>
+          </row>
+        </tbody>
+      </tgroup>
+    </informaltable>
+
+    <para>This form is used to read a JSON file. The
+    <emphasis>xpath</emphasis> parameter defines the path used to locate
+    records within the JSON content using a subset of standard XPATH
+    (<emphasis role="underline">www.w3.org/TR/xpath</emphasis>) syntax (see
+    the <emphasis role="bold">XPATH Support</emphasis> section under the
+    RECORD structure discussion for a description of the supported
+    subset).</para>
+
+    <para>The key to getting individual field values from the JSON lies in the
+    RECORD structure<indexterm>
+        <primary>RECORD structure</primary>
+      </indexterm> field definitions. If the field name exactly matches a
+    lower case JSON tag containing the data, then nothing special is required.
+    Otherwise, <emphasis>{xpath(xpathtag)} </emphasis>appended to the field
+    name (where the <emphasis>xpathtag</emphasis> is a string constant
+    containing standard XPATH syntax) is required to extract the data. An
+    XPATH consisting of empty quotes ('') indicates the field receives the
+    entire record. An absolute XPATH is used to access properties of child
+    elements. Because JSON is case sensitive, and ECL identifiers are case
+    insensitive, xpaths need to be specified if the tag contains any upper
+    case characters.</para>
+
+    <para><emphasis role="bold">NOTE:</emphasis> JSON reading and parsing can
+    consume a large amount of memory, depending on the usage. In particular,
+    if the specified xpath matches a very large amount of data, then a large
+    data structure will be provided to the transform. Therefore, the more you
+    match, the more resources you consume per match. For example, if you have
+    a very large document and you match an element near the root that
+    virtually encompasses the whole thing, then the whole thing will be
+    constructed as a referenceable structure that the ECL can get at.</para>
+
+    <para><emphasis role="bold">Example:</emphasis></para>
+
+    <programlisting>/* a JSON  file called "MyBooks.json" contains this data:
+[
+  {
+    "id" : "978-0641723445",
+    "name" : "The Lightning Thief",
+    "author" : "Rick Riordan"
+  }
+,
+  {
+    "id" : "978-1423103349",
+    "name" : "The Sea of Monsters",
+    "author" : "Rick Riordan"
+  }
+]
+*/
+
+BookRec := RECORD
+  STRING ID {XPATH('id')}; //data from id tag -- renames field to uppercase
+  STRING title {XPATH('name')}; //data from name tag, renaming the field
+  STRING author; //data from author tag -- tag name is lowercase and matches field name  
+END;
+
+books := DATASET('~jd::mybooks.json',BookRec,JSON('/'));
+OUTPUT(books);</programlisting>
+  </sect2>
+
   <sect2 id="PIPE_Files">
     <title>PIPE Files<indexterm>
         <primary>PIPE Files</primary>

+ 2 - 3
ecl/eclcc/eclcc.cpp

@@ -436,11 +436,10 @@ static int doMain(int argc, const char *argv[])
     if (!processor.parseCommandLineOptions(argc, argv))
         return 1;
 
-    if (processor.printKeywordsToXml())
-        return 0;
-
     try
     {
+        if (processor.printKeywordsToXml())
+            return 0;
         if (!processor.processFiles())
             return 2;
     }

+ 3 - 1
ecl/eclcc/reservedwords.cpp

@@ -575,8 +575,10 @@ void printKeywordsToXml()
      buffer.append("</xml>\n");
 
      Owned<IFile> treeFile = createIFile("ECLKeywords.xml");
+     assertex(treeFile);
      Owned<IFileIO> io = treeFile->open(IFOcreaterw);
+     assertex(io);
      Owned<IIOStream> out = createIOStream(io);
-
+     assertex(out);
      out->write(buffer.length(), buffer.str());
 }

+ 39 - 0
ecl/hql/hqlexpr.cpp

@@ -12342,6 +12342,34 @@ IHqlExpression * ensureActiveRow(IHqlExpression * expr)
     return createRow(no_activerow, LINK(expr->queryNormalizedSelector()));
 }
 
+static void ensureSerialized(HqlExprArray & assigns, IHqlExpression *transform, IHqlExpression * srcRecord, IHqlExpression * tgtRecord, IAtom * serialForm)
+{
+    OwnedHqlExpr childSelf = createSelector(no_self, tgtRecord, NULL);
+    ForEachChild(i, srcRecord)
+    {
+        IHqlExpression * src = srcRecord->queryChild(i);
+        IHqlExpression * tgt = tgtRecord->queryChild(i);
+        dbgassertex(src->getOperator() == tgt->getOperator());
+        switch (src->getOperator())
+        {
+        case no_field:
+            {
+                OwnedHqlExpr  match = getExtractSelect(transform, src, false);
+                assertex(match);
+                OwnedHqlExpr lhs = createSelectExpr(LINK(childSelf), LINK(tgt));
+                assigns.append(*createAssign(lhs.getClear(), ensureSerialized(match, serialForm)));
+                break;
+            }
+        case no_record:
+            ensureSerialized(assigns, transform, src, tgt, serialForm);
+            break;
+        case no_ifblock:
+            ensureSerialized(assigns, transform, src->queryChild(1), tgt->queryChild(1), serialForm);
+            break;
+        }
+    }
+}
+
 IHqlExpression * ensureSerialized(IHqlExpression * expr, IAtom * serialForm)
 {
     ITypeInfo * type = expr->queryType();
@@ -12349,6 +12377,17 @@ IHqlExpression * ensureSerialized(IHqlExpression * expr, IAtom * serialForm)
     if (type == serialType)
         return LINK(expr);
 
+    if (expr->getOperator() == no_createrow)
+    {
+        IHqlExpression * serialRecord = queryRecord(serialType);
+        IHqlExpression * exprRecord = expr->queryRecord();
+        IHqlExpression * transform = expr->queryChild(0);
+        HqlExprArray assigns;
+        ensureSerialized(assigns, transform, exprRecord, serialRecord, serialForm);
+        OwnedHqlExpr newTransform = createValue(no_transform, makeTransformType(serialRecord->getType()), assigns);
+        return createRow(no_createrow, newTransform.getClear());
+    }
+
     HqlExprArray args;
     args.append(*LINK(expr));
     args.append(*createAttribute(serialForm));

+ 8 - 4
ecl/hqlcpp/hqlsource.cpp

@@ -410,9 +410,13 @@ static void createPhysicalLogicalAssigns(HqlExprArray & assigns, IHqlExpression
                 {
                     IHqlExpression * curPhysical = nextDiskField(diskRecord, diskIndex);
                     OwnedHqlExpr physicalSelect = createSelectExpr(LINK(diskDataset), LINK(curPhysical));
-                    if (cur->isDatarow() && !cur->hasAttribute(blobAtom) && !isInPayload())
+                    if (cur->isDatarow() && !cur->hasAttribute(blobAtom) && (!isInPayload() || (physicalSelect->queryType() != target->queryType())))
                     {
-                        createPhysicalLogicalAssigns(assigns, target, curPhysical->queryRecord(), cur->queryRecord(), physicalSelect, allowTranslate, NotFound);
+                        HqlExprArray subassigns;
+                        OwnedHqlExpr childSelf = createSelector(no_self, cur, NULL);
+                        createPhysicalLogicalAssigns(subassigns, childSelf, curPhysical->queryRecord(), cur->queryRecord(), physicalSelect, false, NotFound);
+                        OwnedHqlExpr transform = createValue(no_transform, makeTransformType(cur->queryRecord()->getType()), subassigns);
+                        newValue.setown(createRow(no_createrow, transform.getClear()));
                     }
                     else
                         newValue.setown(convertIndexPhysical2LogicalValue(cur, physicalSelect, allowTranslate));
@@ -500,10 +504,10 @@ static IHqlExpression * createPhysicalIndexRecord(HqlMapTransformer & mapper, IH
             {
                 //This should support other non serialized formats.  E.g., link counted strings. 
                 //Simplest would be to move getSerializedForm code + call that first.
-                if (cur->hasAttribute(_linkCounted_Atom))
+                if (cur->hasAttribute(_linkCounted_Atom) || cur->isDatarow())
                 {
                     newField = getSerializedForm(cur, diskAtom);
-                    assertex(newField != cur);
+                    assertex(newField != cur || cur->isDatarow());
                 }
                 else
                 {

+ 38 - 3
esp/src/eclwatch/WUDetailsWidget.js

@@ -54,6 +54,7 @@ define([
     "dijit/form/Button",
     "dijit/form/DropDownButton",
     "dijit/form/ValidationTextBox",
+    "dijit/form/Select",
     "dijit/Toolbar",
     "dijit/ToolbarSeparator",
     "dijit/TooltipDialog",
@@ -114,6 +115,8 @@ define([
             this.zapDescription = registry.byId(this.id + "ZapDescription");
             this.warnHistory = registry.byId(this.id + "WarnHistory");
             this.warnTimings = registry.byId(this.id + "WarnTimings");
+            this.clusters = registry.byId(this.id + "Clusters");
+            this.allowedClusters = registry.byId(this.id + "AllowedClusters");
 
             this.infoGridWidget = registry.byId(this.id + "InfoContainer");
             this.zapDialog = registry.byId(this.id + "ZapDialog");
@@ -141,10 +144,12 @@ define([
             var protectedCheckbox = registry.byId(this.id + "Protected");
             var context = this;
             this.wu.update({
-                Scope: dom.byId(context.id + "Scope").value,
-                Description: dom.byId(context.id + "Description").value,
+                State: dom.byId(this.id + "State").innerHTML,
                 Jobname: dom.byId(context.id + "Jobname").value,
-                Protected: protectedCheckbox.get("value")
+                Description: dom.byId(context.id + "Description").value,
+                Protected: protectedCheckbox.get("value"),
+                Scope: dom.byId(context.id + "Scope").value,
+                ClusterSelection: this.allowedClusters.get("value")
             }, null);
         },
         _onRestore: function (event) {
@@ -236,6 +241,7 @@ define([
                 this.wu.refresh();
             }
             this.infoGridWidget.init(params);
+            this.checkIfClustersAllowed();
         },
 
         initTab: function () {
@@ -320,6 +326,35 @@ define([
             return text;
         },
 
+        checkIfClustersAllowed: function () {
+            var context = this;
+            WsWorkunits.WUInfo({
+                request: {
+                    Wuid: this.wu.Wuid
+                }
+            }).then(function (response) {
+                if (lang.exists("WUInfoResponse.Workunit.AllowedClusters.AllowedCluster", response)) {
+                    var targetData = response.WUInfoResponse.Workunit.AllowedClusters.AllowedCluster;
+                    if (targetData.length >= 1) {
+                        context.allowedClusters.options.push({
+                            label: "&nbsp;",
+                            value: ""
+                        });
+                        for (var i = 0; i < targetData.length; ++i) {
+                            context.allowedClusters.options.push({
+                                label: targetData[i],
+                                value: targetData[i]
+                            });
+                        }
+                        context.allowedClusters.set("value", "")
+                        domClass.add(context.id + "Cluster", "hidden");
+                    } else {
+                        domClass.add(context.id + "AllowedClusters", "hidden");
+                    }
+                }
+            });
+        },
+
         updateInput: function (name, oldValue, newValue) {
             var registryNode = registry.byId(this.id + name);
             if (registryNode) {

+ 48 - 1
esp/src/eclwatch/nls/bs/hpcc.js

@@ -33,6 +33,7 @@
     ArchivedOnly: "Samo Arhiviran",
     ArchivedWarning: "Upozorenje: koristite kratak vremenski period. Ako koristite duži vremenski period, pretraživanje radnih jedinica može trajati duže od dozviljenog vremena za pretraživanje .",
     AutoRefresh: "Osvježi",
+    Back: "Nazad",
     BannerColor: "Boja Reklamnog Bloka",
     BannerMessage: "Poruka za Reklamni Blok",
     BannerScroll: "Kretanje Reklamnog Bloka",
@@ -59,11 +60,14 @@
     Command: "Komanda",
     Comment: "Komentar",
     Completed: "Kompletiran",
+    ComplexityWarning: "Više od praga {threshold} aktivnosti ({activityCount}) - zaustavite prikaz podataka?",
     Component: "Komponenta",
     Compress: "Sabijte",
     Compressed: "Komprimirani",
+    CompressedFileSize: "Komprimirana Veličina Datoteke",
     Configuration: "Konfiguracija",
     ConfirmPassword: "Potvrdite Lozinku",
+    ConfirmRemoval: "Jeste li sigurni da to želite učiniti?",
     Content: "Sadržaj",
     Contents: "Sadržaji",
     ContentType: "Vrsta Sadržaja",
@@ -78,12 +82,14 @@
     Debug: "Otklonite Neispravnosti",
     DEF: "DEF",
     Delete: "Obrišite",
+    Deleted: "Izbrisan",
     DeleteSelectedFiles: "Obrišite Odabrane Datoteke?",
     DeleteSelectedGroups: "Obrišite odabranu(e) grupu(e)?",
     DeleteSelectedPermissions: "Obrišite Odabrane Dozvole Za Pristup",
     DeleteSelectedQueries: "Obrišite Odabrane Zahtjeve?",
     DeleteSelectedUsers: "Obrišite Odabrane Korisnike?",
     DeleteSelectedWorkunits: "Obrišite Odabrane Radne Jedinice?",
+    DeleteSuperfile2: "Izbrišite Super Datoteku",
     DeleteSuperfile: "Obrišite Superdatoteke?",
     DeleteThisPackage: "Obrišite ovaj paket?",
     Delimited: "Razgraničen",
@@ -92,17 +98,20 @@
     DenyRead: "<center>Zabranite<br>Čitanje</center>",
     DenyWrite: "<center>Zabranite<br>Pisanje</center>",
     Depth: "Dubina",
+    DepthTooltip: "'Najveća Dubina Podgrafa",
     Deschedule: "Izbacite Is Reda Za Izvršavanje",
     DescheduleSelectedWorkunits: "Izbacite Odabrane Radne Jedinice Is Reda Za Izvršavanje?",
     Description: "Opis",
     Despray: "Ponovo Objedinite Datoteku",
     Details: "Detalji",
     DFUServerName: "Ime DFU Servera",
+    DFUWorkunit: "DFU RadnaJedinica",
     Directory: "Direktorij",
     DisableScopeScanConfirm: "Da li ste sigurni da želite da onemogućite skaniranje opsega? Promjene će biti prihvaćene poslije restarta DALIja",
     DisableScopeScans: "Onemogućite Skaniranje Opsega",
     DiskUsage: "Iskorištenost Diska",
     Distance: "Razdaljina",
+    DistanceTooltip: "Maksimalna Veličina Podgrafa Za Izabranu Aktivnost",
     Dll: "Dll",
     DOT: "DOT",
     DOTAttributes: "DOT Atributi",
@@ -112,6 +121,7 @@
     Duration: "Trajanje",
     EBCDIC: "EBCDIC",
     ECL: "ECL",
+    ECLWorkunit: "ECL RadnaJedinica",
     Edges: "Ivice",
     Edit: "Editujte",
     EditDOT: "Editujte DOT",
@@ -159,11 +169,16 @@
     Find: "Nađite",
     FindNext: "Nađite Slijedeći",
     FindPrevious: "Nađite Prethodni",
+    Finished: "Završen",
     FirstName: "Ime",
+    FirstNRows: "Prvih N Redova",
     Fixed: "Fiksni",
+    Folder: "Fascikla",
     Format: "Format",
+    Forward: "Naprijed",
     FromDate: "Od Datuma",
     FromSizes: "Od Veličine",
+    FromTime: "Od Vremena",
     FullName: "Ime i Prezime",
     Graph: "Graf",
     Graphs: "Grafikoni",
@@ -206,6 +221,8 @@
     LargestSize: "Najveća veličina",
     LastName: "Prezime",
     LastNDays: "Poslijednjih N Dana",
+    LastNHours: "Posljednjih N Sati",
+    LastNRows: "Posljednjih N Redova",
     LDAPWarning: "<b>Greška LDAP Servica:</b>  'Previše korisnika' - Molimo koristite filter.",
     LegacyForm: "Stari Prevaziđeni Formular",
     LibrariesUsed: "Biblioteke u Korištenju",
@@ -220,6 +237,7 @@
     LoadPackageContentHere: "(Dobavite ovamo sadržaj paketa)",
     LoadPackageFromFile: "Dobavite Paket iz Datoteke",
     Local: "Lokalni",
+    Log: "Dnevnik (Log)",
     LogFile: "Datoteka Aktivnosti",
     LoggedInAs: "Prijavljen kao",
     LogicalFile: "Logička Datoteka",
@@ -228,9 +246,12 @@
     LogicalFilesOnly: "Samo Logičke Datoteke",
     LogicalFileType: "Tip Logičke Datoteke",
     LogicalName: "Ime Logičke Datoteke",
+    Logs: "Dnevnici",
     log_analysis_1: "log_analysis_1*",
     Low: "Nizak",
     ManualCopy: "Pritisnite Ctrl+C",
+    ManualOverviewSelection: "Nophodno Je Odabrati Pregled",
+    ManualTreeSelection: "Nophodno Je Odabrati Drvo",
     Mappings: "Mapiranja",
     Mask: "Maska",
     Max: "Maksimum",
@@ -249,6 +270,7 @@
     MonitorShotLimit: "Nadgledajte Shot Limit",
     MonitorSub: "Nadgledajte Sub",
     Month: "Mjesec",
+    More: "Nastavite",
     MustContainUppercaseAndSymbol: "Mora uključiti veliko slovo i simbol",
     NA: "N/A",
     Name: "Ime",
@@ -304,10 +326,12 @@
     PasswordExpired: "Vaša lozinka je istekla I mora biti promijenjena",
     PasswordExpirePostfix: "dan(a). Želite li je sada promijeniti?",
     PasswordExpirePrefix: "Vaša lozinka će isteći za",
+    PasswordOpenZAP: "Unesite Lozinku Za ZAP (opcionalno)",
     PasswordsDoNotMatch: "Pogrešna Lozinka.",
     Path: "Put",
     PathMask: "Maska za Put",
     Pause: "Pauza",
+    PauseNow: "Zaustavite Odmah",
     PctComplete: "% Kompletiran",
     PercentDone: "Procenat Završen",
     PerformingLayout: "Izvršava Layout...",
@@ -317,6 +341,7 @@
     PlaceholderFirstName: "John",
     PlaceholderLastName: "Smith",
     Playground: "Igralište",
+    Plugins: "Dopune",
     Port: "Port",
     Prefix: "Prefiks",
     PrefixPlaceholder: "filename{:length}, filesize{:[B|L][1-8]}",
@@ -337,13 +362,17 @@
     Quarter: "Četvrtina",
     Queries: "Upiti",
     QueriesNoPackage: "Upiti bez odgovarajućeg paketa",
+    Query: "Upit",
     QueryDetailsfor: "Detalji o Upitu",
+    QueryID: "Identifikator Upita",
     QueryIDPlaceholder: "som?q*ry.1",
+    QueryName: "Ime Upita",
     QueryNamePlaceholder: "My?Su?erQ*ry",
     QuerySet: "Kolekcija Upita",
     Queue: "Red (Queue)",
     Quote: "Citat",
     QuotedTerminator: "Završni Karakter",
+    RawTextPage: "Neobrađen Tekst (Tekuća Stranica)",
     RecordCount: "Broj Rekorda",
     RecordLength: "Dužina Rekorda",
     Records: "Rekordi",
@@ -358,6 +387,8 @@
     RemoteDali: "Daleki Dali",
     RemoteDaliIP: "Daleki&nbsp;Dali&nbsp;IP&nbsp;Adresa",
     Remove: "Uklonite",
+    RemoveSubfiles: "Uklonite Pod-Datoteku",
+    RemoveUser: "Uklonite Korisnika",
     Rename: "Preimenujte",
     RenderedSVG: "Donesene SVG",
     RenderSVG: "Donesite SVG",
@@ -365,6 +396,8 @@
     RequestSchema: "Shema Zahtjeva",
     Reschedule: "Ponovo Stavite Na Raspored",
     Reset: "Resetujte",
+    ResetThisQuery: "Resetujte Tekući Upit",
+    ResetViewToSelection: "Resetujte Odabrani Prikaz",
     Resource: "Resurs",
     Resources: "Resursi",
     ResponseSchema: "Shema Odgovora",
@@ -377,6 +410,8 @@
     Resume: "Nastavite",
     RetainSuperfileStructure: "Zadržite Strukturu Superdatoteke",
     RetypePassword: "Ponovite Lozinku",
+    Reverse: "Idite Unazad",
+    RowPath: "Put Do Rekorda",
     Rows: "Redovi",
     RowTag: "Etiketa Reda",
     RoxieCluster: "Roxie Klaster",
@@ -415,6 +450,7 @@
     Start: "Počnite",
     Started: "Počeo",
     State: "Stanje",
+    Stats: "Statistike",
     Status: "Status",
     Stopped: "Zaustavljen",
     Subgraph: "Pod-Graf",
@@ -423,9 +459,11 @@
     Subtype: "Pod-Vrsta",
     Summary: "Kratak Pregled",
     SummaryMessage: "Sažeta Poruka",
-    SuperFile: "Super Datoteka",
     Superfile: "Super Datoteka",
+    SuperFile: "Super Datoteka",
     SuperFiles: "Super Datoteke",
+    Superfiles: "SuperDatoteke",
+    SuperFilesBelongsTo: "Pripadnik Superdatoteke",
     SuperfilesOnly: "Samo Superdatoteke",
     Suspend: "Suspendujte",
     Suspended: "Suspendovan",
@@ -444,6 +482,7 @@
     TargetWuid: "Cilj/Wuid",
     Terminators: "Terminatori",
     TestPages: "Test Stranice",
+    Text: "Tekst",
     ThorMasterAddress: "Adresa Glavnog Thora",
     ThorNetworkAddress: "Netvork Adresa Thora",
     Time: "Vrijeme",
@@ -470,8 +509,11 @@
     title_HPCCPlatformMain: "ECL Watch - Glavna Stranica",
     title_HPCCPlatformOps: "ECL Watch - Upravljnje",
     title_HPCCPlatformRoxie: "ECL Watch - Roxie",
+    title_HPCCPlatformServicesPlugin: "ECL Monitor - Dopune",
     title_Inputs: "Unosi",
     title_LFDetails: "Detalji o Logičkol Datoteci",
+    title_LibrariesUsed: "Biblioteke U Korištenju",
+    title_Log: "Log Fajl",
     title_LZBrowse: "Zona za Pretovar",
     title_MemberOf: "Član Od",
     title_Members: "Članovi",
@@ -485,15 +527,20 @@
     title_Results: "Rezultati",
     title_SearchResults: "Rezultati Pretraživanja",
     title_SourceFiles: "",
+    title_SourceFiles: "Originalni Fajlovi",
+    title_Topology: "Topologija",
     title_TpThorStatus: "Stanje Thora",
     title_UserPermissions: "Korisničke Dozvile za Pristup",
     title_UserQuery: "Prava Pristupa",
     title_WUDetails: "ECL Detalji o Radnoj Jedinici",
     title_WUQuery: "ECL Radne Jedinice",
+    To: "Prema",
     ToDate: "Do Sada",
     Toenablegraphviews: "Da biste mogli vidjeli grafikone, moraćete instalisati Graph View Control plugin",
     Top: "Vrh",
+    Topology: "Topologija",
     ToSizes: "Do Velićina",
+    TotalClusterTime: "Ukupno Vrijeme Klastera",
     TotalSize: "Totalna Veličina",
     TotalThorTime: "Ukupno Vrijeme Thor-a",
     TransitionGuide: "Vodič",

+ 48 - 1
esp/src/eclwatch/nls/hr/hpcc.js

@@ -33,6 +33,7 @@
     ArchivedOnly: "Samo Arhiviran",
     ArchivedWarning: "Upozorenje: koristite kratak vremenski period. Ako koristite duži vremenski period, pretraživanje radnih jedinica može trajati duže od dozviljenog vremena za pretraživanje .",
     AutoRefresh: "Osvježi",
+    Back: "Natrag",
     BannerColor: "Boja Reklamnog Bloka",
     BannerMessage: "Poruka za Reklamni Blok",
     BannerScroll: "Kretanje Reklamnog Bloka",
@@ -59,11 +60,14 @@
     Command: "Komanda",
     Comment: "Komentar",
     Completed: "Kompletiran",
+    ComplexityWarning: "Više od praga {threshold} aktivnosti ({activityCount}) - prekinite prikaz podataka?",
     Component: "Komponenta",
     Compress: "Sabijte",
     Compressed: "Komprimirani",
+    CompressedFileSize: "Komprimirana Veličina Datoteke",
     Configuration: "Konfiguracija",
     ConfirmPassword: "Potvrdite Lozinku",
+    ConfirmRemoval: "Jeste li sigurni da to želite učiniti?",
     Content: "Sadržaj",
     Contents: "Sadržaji",
     ContentType: "Vrsta Sadržaja",
@@ -78,12 +82,14 @@
     Debug: "Otklonite Neispravnosti",
     DEF: "DEF",
     Delete: "Obrišite",
+    Deleted: "Obrisan",
     DeleteSelectedFiles: "Obrišite Odabrane Datoteke?",
     DeleteSelectedGroups: "Obrišite odabranu(e) grupu(e)?",
     DeleteSelectedPermissions: "Obrišite Odabrane Dozvole Za Pristup",
     DeleteSelectedQueries: "Obrišite Odabrane Zahtjeve?",
     DeleteSelectedUsers: "Obrišite Odabrane Korisnike?",
     DeleteSelectedWorkunits: "Obrišite Odabrane Radne Jedinice?",
+    DeleteSuperfile2: "Obrišite Super Datoteku",
     DeleteSuperfile: "Obrišite Superdatoteke?",
     DeleteThisPackage: "Obrišite ovaj paket?",
     Delimited: "Razgraničen",
@@ -92,17 +98,20 @@
     DenyRead: "<center>Zabranite<br>Čitanje</center>",
     DenyWrite: "<center>Zabranite<br>Pisanje</center>",
     Depth: "Dubina",
+    DepthTooltip: "'Najveća Dubina Podgrafa",
     Deschedule: "Izbacite Is Reda Za Izvršavanje",
     DescheduleSelectedWorkunits: "Izbacite Odabrane Radne Jedinice Is Reda Za Izvršavanje?",
     Description: "Opis",
     Despray: "Ponovo Objedinite Datoteku",
     Details: "Detalji",
     DFUServerName: "Ime DFU Servera",
+    DFUWorkunit: "DFU RadnaJedinica",
     Directory: "Direktorij",
     DisableScopeScanConfirm: "Da li ste sigurni da želite da onemogućite skaniranje opsega? Promjene će biti prihvaćene poslije restarta DALIja",
     DisableScopeScans: "Onemogućite Skaniranje Opsega",
     DiskUsage: "Iskorištenost Diska",
     Distance: "Razdaljina",
+    DistanceTooltip: "Maksimalna Veličina Podgrafa Za Odabranu Aktivnost",
     Dll: "Dll",
     DOT: "DOT",
     DOTAttributes: "DOT Atributi",
@@ -112,6 +121,7 @@
     Duration: "Trajanje",
     EBCDIC: "EBCDIC",
     ECL: "ECL",
+    ECLWorkunit: "ECL RadnaJedinica",
     Edges: "Ivice",
     Edit: "Editujte",
     EditDOT: "Editujte DOT",
@@ -159,11 +169,16 @@
     Find: "Nađite",
     FindNext: "Nađite Slijedeći",
     FindPrevious: "Nađite Prethodni",
+    Finished: "Završen",
     FirstName: "Ime",
+    FirstNRows: "Prvih N Redova",
     Fixed: "Fiksni",
+    Folder: "Fascikla",
     Format: "Format",
+    Forward: "Naprijed",
     FromDate: "Od Datuma",
     FromSizes: "Od Veličine",
+    FromTime: "Od Vremena",
     FullName: "Ime i Prezime",
     Graph: "Graf",
     Graphs: "Grafikoni",
@@ -206,6 +221,8 @@
     LargestSize: "Najveća veličina",
     LastName: "Prezime",
     LastNDays: "Poslijednjih N Dana",
+    LastNHours: "Posljednjih N Sati",
+    LastNRows: "Posljednjih N Redova",
     LDAPWarning: "<b>Greška LDAP Servica:</b>  'Previše korisnika' - Molimo koristite filter.",
     LegacyForm: "Stari Prevaziđeni Formular",
     LibrariesUsed: "Biblioteke u Korištenju",
@@ -220,6 +237,7 @@
     LoadPackageContentHere: "(Dobavite ovamo sadržaj paketa)",
     LoadPackageFromFile: "Dobavite Paket iz Datoteke",
     Local: "Lokalni",
+    Log: "Dnevnik (Log)",
     LogFile: "Datoteka Aktivnosti",
     LoggedInAs: "Prijavljen kao",
     LogicalFile: "Logička Datoteka",
@@ -228,9 +246,12 @@
     LogicalFilesOnly: "Samo Logičke Datoteke",
     LogicalFileType: "Tip Logičke Datoteke",
     LogicalName: "Ime Logičke Datoteke",
+    Logs: "Dnevnici",
     log_analysis_1: "log_analysis_1*",
     Low: "Nizak",
     ManualCopy: "Pritisnite Ctrl+C",
+    ManualOverviewSelection: "Nophodno Je Selektirati Pregled",
+    ManualTreeSelection: "Nophodno Je Selektirati Drvo",
     Mappings: "Mapiranja",
     Mask: "Maska",
     Max: "Maksimum",
@@ -249,6 +270,7 @@
     MonitorShotLimit: "Nadgledajte Shot Limit",
     MonitorSub: "Nadgledajte Sub",
     Month: "Mjesec",
+    More: "Nastavite",
     MustContainUppercaseAndSymbol: "Mora uključiti veliko slovo i simbol",
     NA: "N/A",
     Name: "Ime",
@@ -304,10 +326,12 @@
     PasswordExpired: "Vaša lozinka je istekla I mora biti promijenjena",
     PasswordExpirePostfix: "dan(a). Želite li je sada promijeniti?",
     PasswordExpirePrefix: "Vaša lozinka će isteći za",
+    PasswordOpenZAP: "Unesite Lozinku Za ZAP (neobavezno)",
     PasswordsDoNotMatch: "Pogrešna Lozinka.",
     Path: "Put",
     PathMask: "Maska za Put",
     Pause: "Pauza",
+    PauseNow: "Zaustavite Odmah",
     PctComplete: "% Kompletiran",
     PercentDone: "Procenat Završen",
     PerformingLayout: "Izvršava Layout...",
@@ -317,6 +341,7 @@
     PlaceholderFirstName: "John",
     PlaceholderLastName: "Smith",
     Playground: "Igralište",
+    Plugins: "Dodatci",
     Port: "Port",
     Prefix: "Prefiks",
     PrefixPlaceholder: "filename{:length}, filesize{:[B|L][1-8]}",
@@ -337,13 +362,17 @@
     Quarter: "Četvrtina",
     Queries: "Upiti",
     QueriesNoPackage: "Upiti bez odgovarajućeg paketa",
+    Query: "Upit",
     QueryDetailsfor: "Detalji o Upitu",
+    QueryID: "Identifikator Upita",
     QueryIDPlaceholder: "som?q*ry.1",
+    QueryName: "Naziv Upita",
     QueryNamePlaceholder: "My?Su?erQ*ry",
     QuerySet: "Kolekcija Upita",
     Queue: "Red (Queue)",
     Quote: "Citat",
     QuotedTerminator: "Završni Karakter",
+    RawTextPage: "Neobrađen Tekst (Tekuća Stranica)",
     RecordCount: "Broj Rekorda",
     RecordLength: "Dužina Rekorda",
     Records: "Rekordi",
@@ -358,6 +387,8 @@
     RemoteDali: "Daleki Dali",
     RemoteDaliIP: "Daleki&nbsp;Dali&nbsp;IP&nbsp;Adresa",
     Remove: "Uklonite",
+    RemoveSubfiles: "Uklonite Pod-Datoteku",
+    RemoveUser: "Uklonite Korisnika",
     Rename: "Preimenujte",
     RenderedSVG: "Donesene SVG",
     RenderSVG: "Donesite SVG",
@@ -365,6 +396,8 @@
     RequestSchema: "Shema Zahtjeva",
     Reschedule: "Ponovo Stavite Na Raspored",
     Reset: "Resetujte",
+    ResetThisQuery: "Resetujte Tekući Upit",
+    ResetViewToSelection: "Resetujte Odabrani Prikaz",
     Resource: "Resurs",
     Resources: "Resursi",
     ResponseSchema: "Shema Odgovora",
@@ -377,6 +410,8 @@
     Resume: "Nastavite",
     RetainSuperfileStructure: "Zadržite Strukturu Superdatoteke",
     RetypePassword: "Ponovite Lozinku",
+    Reverse: "Idite Unazad",
+    RowPath: "Put Do Rekorda",
     Rows: "Redovi",
     RowTag: "Etiketa Reda",
     RoxieCluster: "Roxie Klaster",
@@ -415,6 +450,7 @@
     Start: "Počnite",
     Started: "Počeo",
     State: "Stanje",
+    Stats: "Statistike",
     Status: "Status",
     Stopped: "Zaustavljen",
     Subgraph: "Pod-Graf",
@@ -423,9 +459,11 @@
     Subtype: "Pod-Vrsta",
     Summary: "Kratak Pregled",
     SummaryMessage: "Sažeta Poruka",
-    SuperFile: "Super Datoteka",
     Superfile: "Super Datoteka",
+    SuperFile: "Super Datoteka",
     SuperFiles: "Super Datoteke",
+    Superfiles: "SuperDatoteke",
+    SuperFilesBelongsTo: "Pripadnik Superdatoteke",
     SuperfilesOnly: "Samo Superdatoteke",
     Suspend: "Suspendujte",
     Suspended: "Suspendovan",
@@ -444,6 +482,7 @@
     TargetWuid: "Cilj/Wuid",
     Terminators: "Terminatori",
     TestPages: "Test Stranice",
+    Text: "Tekst",
     ThorMasterAddress: "Adresa Glavnog Thora",
     ThorNetworkAddress: "Netvork Adresa Thora",
     Time: "Vrijeme",
@@ -470,8 +509,11 @@
     title_HPCCPlatformMain: "ECL Watch - Glavna Stranica",
     title_HPCCPlatformOps: "ECL Watch - Upravljnje",
     title_HPCCPlatformRoxie: "ECL Watch - Roxie",
+    title_HPCCPlatformServicesPlugin: "ECL Monitor - Dodatci",
     title_Inputs: "Unosi",
     title_LFDetails: "Detalji o Logičkol Datoteci",
+    title_LibrariesUsed: "Biblioteke U Korištenju",
+    title_Log: "Log Fajl",
     title_LZBrowse: "Zona za Pretovar",
     title_MemberOf: "Član Od",
     title_Members: "Članovi",
@@ -485,15 +527,20 @@
     title_Results: "Rezultati",
     title_SearchResults: "Rezultati Pretraživanja",
     title_SourceFiles: "",
+    title_SourceFiles: "Originalni Fajlovi",
+    title_Topology: "Topologija",
     title_TpThorStatus: "Stanje Thora",
     title_UserPermissions: "Korisničke Dozvile za Pristup",
     title_UserQuery: "Prava Pristupa",
     title_WUDetails: "ECL Detalji o Radnoj Jedinici",
     title_WUQuery: "ECL Radne Jedinice",
+    To: "Prema",
     ToDate: "Do Sada",
     Toenablegraphviews: "Da biste mogli vidjeli grafikone, moraćete instalisati Graph View Control plugin",
     Top: "Vrh",
+    Topology: "Topologija",
     ToSizes: "Do Velićina",
+    TotalClusterTime: "Ukupno Vrijeme Klastera",
     TotalSize: "Totalna Veličina",
     TotalThorTime: "Ukupno Vrijeme Thor-a",
     TransitionGuide: "Vodič",

+ 46 - 0
esp/src/eclwatch/nls/sr/hpcc.js

@@ -33,6 +33,7 @@
     ArchivedOnly: "Само Архивиран",
     ArchivedWarning: "Упозорење: користите кратак временски период. Ако користите дужи временски период, претраживање радних јединица може трајати дуже од дозвиљеног времена за претраживање.",
     AutoRefresh: "Освежи",
+    Back: "Назад",
     BannerColor: "Боја Рекламног Блока",
     BannerMessage: "Порука за Рекламни Блок",
     BannerScroll: "Кретање Рекламног Блока",
@@ -60,11 +61,14 @@
     Command: "Команда",
     Comment: "Коментар",
     Completed: "Комплетиран",
+    ComplexityWarning: "Више од прага {threshold} активности ({activityCount}) - прекините приказ података?",
     Component: "Компонента",
     Compress: "Сабијте",
     Compressed: "Компримирани",
+    CompressedFileSize: "Компримирана Величина Датотеке",
     Configuration: "Конфигурација",
     ConfirmPassword: "Потврдите Лозинку",
+    ConfirmRemoval: "Јесте ли сигурни да то желите учинити?",
     Content: "Садржај",
     Contents: "Садржаји",
     ContentType: "Врста Садржаја",
@@ -79,12 +83,14 @@
     Debug: "Отклоните Неисправности",
     DEF: "ДЕФ",
     Delete: "Обришите",
+    Deleted: "Избрисан",
     DeleteSelectedFiles: "Обришите Одабране Датотеке?",
     DeleteSelectedGroups: "Обришите одабрану(е) групу(е)?",
     DeleteSelectedPermissions: "Обришите Одабране Дозволе За Приступ",
     DeleteSelectedQueries: "Обришите Одабране Захтјеве?",
     DeleteSelectedUsers: "Обришите Одабране Кориснике?",
     DeleteSelectedWorkunits: "Обришите Одабране Радне Јединице?",
+    DeleteSuperfile2: "Избришите Супер Датотеку",
     DeleteSuperfile: "Обришите Супердатотеке?",
     DeleteThisPackage: "Обришите Овај Пакет?",
     Delimited: "Разграничен",
@@ -93,17 +99,20 @@
     DenyRead: "<center>Забраните<br>Читање</center>",
     DenyWrite: "<center>Забраните<br>Писање</center>",
     Depth: "Дубина",
+    DepthTooltip: "'Највећа Дубина Подграфа",
     Deschedule: "Избаците Ис Реда За Извршавање",
     DescheduleSelectedWorkunits: "Избаците Одабране Радне Јединице Из Реда За Извршавање?",
     Description: "Опис",
     Despray: "Поново Обjедините Датотеку",
     Details: "Детаљи",
     DFUServerName: "Име ДФУ Сервера",
+    DFUWorkunit: "ДФУ РаднаЈединицa",
     Directory: "Директориј",
     DisableScopeScanConfirm: "Да ли сте сигурни да желите онемогућити сканирање опсега? Промене ће бити прихваћене после рестарта ДАЛИја",
     DisableScopeScans: "Онемогућите Сканирање Опсега",
     DiskUsage: "Искориштеност Диска",
     Distance: "Раздаљина",
+    DistanceTooltip: "Максимална Величина Подграфа За Одабрану Активност",
     Dll: "Длл",
     DOT: "ДОТ",
     DOTAttributes: "ДОТ Атрибути",
@@ -113,6 +122,7 @@
     Duration: "Трајање",
     EBCDIC: "ЕБЦДИK",
     ECL: "ЕЦЛ",
+    ECLWorkunit: "ЕЦЛ РаднаЈединица",
     Edges: "Ивице",
     Edit: "Едитујте",
     EditDOT: "Едитујте ДОТ",
@@ -159,11 +169,16 @@
     Find: "Нађите",
     FindNext: "Нађите Следећи",
     FindPrevious: "Нађите Претходни",
+    Finished: "Завршен",
     FirstName: "Име",
+    FirstNRows: "Првих Н Редова",
     Fixed: "Фиксни",
+    Folder: "Фасцикла",
     Format: "Формат",
+    Forward: "Напред",
     FromDate: "Од Датума",
     FromSizes: "Од Величине",
+    FromTime: "Од Времена",
     FullName: "Име и Презиме",
     Graph: "Граф",
     Graphs: "Графикони",
@@ -206,6 +221,8 @@
     LargestSize: "Највећа величина",
     LastName: "Презиме",
     LastNDays: "Последњих Н Дана",
+    LastNHours: "Последњих Н Сати",
+    LastNRows: "Последњих Н Редова",
     LDAPWarning: "<б>Грешка ЛДАП Сервица:</б>  'Превише корисника' - Молимо користите филтер.",
     LegacyForm: "Стари Превазиђени Формулар",
     LibrariesUsed: "Библиотеке у Кориштењу",
@@ -220,6 +237,7 @@
     LoadPackageContentHere: "(Добавите овамо садржај пакета)",
     LoadPackageFromFile: "Добавите Пакет из Датотеке",
     Local: "Локални",
+    Log: "Дневник (Лог)",
     LogFile: "Датотека Активности",
     LoggedInAs: "Пријављен као",
     LogicalFile: "Логичка Датотека",
@@ -228,9 +246,12 @@
     LogicalFilesOnly: "Само Логичке Датотеке",
     LogicalFileType: "Тип Логичке Датотеке",
     LogicalName: "Име Логичке Датотеке",
+    Logs: "Дневници",
     log_analysis_1: "лог_аналисис_1*",
     Low: "Низак",
     ManualCopy: "Притисните Ctrl+C",
+    ManualOverviewSelection: "Нопходно Је Одабрати Преглед",
+    ManualTreeSelection: "Нопходно Је Одабрати Дрво",
     Mappings: "Мапирања",
     Mask: "Маска",
     Max: "Максимум",
@@ -249,6 +270,7 @@
     MonitorShotLimit: "Надгледајте Шот Лимит",
     MonitorSub: "Надгледајте Суб",
     Month: "Месец",
+    More: "Наставите",
     MustContainUppercaseAndSymbol: "Мораte укључити велико слово и симбол",
     NA: "Н/A",
     Name: "Име",
@@ -304,10 +326,12 @@
     PasswordExpired: "Ваша лозинка је истекла И мора бити промењена",
     PasswordExpirePostfix: "дан(а). Желите ли је сада променити?",
     PasswordExpirePrefix: "Ваша лозинка ће истећи за",
+    PasswordOpenZAP: "Унесите Лозинку За ЗАП (необавезно)",
     PasswordsDoNotMatch: "Погрешна Лозинка.",
     Path: "Пут",
     PathMask: "Маска за Пут",
     Pause: "Пауза",
+    PauseNow: "Зауставите Одмах",
     PctComplete: "% Комплетиран",
     PercentDone: "Проценат Завршен",
     PerformingLayout: "Извршава Лejaут...",
@@ -317,6 +341,7 @@
     PlaceholderFirstName: "Џон",
     PlaceholderLastName: "Смит",
     Playground: "Игралиште",
+    Plugins: "Допуне",
     Port: "Порт",
     Prefix: "Префикс",
     PrefixPlaceholder: "имедатотеке{:дужина}, величинадатотеке{:[B|L][1-8]}",
@@ -337,13 +362,17 @@
     Quarter: "Четвртина",
     Queries: "Упити",
     QueriesNoPackage: "Упити без одговарајућег пакета",
+    Query: "Упит",
     QueryDetailsfor: "Детаљи о Упиту",
+    QueryID: "Идентификатор Упита",
     QueryIDPlaceholder: "som?q*ry.1",
+    QueryName: "Име Упита",
     QueryNamePlaceholder: "My?Su?erQ*ry",
     QuerySet: "Колекција Упита",
     Queue: "Ред (Kjу)",
     Quote: "Цитат",
     QuotedTerminator: "Завршни Карактер",
+    RawTextPage: "Необрађен Текст (Текућа Страница)",
     RecordCount: "Број Рекорда",
     RecordLength: "Дужина Рекорда",
     Records: "Рекорди",
@@ -358,6 +387,8 @@
     RemoteDali: "Далеки Дали",
     RemoteDaliIP: "Далеки&nbsp;Дали&nbsp;ИП&nbsp;Адреса",
     Remove: "Уклоните",
+    RemoveSubfiles: "Уклоните Поддатотеку",
+    RemoveUser: "Уклоните Корисника",
     Rename: "Преименујте",
     RenderedSVG: "Донесене СВГ",
     RenderSVG: "Донесите СВГ",
@@ -365,6 +396,8 @@
     RequestSchema: "Шема Захтева",
     Reschedule: "Поново Ставите На Распоред",
     Reset: "Ресетујте",
+    ResetThisQuery: "Ресетујте Текући Упит",
+    ResetViewToSelection: "Ресетујте Одабрани Вју",
     Resource: "Ресурс",
     Resources: "Ресурси",
     ResponseSchema: "Шема Одговора",
@@ -377,6 +410,8 @@
     Resume: "Наставите",
     RetainSuperfileStructure: "Задржите Структуру Супердатотеке",
     RetypePassword: "Поновите Лозинку",
+    Reverse: "Идите Уназад",
+    RowPath: "Пут До Рекорда",
     Rows: "Редови",
     RowTag: "Етикета Реда",
     RoxieCluster: "Роkcи Кластер",
@@ -415,6 +450,7 @@
     Start: "Почните",
     Started: "Почео",
     State: "Стање",
+    Stats: "Статистике",
     Status: "Статус",
     Stopped: "Заустављен",
     Subgraph: "ПодГраф",
@@ -426,6 +462,8 @@
     SuperFile: "Супер Датотека",
     Superfile: "Супер Датотека",
     SuperFiles: "Супер Датотеке",
+    Superfiles: "СуперДатотеке",
+    SuperFilesBelongsTo: "Припадник Супердатотеке",
     SuperfilesOnly: "Само Супердатотеке",
     Suspend: "Суспендујте",
     Suspended: "Суспендован",
@@ -444,6 +482,7 @@
     TargetWuid: "Циљ/Pjид",
     Terminators: "Терминатори",
     TestPages: "Тест Странице",
+    Text: "Текст",
     ThorMasterAddress: "Адреса Главног Тора",
     ThorNetworkAddress: "Нетворк Адреса Торa",
     Time: "Време",
@@ -470,8 +509,11 @@
     title_HPCCPlatformMain: "ECL Watch - Главна Страница",
     title_HPCCPlatformOps: "ECL Watch - Управљње",
     title_HPCCPlatformRoxie: "ECL Watch - Роkcи",
+    title_HPCCPlatformServicesPlugin: "ЕЦЛ Монитор - Допуне",
     title_Inputs: "Уноси",
     title_LFDetails: "Детаљи о Логичкоj Датотеци",
+    title_LibrariesUsed: "Библиотеке У Кориштењу",
+    title_Log: "Лог Фајл",
     title_LZBrowse: "Зона за Претовар",
     title_MemberOf: "Члан Од",
     title_Members: "Чланови",
@@ -485,15 +527,19 @@
     title_Results: "Резултати",
     title_SearchResults: "Резултати Претраживања",
     title_SourceFiles: "Оригиналне Датотеке",
+    title_Topology: "Топологија",
     title_TpThorStatus: "Стање Тора",
     title_UserPermissions: "Корисничке Дозвиле за Приступ",
     title_UserQuery: "Права Приступа",
     title_WUDetails: "ЕЦЛ Детаљи о Радној Јединици",
     title_WUQuery: "ЕЦЛ Радне Јединице",
+    To: "Према",
     ToDate: "До Сада",
     Toenablegraphviews: "Да бисте могли видети графиконе, мораћете инсталисати Граф Вjу Kонтрол плaгин",
     Top: "Врх",
+    Topology: "Топологија",
     ToSizes: "До Величина",
+    TotalClusterTime: "Укупно Време Кластера",
     TotalSize: "Укупна Величина",
     TotalThorTime: "Укупно Време Тора",
     TransitionGuide: "Водич",

+ 1 - 0
esp/src/eclwatch/templates/WUDetailsWidget.html

@@ -82,6 +82,7 @@
                             <li>
                                 <label class="Prompt" for="${id}Cluster">${i18n.Cluster}:</label>
                                 <div id="${id}Cluster"></div>
+                                <div id="${id}AllowedClusters" data-dojo-type="dijit.form.Select"/></div>
                             </li>
                             <li>
                                 <label class="Prompt" for="${id}TotalClusterTime">${i18n.TotalClusterTime}:</label>

+ 68 - 56
initfiles/bash/etc/init.d/hpcc-init.in

@@ -89,17 +89,13 @@ source  ${INSTALL_DIR}/etc/init.d/hpcc_common
 source  ${INSTALL_DIR}/etc/init.d/init-functions
 source  ${INSTALL_DIR}/etc/init.d/export-path
 
-# Only root user can write following HPCC_INIT_LOG
+# Only root user can write following logfile
 is_root
 
 [ ! -e ${LOG_DIR} ] && mkdir -p ${LOG_DIR}
-HPCC_INIT_LOG=${LOG_DIR}/hpcc-init.log
+export logfile=${LOG_DIR}/hpcc-init.log
 
-export PS4='+${BASH_SOURCE[1]} ${LINENO}: '
-[ -e ${HPCC_INIT_LOG}  ] && rm -rf ${HPCC_INIT_LOG}
-touch $HPCC_INIT_LOG
-exec 2> ${HPCC_INIT_LOG}
-set -x
+[ ! -e ${logfile}  ] && touch $logfile
 
 ## Debug variable allowing verbose debug output
 ##
@@ -109,13 +105,14 @@ VERBOSE=${VERBOSE:-0}
 COMP_BY_TYPE=${COMP_BY_TYPE:-0}
 DAFILESRV=${DAFILESRV:-0}
 
+
 set_environmentvars
 envfile=$configs/$environment
 
 # Know HPCC user after set_environmentvars
 log_dir_owner=$(ls -ld $LOG_DIR | awk '{print $3}')
 [ "log_dir_owner" != "${user}" ] && chown ${user}:${user} $LOG_DIR
-chown ${user}:${user} $HPCC_INIT_LOG
+chown ${user}:${user} $logfile
 
 #Sourcing the hpcc environment
 configgen_path=${path}/sbin
@@ -125,16 +122,21 @@ source ${configgen_path}/hpcc_setenv
 which_service
 get_commondirs
 
+log "--------------------------"
+log "--------------------------"
+
 #Check for existance of user
 check_user ${user}
 if [ $? -ne 1 ];then
-   echo "$user user does not exits on the system. Exiting ....."
+   log  "$user user does not exist on the system. Exiting ..."
+   echo "$user user does not exits on the system. Exiting ..."
    exit 3
 fi
 
 check_group ${group}
 if [ $? -ne 1 ];then
-  echo "Group for user ${group} does not exist on the system. Exiting....."
+   log  "Group for user ${group} does not exist on the system. Exiting ..."
+   echo "Group for user ${group} does not exist on the system. Exiting ..."
    exit 3
 fi 
 
@@ -148,7 +150,8 @@ COMPS=`${configgen_path}/configgen -env ${envfile} -list`
 comp.parser ${COMPS}
 
 if [ -z ${compArray} ];then
-   echo "There are no components configured to run on this node..."
+   log  "There are no components configured to run on this node ..."
+   echo "There are no components configured to run on this node ..."
    exit 3
 fi
 
@@ -203,20 +206,19 @@ done
 compList[0]=$compDali
 compTypeList[0]="dali"
 
-if [ ${DEBUG} != "NO_DEBUG" ]; then
-    for i in ${compList[@]};do
-        echo $i
-    done
+log "The following components have been located:"
+for i in ${compList[@]};do
+    log "---> $i"
+done
+log "--------------------------"
     
-fi
-
 #declaring all flags here
 isComp=0
 component=""
 runSetupOnly=0
 dafilesrvflag=0
 
-TEMP=`/usr/bin/getopt -o c:h --long help,componentlist,typelist -n 'hpcc-init' -- "$@"`
+TEMP=`/usr/bin/getopt -o c:hd --long help,componentlist,typelist,debug -n 'hpcc-init' -- "$@"`
 if [ $? != 0 ] ; then echo "Failure to parse commandline." >&2 ; exit 1 ; fi
 eval set -- "$TEMP"
 while true ; do
@@ -229,7 +231,7 @@ while true ; do
                 comp.getByType $comp
                 if [ -z $comp_return ]
                 then
-                    echo "Unknown component: $comp"
+                    log "Unknown component: $comp"
                     exit 1
                 fi
                 for (( i=0; i<=${compListLen}; i++ ));do
@@ -247,12 +249,14 @@ while true ; do
                 component=$comp
             fi
             shift 2 ;;
-        -h|--help) print_usage
-                   shift ;;
+        -d|--debug) DEBUG="DEBUG"
+                    shift ;;
+        -h|--help)  print_usage
+                    shift ;;
         --componentlist) print_components
-                   shift ;;
+                    shift ;;
         --typelist) print_types
-                   shift ;;
+                    shift ;;
         --) shift ; break ;;
         *) print_usage ;;
     esac
@@ -263,6 +267,10 @@ if [ -z $arg ] || [ $# -ne 1 ]; then
     print_usage
 fi
 
+log "Debug log written to $LOG_DIR/hpcc-init.debug"
+exec 2>$LOG_DIR/hpcc-init.debug
+set -x
+
 if [ -z ${component} ]; then
     for (( i=0; i<=${compListLen}; i++ ));do
         component="$component ${compList[$i]}"
@@ -299,10 +307,13 @@ case "$arg" in
         ;;
 esac
 
+log "Attempting to execute ${cmd} argument on specified components"
+
 unset IFS
 
 # Create dropzone on a full system start
 if [ ${cmd} = "start" ] || [ "${cmd}" = "restart" ]; then
+    log "Creating dropzone"
     create_dropzone
 fi
 
@@ -312,8 +323,11 @@ if [ ! -z "${compDafilesrv}" ];then
         start)
             /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
             if [ $? -ne 0 ];then
+              log "--------------------------"
+              log "${compDafilesrv} ---> ${cmd}"
               /etc/init.d/dafilesrv $1 2>/dev/null
             else
+              log  "Dependent service dafilesrv, ${compDafilesrv} is already running."
               echo "Dependent service dafilesrv, ${compDafilesrv} is already running."
             fi
             ;;
@@ -327,6 +341,8 @@ fi
 
 # Restart handling for entire system
 if [ ${cmd} = "restart" ] && [ "${isComp}" -eq 0 ]; then
+    log "Stopping entire system for a full restart"
+    log "--------------------------"
     echo "*****************************************"
     echo "Stopping entire system for a full restart"
     echo "*****************************************"
@@ -338,33 +354,33 @@ if [ ${cmd} = "restart" ] && [ "${isComp}" -eq 0 ]; then
         fi
         set_componentvars ${compList[$i]}
         xcmd="${cmd}_component ${compList[$i]}"
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            echo $xcmd
-        fi
+        log "--------------------------"
+        log "${compName} ---> ${cmd}"
         if strstr ${compType} "thor" && [ ${foundThorSlave} -eq 1 ];
         then
-            if [ ${DEBUG} != "NO_DEBUG" ]; then
-               echo "Thor slave found on the node, hence skipping the component "
-            fi
+            log "Thor slave found on the node, hence skipping the component "
             continue
         elif strstr ${compType} "dafilesrv" ;then
-            if [ ${DEBUG} != "NO_DEBUG" ]; then
-               echo "skipping the component ${compName}"
-            fi
+            log "skipping the component ${compName}"
             continue;
         else
           eval $xcmd
           statForEach=$?
+          log "${xcmd} ---> Exit status ${statForEach}"
           statForStop=$(( ${statForStop} == 3 ? ${statForEach} : ${statForStop} ))
         fi
     done 
 
+    log "Starting the entire System"
+    log "--------------------------"
     echo "***************************************************"
     echo "Starting the entire System"
     echo "***************************************************"
     cmd=start
     /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
     if [ $? -ne 0 ];then
+        log "--------------------------"
+        log "${compDafilesrv} ---> ${cmd}"
         /etc/init.d/dafilesrv $1 2>/dev/null
     fi
 
@@ -375,19 +391,17 @@ if [ ${cmd} = "restart" ] && [ "${isComp}" -eq 0 ]; then
         fi
         set_componentvars ${compList[$i]}
         xcmd="${cmd}_component ${compList[$i]}"
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            echo $xcmd
-        fi
+        log "--------------------------"
+        log "${compName} ---> ${cmd}"
         if strstr ${compType} "thor" && [ ${foundThorSlave}  -eq 1 ]; 
         then
-            if [ ${DEBUG} != "NO_DEBUG" ]; then
-               echo "Thor slave found on the node, hence just running the setup for thor"
-            fi
-            setup_component 
+            log "Thor slave found on the node, hence just running the setup for thor"
+            setup_component
             continue
         else
            eval $xcmd
            statForEach=$?
+           log "${xcmd} ---> Exit status ${statForEach}"
            statForStart=$(( ${statForStart} == 0 ? ${statForEach} : ${statForStart} ))
         fi
     done
@@ -405,38 +419,36 @@ fi
 STATUS=0
 for C in ${component} ; do
     if [ -z "${C}" ];then
-          continue;
+      continue;
     fi
     set_componentvars ${C}
     xcmd="${cmd}_component ${C}"
-    if [ ${DEBUG} != "NO_DEBUG" ]; then
-        echo $xcmd
-    fi
     if strstr ${compType} "thor" && [ ${foundThorSlave} -eq 1 ] && [ "${cmd}" != "status" ] && [ "${cmd}" != "setup" ]; then
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            echo "Thor slave found on the node, hence just running the setup for thor"
-        fi
-        setup_component 
-        continue
+      log "Thor slave found on the node, hence just running the setup for thor"
+      setup_component
+      continue
     else
-        eval $xcmd
-        statForEach=$?
-        STATUS=$(( $STATUS == 0 ? $statForEach : $STATUS))
+      log "--------------------------"
+      log "${compName} ---> ${cmd}"
+      eval $xcmd
+      statForEach=$?
+      log "${cmd}_component ${C} ---> Exit status ${statForEach}"
+      STATUS=$(( $STATUS == 0 ? $statForEach : $STATUS))
     fi
 done 
 
 
 if [ "$cmd" = "stop" ] && [ -n "$compDafilesrv" ]
 then
-     echo 
     /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
     if [ $? -ne 0 ];then
-       echo "Service dafilesrv, ${compDafilesrv} is already stopped.".
+      log  "Service dafilesrv, ${compDafilesrv} is already stopped."
+      echo "Service dafilesrv, ${compDafilesrv} is already stopped."
     else
-       echo "Service dafilesrv, ${compDafilesrv} is still running".
-       echo "To stop it, run \"service dafilesrv stop\"."
+      log  "Service dafilesrv, ${compDafilesrv} is still running."
+      echo "Service dafilesrv, ${compDafilesrv} is still running."
+      echo "To stop it, run \"service dafilesrv stop\"."
     fi
-    echo 
 
 fi
 exit ${STATUS}
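For reference, the logging and tracing pattern the revised hpcc-init relies on reduces to the short sketch below; the inline log() only mirrors the helper added to init-functions, and the paths are illustrative rather than taken from a real install.

    #!/bin/bash
    # Minimal sketch of the hpcc-init logging/trace pattern (illustrative paths,
    # inline log() stands in for the shared init-functions helper).
    LOG_DIR=${LOG_DIR:-/tmp/hpcc-init-demo}
    mkdir -p ${LOG_DIR}
    export logfile=${LOG_DIR}/hpcc-init.log
    [ ! -e ${logfile} ] && touch ${logfile}          # append across runs instead of truncating

    log() { printf "%s: %s\n" "$(date --universal --iso-8601=seconds | cut -c1-19)" "$*" >> ${logfile}; }

    log "--------------------------"
    log "service run starting"

    # Verbose command tracing is opt-in and kept out of the main log:
    exec 2>${LOG_DIR}/hpcc-init.debug
    set -x
    true                                             # commands from here on are traced to the debug file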

+ 53 - 111
initfiles/bash/etc/init.d/hpcc_common.in

@@ -251,26 +251,10 @@ set_environmentvars() {
     ## use default of "DEFAULT"
     ##
     SECTION=${SECTION:-DEFAULT}
-    DEBUG=${DEBUG:-NO_DEBUG}
 
     cfg.parser ${HPCC_CONFIG}
     cfg.section.${SECTION}
 
-    if [ ${DEBUG} != "NO_DEBUG" ]; then
-        echo "\$runtime=$runtime"
-        echo "\$path=$path"
-        echo "\$configs=$configs"
-        echo "\$configsbackup=$configsbackup"
-        echo "\$user=$user"
-        echo "\$lock=$lock"
-        echo "\$pid=$pid"
-        echo "\$log=$log"
-        echo "\$environment=$environment"
-        echo "\$interface=$interface"
-        echo "\$autodetectipscript=$autodetectipscript"
-        echo
-    fi
-
     if [ -n "${umask}" ]; then
         umask $umask
     fi
@@ -298,15 +282,10 @@ configGenCmd() {
 
     # Creating logfiles for component
     logDir=$log/${compName}
-    logFile=$log/${compName}/init_${compName}.log
 
     configcmd="${configgen_path}/configgen -env ${envfile} -od ${runtime} -id ${componentFile} -c ${compName}"
-    if [ ${DEBUG} != "NO_DEBUG" ]; then
-        echo $configcmd
-    else
-        echo $configcmd >> $logFile
-    fi
-    su ${user} -c "$configcmd" >> $logFile 2>&1
+    log "$configcmd"
+    su ${user} -c "$configcmd" 2>/dev/null
 }
 
 createRuntime() {
@@ -337,31 +316,23 @@ createRuntime() {
     # Creating Component Specific directories
     # Creating pidfile specific directory and changing its owner permissions
     if [ ! -d "$pid/$compName" ]; then
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            echo "Creating Pidfile directory"
-        fi
+        log "Creating Pidfile Directory $pid/$compName"
         createDir "$pid/$compName"
     fi
 
     if [ ! -d "$lock/$compName" ]; then
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            echo "Creating lockfile directory"
-        fi
+        log "Creating Lockfile Directory $lock/$compName"
         createDir "$lock/$compName"
     fi
 
     if [ ! -d "$log/$compName" ]; then
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            echo "Creating log directory"
-        fi
+        log "Creating Log Directory $log/$compName"
         createDir "$log/$compName"
     fi
 
     # Creating runtime specific directory and changing its owner permissions
     if [ ! -d $compPath ]; then
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            echo "Creating Runtime Directory for $compName"
-        fi
+        log "Creating Runtime Directory $compPath"
         createDir "$compPath"
     fi
 
@@ -394,17 +365,14 @@ start_dafilesrv() {
    /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
    if [ $? -ne 0 ];then
       #Dafilesrv is not running so start it , before starting cleanup the lock and pid file.
-      if [ ${DEBUG} != "NO_DEBUG" ]; then
-        log_failure_msg "Pid or lock file exists, but process is not running"
-      fi
       cleanupRuntimeEnvironment
-
       noStatusCheck=1
       /etc/init.d/dafilesrv setup 1>/dev/null 2>/dev/null
       startCmd ${compName} ${noStatusCheck}
       return $?
    else
-      printf "Starting %-21s" "$compName.... "
+      log "Component $compName already started ..."
+      printf "Starting %-21s" "$compName ..."
       log_success_msg "Already started"
       return 0
    fi
@@ -412,18 +380,14 @@ start_dafilesrv() {
 
 startCmd() {
     noStatusCheck=$2
-    printf "Starting %-21s" "$compName.... "
-    if [ ${DEBUG} != "NO_DEBUG" ]; then
-        echo "compName=$compName compPath=$compPath compProcessName=$compType"
-    fi
+    printf "Starting %-21s" "$compName ..."
+    log "compType = $compType"
 
     # use less heap when threaded
     export MALLOC_ARENA_MAX=8
 
     # Creating logfiles for component
     logDir=$log/${compName}
-    logFile=$log/${compName}/init_${compName}.log
-
 
     if [ ${noStatusCheck} -ne 1 ]; then
       check_status ${PIDPATH} ${LOCKPATH} ${COMPPIDPATH} 1
@@ -434,6 +398,7 @@ startCmd() {
       fi
       if [ ${RCSTART} -eq 0 ]; then
         #Since component is already started but current script is failed till returning 0
+        log "$compName ---> already started"
         log_success_msg "Already Started"
         return ${RCSTART}
       fi
@@ -469,29 +434,24 @@ startCmd() {
     fi
 
     EXEC_COMMAND="${bin_path}/init_${compType} "
-    startcmd="${START_STOP_DAEMON} -S -p ${pid}/init_${compName}.pid -c ${user}:${group} -d ${compPath} ${UMASK_ARG} -m -x ${EXEC_COMMAND} -b  >>${logFile} 2>&1"
-
-    issueTime=`date`
-    logCommand="COMMAND:: $startcmd  ::Issued at $issueTime "
-    echo $logCommand >> $logFile
+    startcmd="${START_STOP_DAEMON} -S -p ${pid}/init_${compName}.pid -c ${user}:${group} -d ${compPath} ${UMASK_ARG} -m -x ${EXEC_COMMAND} -b"
 
+    log "${startcmd}"
 
     # Creating a Lock
     lockPath=${lock}/${compName}
     if [ ! -d $lockPath ]; then
-        mkdir -p $lockPath >> $logFile 2>&1
+        mkdir -p $lockPath >>/dev/null 2>&1
     fi
     chown -c $user:$group $lockPath >> /dev/null 2>&1
     lock ${lock}/${compName}/${compName}.lock
 
     if [ $__lockCreated -eq 0 ]; then
+        log "Cannot create the lock file. File locked by subsystem"
         log_failure_msg "Cannot create the lock file, File locked by subsystem"
         return 3
     fi
 
-    if [ ${DEBUG} != "NO_DEBUG" ]; then
-        echo $startcmd
-    fi
     eval $startcmd
 
 
@@ -516,6 +476,7 @@ startCmd() {
           COMPONENT_HAS_STARTED=1
         else
           if [ ${COMPONENT_HAS_STARTED} -eq 1 ]; then
+            log "${compName} failed to start cleanly"
             log_failure_msg "${compName} failed to start cleanly"
             return 0;
           fi
@@ -525,6 +486,7 @@ startCmd() {
 
     if [ ${WAITTIME} -eq 0 ]; then
         log_timeout_msg
+        log "${compName} has timed out, but may still be starting"
     fi
 
     chmod 644 ${envfile}
@@ -543,21 +505,15 @@ stop_component() {
     check_status ${PIDPATH} ${LOCKPATH} ${COMPPIDPATH} 0
     RCSTOP=$?
     if [ $RCSTOP -ne 0 ];then
-       if [ ${DEBUG} != "NO_DEBUG" ]; then
-          log_success_msg "Process already stopped :: check_status code is ${RCSTOP}"
-       else
-          log_success_msg "Already stopped"
-       fi
-       cleanup_component
-       cleanupRuntimeEnvironment
-       return 0
+      log "Already stopped"
+      log_success_msg "Already stopped"
+      cleanup_component
+      cleanupRuntimeEnvironment
+      return 0
     fi
 
     stopcmd="${START_STOP_DAEMON} -K -p ${PIDPATH} >> tmp.txt 2>&1"
-
-    if [ ${DEBUG} != "NO_DEBUG" ]; then
-        echo "$stopcmd"
-    fi
+    log "$stopcmd"
 
     eval $stopcmd
 
@@ -598,27 +554,15 @@ stop_component() {
 
 
 start_component() {
-    if [ ${DEBUG} != "NO_DEBUG" ]; then
-        echo "comp_return = $comp_return"
-        echo "compName = $compName compPath = $compPath "
-        echo "path = $PATH"
-    fi
-
 
-    # Creating logfiles for component
+    # Creating logdirs for component
     logDir=$log/${compName}
-    logFile=$log/${compName}/init_${compName}.log
 
     if [ ! -d $logDir ]; then
         mkdir -p $logDir >> tmp.txt 2>&1
         chown -c $user:$group $logDir >> /dev/null 2>&1
     fi
 
-    if [ ! -f $logFile ]; then
-        touch $logFile >> tmp.txt 2>&1
-        chown -c $user:$group $logFile >> /dev/null 2>&1
-    fi
-
     # Creating Runtime
     createRuntime $compName $compPath
 
@@ -628,7 +572,7 @@ start_component() {
 
     if [ ${runSetupOnly} -ne 1 ]
     then
-        cd ${compPath} > $logFile 2>&1
+        cd ${compPath} >>/dev/null 2>&1
         startCmd ${compName} 0
         STAT=$?
     fi
@@ -637,27 +581,29 @@ start_component() {
 
 restart_component() {
     if strstr "${compType}" "dafilesrv" ;then
-       /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
-       if [ $? -eq 0 ];then
-         /etc/init.d/dafilesrv stop 2>/dev/null
-       else
-           echo "Component $compName was not running. Will start it now for you ...."
-           cleanupRuntimeEnvironment
-       fi
-         /etc/init.d/dafilesrv start 2>/dev/null
+      /etc/init.d/dafilesrv status 1>/dev/null 2>/dev/null
+      if [ $? -eq 0 ];then
+        /etc/init.d/dafilesrv stop 2>/dev/null
+      else
+        log  "$compName ---> Stopped.  Now Starting ..."
+        echo "Component $compName was not running. Will start it now for you ..."
+        cleanupRuntimeEnvironment
+      fi
+      /etc/init.d/dafilesrv start 2>/dev/null
     else
-       check_status ${PIDPATH} ${LOCKPATH} ${COMPPIDPATH} 1
-       RCRESTART=$?
-       if [ $RCRESTART -ne 0 ];then
-           echo "Component $compName was not running. Will start it now for you ...."
-           cleanupRuntimeEnvironment
-       else
-           stop_component ${compName}
-       fi
-       start_component $compName
-       check_status ${PIDPATH} ${LOCKPATH} ${COMPPIDPATH} 1
-       RCRESTART=$?
-       return $RCRESTART
+      check_status ${PIDPATH} ${LOCKPATH} ${COMPPIDPATH} 1
+      RCRESTART=$?
+      if [ $RCRESTART -ne 0 ];then
+        log  "$compName ---> Stopped.  Now Starting ..."
+        echo "Component $compName was not running. Will start it now for you ..."
+        cleanupRuntimeEnvironment
+      else
+        stop_component ${compName}
+      fi
+      start_component $compName
+      check_status ${PIDPATH} ${LOCKPATH} ${COMPPIDPATH} 1
+      RCRESTART=$?
+      return $RCRESTART
     fi
 }
 
@@ -666,9 +612,11 @@ status_component() {
   RCSTATUS=$?
   getPid ${COMPPIDPATH}
   if [ ${RCSTATUS} -ne 0 ];then
+    log "${compName} ---> Stopped"
     printf "%-15s is stopped" "$compName"
   else
-    printf "%-15s ( pid %8s ) is running..." "${compName}" "${__pidValue}"
+    log "${compName} ---> Running ( pid ${__pidValue} )"
+    printf "%-15s ( pid %8s ) is running ..." "${compName}" "${__pidValue}"
   fi
   echo ""
   return ${RCSTATUS}
@@ -1001,23 +949,17 @@ cleanup_component() {
       WAITTIME=`expr ${WAITTIME} - 1`
       kill -0 -$pgid &> /dev/null
       if [ $? -ne 0 ];then
-        if [ ${DEBUG} != "NO_DEBUG" ];then
-          echo "${compName} orphans cleaned up"
-        fi
+        log "${compName} orphans cleaned up"
         RUNNING=0
         break;
       else
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-          echo "Waiting for ${compName} orphans to cleanup gracefully"
-        fi
+        log "Waiting for ${compName} orphans to cleanup gracefully"
         sleep 1
       fi
     done
 
     if [[ ${RUNNING} -eq 1 ]]; then
-      if [ ${DEBUG} != "NO_DEBUG" ]; then
-        echo "sending SIGKILL to ${compName} orphans"
-      fi
+      log "sending SIGKILL to ${compName} orphans"
       kill -9 -$pgid
     fi
   fi
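Stripped of the logging and error handling, the start path in startCmd() comes down to a single start-stop-daemon call followed by the sentinel wait. The sketch below is illustrative: the component name, user and paths are made up, while the daemon flags are the ones used above.

    # Illustrative reduction of startCmd(); names and paths are placeholders,
    # the start-stop-daemon flags match the ones used in hpcc_common.
    compName=mydali
    user=hpcc; group=hpcc
    compPath=/var/lib/HPCCSystems/${compName}
    pidDir=/var/run/HPCCSystems
    EXEC_COMMAND=/opt/HPCCSystems/bin/init_dali

    startcmd="start-stop-daemon -S -p ${pidDir}/init_${compName}.pid -c ${user}:${group} \
              -d ${compPath} -m -x ${EXEC_COMMAND} -b"
    echo "${startcmd}"      # the real script records this line with log() instead of echoing it

    eval ${startcmd}
    # startCmd() then polls check_status in a loop, declaring success once the
    # component's sentinel file appears and logging a timeout message otherwise.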

+ 16 - 0
initfiles/bash/etc/init.d/init-functions

@@ -198,6 +198,22 @@ log_timeout_msg () {
     fi
 }
 
+# general logging message for init scripts
+# expects $logfile to exist within the context of where it's called
+log() {
+  if [[ -z ${logfile+x} ]]; then
+    # logfile isn't set within the context of this function call
+    return 1
+  fi
+
+  local msg=$@
+  local header=$( date --universal --iso-8601='seconds' )
+  local header="${header:0:19}: "
+  printf "%s%s\n" "$header" "$msg" >> $logfile
+
+  return 0
+}
+
 #log_success_msg () {
 #    if [ -n "${1:-}" ]; then
 #        log_begin_msg $@
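Usage of the new helper is deliberately minimal: a script exports $logfile and then calls log(). The snippet below is only an illustration, with made-up paths, and assumes init-functions has been sourced the same way the init scripts do.

    # Illustration of the log() helper; paths are placeholders.
    source ${INSTALL_DIR}/etc/init.d/init-functions   # provides log(), as in the init scripts

    export logfile=/tmp/demo-component/init_demo.log
    mkdir -p "$(dirname ${logfile})"

    log "Removing demo.sentinel"     # appended as "YYYY-MM-DDTHH:MM:SS: Removing demo.sentinel"
    log "Calling demoserver &"

    unset logfile
    log "silently dropped" || echo "log() returns 1 when \$logfile is not set"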

+ 8 - 16
initfiles/bash/etc/init.d/lock.sh

@@ -21,13 +21,13 @@ checkLockDir () {
     LOCKPATH=$1
     #echo -n "Checking if Lock path exists "
     if [ -d $LOCKPATH ]; then
-        log_success_msg "$LOCKPATH ..."
+        log "$LOCKPATH ..."
     else 
-        log_failure_msg "$LOCKPATH ..."
-        echo "Creating Lock Path ..."
+        log "$LOCKPATH ..."
+        log "Creating Lock Path ..."
         /bin/mkdir $LOCKPATH
         if [ !-d $LOCKPATH ]; then
-            log_failure_msg "Can not create Lock Path $LOCKPATH ..."
+            log "Can not create Lock Path $LOCKPATH ..."
         fi  
     fi
 
@@ -43,9 +43,7 @@ lock () {
     locked $FILE
     if [ $flagLocked -eq 1 ]; then
         __lockCreated=0
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            log_failure_msg "Lock file $FILE already exists"
-        fi
+        log "Lock file $FILE already exists"
     else
         /bin/touch $FILE
         locked $FILE
@@ -53,9 +51,7 @@ lock () {
             #log_success_msg 
             __lockCreated=1
         else
-            if [ ${DEBUG} != "NO_DEBUG" ]; then
-                log_failure_msg "Failed to create file $FILE"
-            fi
+            log "Failed to create file $FILE"
             __lockCreated=0
         fi
     fi          
@@ -80,16 +76,12 @@ unlock () {
     FILE=$1
     #echo -n "Removing lock file $1 "
     if [ ! -e $FILE ]; then
-        if [ ${DEBUG} != "NO_DEBUG" ]; then
-            log_failure_msg "Lock file $FILE does not exist"
-        fi
+        log "Lock file $FILE does not exist"
         __lockRemoved=0
     else
         /bin/rm -rf $FILE
         if [ -e $FILE ]; then
-            if [ ${DEBUG} != "NO_DEBUG" ]; then
-                log_failure_msg "File $FILE can not be removed"
-            fi
+            log "File $FILE can not be removed"
             __lockRemoved=0
         else
             __lockRemoved=1
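Callers keep interacting with lock.sh through the __lockCreated and __lockRemoved flags rather than return codes; a usage sketch follows (the lock path is a placeholder, and lock.sh plus the log() helper are assumed to be sourced).

    # Usage sketch for lock()/unlock(); the lock path is a placeholder.
    lockfile=/var/lock/HPCCSystems/mydali/mydali.lock

    lock ${lockfile}
    if [ ${__lockCreated} -eq 0 ]; then
        log "Cannot create the lock file. File locked by subsystem"
        exit 3
    fi

    # ... start and supervise the component ...

    unlock ${lockfile}
    [ ${__lockRemoved} -eq 0 ] && log "File ${lockfile} can not be removed"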

+ 15 - 6
initfiles/bash/etc/init.d/pid.sh

@@ -25,7 +25,7 @@ checkPidDir () {
         echo "Creating a Pid directory"
         /bin/mkdir -P ${PIDFILEPATH} 
         if [[ ! -e ${PIDFILEPATH} ]]; then
-            echo "Can not create a Pid directory $PIDFILEPATH"
+            log  "Can not create a Pid directory $PIDFILEPATH"
         else
             log_success_msg
         fi
@@ -39,15 +39,17 @@ createPid () {
     checkPid ${PIDFILEPATH}
     if [[ $__flagPid -eq 1 ]]; then
         [[ ${DEBUG} != "NO_DEBUG" ]] && log_failure_msg "Pid file already exists"
+        log "Pid file already exists"
         __pidCreated=0
     else
         echo $PIDNO > ${PIDFILEPATH}
         checkPid ${PIDFILEPATH}
         if [[ $__flagPid -eq 1 ]]; then
-            [[ ${DEBUG} != "NO_DEBUG" ]] && log_success_msg 
+            [[ ${DEBUG} != "NO_DEBUG" ]] && log_success_msg
             __pidCreated=1
         else
             [[ ${DEBUG} != "NO_DEBUG" ]] && log_failure_msg "Failed to create Pid"
+            log "Failed to create Pid"
             __pidCreated=0
         fi
     fi
@@ -80,6 +82,7 @@ removePid () {
     checkPid ${PIDFILEPATH}
     if [[ $__flagPid -eq 0 ]]; then
         [[ ${DEBUG} != "NO_DEBUG" ]] && log_failure_msg "Pidfile doesn't exist"
+        log "Pid file doesn't exist"
         __pidRemoved=0
     else
         rm -rf ${PIDFILEPATH} > /dev/null 2>&1
@@ -87,11 +90,12 @@ removePid () {
             __pidRemoved=1
         else
             [[ ${DEBUG} != "NO_DEBUG" ]] && log_failure_msg "Failed to remove pid"
+            log "Failed to remove pid"
             __pidRemoved=0
         fi
     fi
 }
-    
+
 checkPidExist() {
     PIDFILEPATH=$1
     getPid ${PIDFILEPATH}
@@ -134,12 +138,15 @@ check_status() {
     # check if running and healthy
     if [[ $componentLocked -eq 1 ]] && [[ $initRunning -eq 1 ]] && [[ $compRunning -eq 1 ]]; then
         [[ ${DEBUG} != "NO_DEBUG" ]] && echo "everything is up except sentinel"
+        log "$compName ---> Waiting on Sentinel"
         if [[ ${SENTINELFILECHK} -eq 1 ]]; then
             if [[ ${sentinelFlag} -eq 0 ]]; then
                 [[ ${DEBUG} != "NO_DEBUG" ]] && echo "Sentinel not yet located, process currently unhealthy"
+                log "$compName ---> Currently Unhealthy"
                 return 2 
             fi
             [[ ${DEBUG} != "NO_DEBUG" ]] && echo "Sentinel is now up"
+            log "$compName ---> Sentinel Up"
         fi
         return 0
     # check if shutdown and healthy
@@ -147,16 +154,18 @@ check_status() {
         if [[ ${SENTINELFILECHK} -eq 1 ]]; then
             if [[ ${sentinelFlag} -eq 1 ]]; then
                 [[ ${DEBUG} != "NO_DEBUG" ]] && echo "Sentinel is up but orphaned"
+                log "$compName ---> Orphaned State"
                 return 3
             fi
             [[ ${DEBUG} != "NO_DEBUG" ]] && echo "Sentinel is now down"
+            log "$compName ---> Sentinel Down"
         fi
         return 1
     else
         if [[ "${DEBUG}" != "NO_DEBUG" ]]; then
-            [[ $componentLocked -eq 0 ]] && log_failure_msg "component is not locked: $2"
-            [[ $initRunning -eq 0 ]]     && log_failure_msg "process for ${compName}_init.pid is not running"
-            [[ $compRunning -eq 0 ]]     && log_failure_msg "process for ${compName}.pid is not running"
+            [[ $componentLocked -eq 0 ]] && log "$compName ---> component is not locked: $LOCKFILEPATH"
+            [[ $initRunning -eq 0 ]]     && log "$compName ---> process for init_${compName}.pid is not running"
+            [[ $compRunning -eq 0 ]]     && log "$compName ---> process for ${compName}.pid is not running"
         fi
         return 4
     fi
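The extra log lines do not change the contract of check_status; callers still branch on its return code, which works out roughly as follows (paths are placeholders and pid.sh is assumed to be sourced).

    # Rough map of the check_status return codes used by the init scripts.
    check_status ${PIDPATH} ${LOCKPATH} ${COMPPIDPATH} 1    # final arg 1 => also check the sentinel
    case $? in
        0) log "running and healthy (sentinel up)" ;;
        1) log "stopped cleanly (sentinel down)" ;;
        2) log "starting: sentinel not yet located, currently unhealthy" ;;
        3) log "orphaned: sentinel up but init/component process gone" ;;
        4) log "inconsistent: lock or pid state does not match a running component" ;;
    esac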

+ 15 - 1
initfiles/bin/init_configesp

@@ -19,29 +19,43 @@
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
+
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
 
 export SENTINEL="configesp.sentinel"
+log "Removing ---> ${SENTINEL}"
 rm -f ${SENTINEL}
 
 killed()
 {
+    log "attempting to kill $component"
     kill_process ${PID_NAME} configesp 3 ${SENTINEL}
+    if [[ $? -eq 1 ]]; then
+      log "could not kill $component"
+    else
+      log "Stopped $component"
+    fi
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
-
+log "calling configesp 1>/dev/null 2>/dev/null"
 nohup configesp 1>/dev/null 2>/dev/null &
 echo $! > $PID_NAME
 wait
+log "removing $PID_NAME"
 rm $PID_NAME
 
 while [[ -e ${SENTINEL} ]]; do
     sleep 5
     if [[ -e ${SENTINEL} ]]; then
+        log "calling configesp 1>/dev/null 2>/dev/null"
         nohup configesp 1>/dev/null 2>/dev/null &
         echo $! > $PID_NAME
         wait
+        log "removing $PID_NAME"
         rm $PID_NAME
     fi
 done
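init_configesp and the other init_* wrappers below all follow the same sentinel-driven supervision loop; reduced to its core (the daemon name and paths are placeholders, logging omitted) it is:

    # Core of the sentinel-driven restart loop shared by the init_* wrappers;
    # mydaemon and PID_NAME are placeholders.
    PID_NAME=/var/run/HPCCSystems/mydaemon/mydaemon.pid
    export SENTINEL="mydaemon.sentinel"
    rm -f ${SENTINEL}                     # clean start; the daemon recreates it once healthy

    mydaemon 1>/dev/null 2>/dev/null &
    echo $! > ${PID_NAME}
    wait                                  # block until the daemon exits
    rm ${PID_NAME}

    while [[ -e ${SENTINEL} ]]; do        # sentinel still present => unexpected exit, so restart
        sleep 5
        if [[ -e ${SENTINEL} ]]; then
            mydaemon 1>/dev/null 2>/dev/null &
            echo $! > ${PID_NAME}
            wait
            rm ${PID_NAME}
        fi
    done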

+ 22 - 5
initfiles/bin/init_dafilesrv.in

@@ -27,35 +27,52 @@ shift
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
-COMP_NAME="$(basename $PWD)"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
 
 # this must match jsocket hard limit
 export handlelimit=32768
 
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
+
 export SENTINEL="dafilesrv.sentinel"
+log "Removing ${SENTINEL}"
 rm -f ${SENTINEL}
 
 killed()
 {
+    log "Attempting to kill $component"
     kill_process ${PID_NAME} dafilesrv 3 ${SENTINEL}
+    if [[ $? -eq 1 ]]; then
+      log "could not kill $component"
+    else
+      log "$component Stopped"
+    fi
     exit 255
 }
 
-ulimit -c unlimited
-ulimit -n $handlelimit
+log "Setting core and handle limit"
+ulimit -Sc hard
+[[ $? -ne 0 ]] && log "Failed to set core file limit"
+ulimit -Sn hard
+[[ $? -ne 0 ]] && log "Failed to set file descriptor limit"
 
 trap "killed" SIGINT SIGTERM SIGKILL
-dafilesrv -L $log -I ${COMP_NAME} &
+log "Calling dafilesrv -L $log -I $component &"
+dafilesrv -L $log -I $component &
 echo $! > $PID_NAME
 wait
+log "Removing $PID_NAME"
 rm $PID_NAME
 
 while [[ -e ${SENTINEL} ]]; do
     sleep 5
     if [[ -e ${SENTINEL} ]]; then
-        dafilesrv -L $log -I ${COMP_NAME} &
+        log "Calling dafilesrv -L $log -I $component &"
+        dafilesrv -L $log -I $component &
         echo $! > $PID_NAME
         wait
+        log "Removing $PID_NAME"
         rm $PID_NAME
     fi
 done

+ 16 - 0
initfiles/bin/init_dali

@@ -18,29 +18,45 @@
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
+
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
 
 export SENTINEL="daserver.sentinel"
+log "Removing $SENTINEL"
 rm -f ${SENTINEL}
 
 killed()
 {
+    log "calling dalistop ."
     dalistop .
+    log "Attempting to kill $component"
     kill_process ${PID_NAME} daserver 3 ${SENTINEL}
+    if [[ $? -eq 1 ]]; then
+      log "could not kill $component"
+    else
+      log "$component Stopped"
+    fi
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
+log "calling daserver 1>/dev/null 2>/dev/null &"
 daserver 1>/dev/null 2>/dev/null &
 echo $! > $PID_NAME
 wait
+log "$component returned, removing $PID_NAME"
 rm $PID_NAME
 
 while [[ -e ${SENTINEL} ]]; do
     sleep 5
     if [[ -e ${SENTINEL} ]]; then
+        log "calling daserver 1>/dev/null 2>/dev/null &"
         daserver 1>/dev/null 2>/dev/null &
         echo $! > $PID_NAME
         wait
+        log "$component returned, removing $PID_NAME"
         rm $PID_NAME
     fi
 done

+ 15 - 1
initfiles/bin/init_dfuserver

@@ -18,29 +18,43 @@
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
+
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
 
 export SENTINEL="dfuserver.sentinel"
+log "Removing ${SENTINEL}"
 rm -f ${SENTINEL}
 
 killed()
 {
+    log "Attempting to kill $component"
     kill_process ${PID_NAME} dfuserver 15 ${SENTINEL}
+    if [[ $? -eq 1 ]]; then
+      log "could not kill $component"
+    else
+      log "$component Stopped"
+    fi
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
-
+log "Calling dfuserver 1>/dev/null 2>/dev/null &"
 dfuserver 1>/dev/null 2>/dev/null &
 echo $! > $PID_NAME
 wait
+log "Removing $PID_NAME"
 rm $PID_NAME
 
 while [[ -e ${SENTINEL} ]]; do
     sleep 5
     if [[ -e ${SENTINEL} ]]; then
+        log "Calling dfuserver 1>/dev/null 2>/dev/null &"
         dfuserver 1>/dev/null 2>/dev/null &
         echo $! > $PID_NAME
         wait
+        log "Removing $PID_NAME"
         rm $PID_NAME
     fi
 done

+ 16 - 1
initfiles/bin/init_eclagent.in

@@ -20,31 +20,46 @@
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
+
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
 
 export SENTINEL="agentexec.sentinel"
+log "Removing $SENTINEL"
 rm -f ${SENTINEL}
 
+log "Removing $PID_DIR/hthortemp/*"
 rm -f ${PID_DIR}/hthortemp/*
 
 killed ()
 {
+    log "Attempting to kill $component"
     kill_process ${PID_NAME} agentexec 3 ${SENTINEL}
+    if [[ $? -eq 1 ]]; then
+      log "could not kill $component"
+    else
+      log "$component Stopped"
+    fi
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
-
+log "Calling agentexec 1>/dev/null 2>/dev/null &"
 agentexec 1>/dev/null 2>/dev/null &
 echo $! > $PID_NAME
 wait
+log "Removing $PID_NAME"
 rm $PID_NAME
 
 while [[ -e ${SENTINEL} ]]; do
     sleep 1
     if [[ -e ${SENTINEL} ]]; then
+        log "Calling agentexec 1>/dev/null 2>/dev/null &"
         agentexec 1>/dev/null 2>/dev/null &
         echo $! > $PID_NAME
         wait
+        log "Removing $PID_NAME"
         rm $PID_NAME
     fi
 done

+ 9 - 0
initfiles/bin/init_eclccserver

@@ -18,8 +18,13 @@
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
+
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
 
 export SENTINEL="eclccserver.sentinel"
+log "Removing $SENTINEL"
 rm -f ${SENTINEL}
 
 killed()
@@ -29,17 +34,21 @@ killed()
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
+log "Calling eclccserver 1>/dev/null 2>/dev/null &"
 eclccserver 1>/dev/null 2>/dev/null &
 echo $! > $PID_NAME
 wait
+log "Removing $PID_NAME"
 rm $PID_NAME
 
 while [[ -e ${SENTINEL} ]]; do
     sleep 5
     if [[ -e ${SENTINEL} ]]; then
+        log "Calling eclccserver 1>/dev/null 2>/dev/null &"
         eclccserver 1>/dev/null 2>/dev/null & 
         echo $! > $PID_NAME
         wait
+        log "Removing $PID_NAME"
         rm $PID_NAME
     fi
 done

+ 15 - 0
initfiles/bin/init_eclscheduler

@@ -18,28 +18,43 @@
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
+
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
 
 export SENTINEL="eclscheduler.sentinel"
+log "Removing $SENTINEL"
 rm -f ${SENTINEL}
 
 killed()
 {
+    log "Attempting to kill $component"
     kill_process ${PID_NAME} eclscheduler 3 ${SENTINEL}
+    if [[ $? -eq 1 ]]; then
+      log "could not kill $component"
+    else
+      log "$component Stopped"
+    fi
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
+log "Calling eclscheduler 1>/dev/null 2>/dev/null &"
 eclscheduler 1>/dev/null 2>/dev/null &
 echo $! > $PID_NAME
 wait
+log "Removing $PID_NAME"
 rm $PID_NAME
 
 while [[ -e ${SENTINEL} ]]; do
     sleep 5
     if [[ -e ${SENTINEL} ]]; then
+        log "Calling eclscheduler 1>/dev/null 2>/dev/null &"
         eclscheduler 1>/dev/null 2>/dev/null &
         echo $! > $PID_NAME
         wait
+        log "Removing $PID_NAME"
         rm $PID_NAME
     fi
 done

+ 11 - 1
initfiles/bin/init_esp

@@ -18,28 +18,38 @@
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
+
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
 
 export SENTINEL="esp.sentinel"
+log "Removing $SENTINEL"
 rm -f ${SENTINEL}
 
 SNMPID=$$
 
-killed() {
+killed()
+{
     kill_process ${PID_NAME} esp 15 ${SENTINEL}
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
+log "Calling esp snmpid=$SNMPID 1>/dev/null 2>/dev/null &"
 esp snmpid=$SNMPID 1>/dev/null 2>/dev/null &
 echo $! > $PID_NAME
 wait
+log "Removing $PID_NAME"
 rm $PID_NAME
 while [[ -e ${SENTINEL} ]]; do
     sleep 5
     if [[ -e ${SENTINEL} ]]; then
+        log "Calling esp snmpid=$SNMPID 1>/dev/null 2>/dev/null &"
         esp snmpid=$SNMPID 1>/dev/null 2>/dev/null &
         echo $! > $PID_NAME
         wait
+        log "Removing $PID_NAME"
         rm $PID_NAME
     fi
 done

+ 1 - 1
initfiles/bin/init_ftslave

@@ -15,4 +15,4 @@
 #    limitations under the License.
 ################################################################################
 
-# ftslave is executed on demand via ssh - no action needed at service start time
+# ftslave is executed on demand via ssh - no action needed at service start time

+ 26 - 5
initfiles/bin/init_roxie

@@ -18,17 +18,24 @@
 PATH_PRE=$(type -path hpcc_setenv)
 source ${PATH_PRE}
 PID_NAME="$PID/$(basename $PWD).pid"
+source ${INSTALL_DIR}/etc/init.d/hpcc_common
+
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
 
 export SENTINEL="roxie.sentinel"
+log "Removing $SENTINEL"
 rm -f ${SENTINEL}
 
-if [ -n "$1" ]; then
+if [[ -n "$1" ]]; then
+    log "cd $1"
     cd $1
 fi
 
+log "sourcing roxievars"
 source ./roxievars
 
-if [ -n "$2" ]; then
+if [[ -n "$2" ]]; then
     logfilename=$2
 else
     logfilename="`date +%m_%d_%Y_%H_%M_%S`"
@@ -37,32 +44,46 @@ fi
 export LIBC_FATAL_STDERR_=1
 export restarts=0
 
-ulimit -c unlimited
-ulimit -n $NUM_ROXIE_HANDLES
+log "Setting limits for core and open file descriptors"
+ulimit -Sc hard > /dev/null 2>&1
+[[ $? -ne 0 ]] && log "Failed to set core file limit"
+ulimit -Sn hard > /dev/null 2>&1
+[[ $? -ne 0 ]] && log "Failed to set file descriptor limit"
 
 killed()
 {
     if [[ -n "$1" ]]; then
+        log "cd $1"
         cd $1
     fi
+    log "Attempting to kill $component"
     kill_process ${PID_NAME} roxie 3 ${SENTINEL}
+    if [[ $? -eq 1 ]]; then
+      log "could not kill $component"
+    else
+      log "$component Stopped"
+    fi
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
+log "Calling nohup roxie --topology=RoxieTopology.xml --logfile --restarts=$restarts --stdlog=0 2>>$logfilename.stderr 1>>$logfilename.stdout &"
 nohup roxie --topology=RoxieTopology.xml --logfile --restarts=$restarts --stdlog=0 2>>$logfilename.stderr 1>>$logfilename.stdout &
 echo $! > $PID_NAME 
 wait
+log "Removing $PID_NAME"
 rm $PID_NAME
 
 # Automatically restart roxie when it dies
 while [[ -e ${SENTINEL} ]]; do
     export restarts=$(($restarts+1))
+    log "Restarting $restarts"
     echo Restarting $restarts >> $logfilename.stderr
     echo Restarting $restarts >> $logfilename.stdout
+    log "Calling nohup roxie --topology=RoxieTopology.xml --logfile --restarts=$restarts --stdlog=0 2>>$logfilename.stderr 1>>$logfilename.stdout &"
     nohup roxie --topology=RoxieTopology.xml --logfile --restarts=$restarts --stdlog=0 2>>$logfilename.stderr 1>>$logfilename.stdout &
     echo $! > $PID_NAME
     wait
+    log "Removing $PID_NAME"
     rm $PID_NAME
 done
-

+ 14 - 0
initfiles/bin/init_sasha

@@ -22,27 +22,41 @@ PID_NAME="$PID/$(basename $PWD).pid"
 INSTALL_DIR="$(dirname ${PATH_PRE})/.."
 source  ${INSTALL_DIR}/etc/init.d/hpcc_common
 
+component=$(basename $PWD)
+export logfile="${LOG_DIR}/${component}/init_${component}_$(date +%Y_%m_%d_%H_%M_%S).log"
+
 export SENTINEL="saserver.sentinel"
+log "Removing $SENTINEL"
 rm -f ${SENTINEL}
 
 killed()
 {
+    log "Attempting to kill $component"
     kill_process ${PID_NAME} saserver 3 ${SENTINEL}
+    if [[ $? -eq 1 ]]; then
+      log "could not kill $component"
+    else
+      log "$component Stopped"
+    fi
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM SIGKILL
+log "Calling saserver 1>/dev/null 2>/dev/null &"
 saserver 1>/dev/null 2>/dev/null &
 echo $! > $PID_NAME
 wait
+log "Removing $PID_NAME"
 rm $PID_NAME
 
 while [[ -e ${SENTINEL} ]]; do
     sleep 5
     if [[ -e ${SENTINEL} ]]; then
+        log "Calling saserver 1>/dev/null 2>/dev/null &"
         saserver 1>/dev/null 2>/dev/null &
         echo $! > $PID_NAME
         wait
+        log "Removing $PID_NAME"
         rm $PID_NAME
     fi
 done

+ 43 - 41
initfiles/bin/init_thor

@@ -17,15 +17,23 @@
 
 deploydir=$(dirname $(type -path $0))
 PATH_PRE=$(type -path hpcc_setenv)
-source ${PATH_PRE} 
+source ${PATH_PRE}
 
 INSTALL_DIR=$(dirname ${PATH_PRE})/..
 source  ${INSTALL_DIR}/etc/init.d/hpcc_common
+component=$(basename $PWD)
 
-PID_NAME="$PID/$(basename $PWD).pid"
-INIT_PID_NAME="$PID/init_$(basename $PWD).pid"
-echo $$ > $INIT_PID_NAME
+PID_NAME="$PID/${component}.pid"
 
+timestamp="$(date +%Y_%m_%d_%H_%M_%S)"
+export logfile="${LOG_DIR}/${component}/init_${component}_${timestamp}.log"
+
+# for use by init_thorslave call
+logredirect="init_thorslave_${component}_${timestamp}.log"
+
+log "Starting ${component}"
+
+log "removing any previous sentinel file"
 export SENTINEL="thor.sentinel"
 rm -f ${SENTINEL}
 
@@ -33,7 +41,6 @@ if [[ -z "$deploydir" ]]; then
     deploydir=$(pwd -P)
 fi
 
-compname=$(basename $PWD)
 instancedir=$(pwd -P)
 source $instancedir/setvars
 
@@ -45,10 +52,6 @@ fi
 ln -s -f $deploydir/thormaster${LCR} thormaster_$THORNAME
 
 ENV_DIR=$(cat ${HPCC_CONFIG} | sed -n "/\[DEFAULT\]/,/\[/p" | grep "^configs=" | sed -e 's/^configs=//')
-logdir=$(updtdalienv $ENV_DIR/environment.xml -d log thor $THORNAME)
-logfile=$logdir/${THORNAME}.log
-
-exec >> $logfile
 
 contains()
 {
@@ -91,14 +94,14 @@ makethorgroup()
 
 kill_slaves()
 {
+    log "Killing slaves"
     if [[ "$localthor" = "true" ]]; then
-    $deploydir/init_thorslave stop localhost $THORMASTER $THORMASTERPORT $logdir $instancedir $deploydir $THORNAME $PATH_PRE $logredirect
+        $deploydir/init_thorslave stop localhost $THORMASTER $THORMASTERPORT $LOG_DIR $instancedir $deploydir $THORNAME $PATH_PRE $logredirect
     else
         # we want to kill only slaves that have already been started in run_thor
         if [[ -r $instancedir/uslaves.start ]]; then
             nslaves=$(cat $instancedir/uslaves.start 2> /dev/null | wc -l)
-            $deploydir/frunssh $instancedir/uslaves.start "/bin/sh -c '$deploydir/init_thorslave stop %a $THORMASTER $THORMASTERPORT $logdir $instancedir $deploydir $THORNAME $PATH_PRE $logredirect'" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$nslaves 2>&1 | egrep -v "no process killed"
-            echo slaves stopped
+            $deploydir/frunssh $instancedir/uslaves.start "/bin/sh -c '$deploydir/init_thorslave stop %a $THORMASTER $THORMASTERPORT $LOG_DIR $instancedir $deploydir $THORNAME $PATH_PRE $logredirect'" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$nslaves 2>&1
         fi
     fi
 
@@ -107,15 +110,16 @@ kill_slaves()
 
 killed()
 {
-    echo "Stopping"
-    kill_process ${PID_NAME} thormaster_${compname} 30
+    log "Stopping ${component}"
+    kill_process ${PID_NAME} thormaster_${component} 30
     kill_slaves
+    log "removing init.pid file and uslaves.start file"
     rm -f $INIT_PID_NAME $instancedir/uslaves.start > /dev/null 2>&1
     exit 255
 }
 
 trap "killed" SIGINT SIGTERM
-# attempt to clean up any old slaves
+log "Ensuring a clean working environment ..."
 kill_slaves
 thorpid=0
 
@@ -124,87 +128,85 @@ while [[ 1 ]]; do
     daliadmin server=$DALISERVER dfsgroup ${groupName} slaves
     errcode=$?
     if [[ 0 != ${errcode} ]]; then
-    echo "failed to lookup dali group for $groupName"
+    log "failed to lookup dali group for $groupName"
         exit 1
     fi
     makethorgroup
     sort $instancedir/slaves | uniq > $instancedir/uslaves.start
 
-    echo "--------------------------"
-    echo "starting thorslaves ..."
+    log "--------------------------"
+    log "starting thorslaves ..."
 
-    logpthtail=$(date +%m_%d_%Y_%H_%M_%S)
-    logredirect="$logdir/init_thorslave_$logpthtail.log"
     # Would be simpler, if there was simple way to test if ip is local and get rid of 'localthor' setting
     if [[ "$localthor" = "true" ]]; then
         slaveip=$(head -n 1 $instancedir/uslaves.start)
-        $deploydir/init_thorslave start $slaveip $THORMASTER $THORMASTERPORT $logdir $instancedir $deploydir $THORNAME $PATH_PRE $logredirect
+        $deploydir/init_thorslave start $slaveip $THORMASTER $THORMASTERPORT $LOG_DIR $instancedir $deploydir $THORNAME $PATH_PRE $logredirect
     else
         nslaves=$(cat $instancedir/uslaves.start | wc -l)
-        $deploydir/frunssh $instancedir/uslaves.start "/bin/sh -c '$deploydir/init_thorslave start %a $THORMASTER $THORMASTERPORT $logdir $instancedir $deploydir $THORNAME $PATH_PRE $logredirect'" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$nslaves 2>&1
+        $deploydir/frunssh $instancedir/uslaves.start "/bin/sh -c '$deploydir/init_thorslave start %a $THORMASTER $THORMASTERPORT $LOG_DIR $instancedir $deploydir $THORNAME $PATH_PRE $logredirect'" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$nslaves 2>&1
         FRUNSSH_RC=$?
         if [[ ${FRUNSSH_RC} -gt 0 ]]; then
-            echo "Error ${FRUNSSH_RC} in frunssh"
-            echo "Please check $(dirname ${logdir})/frunssh for more details"
+            log "Error ${FRUNSSH_RC} in frunssh"
+            log "Please check $(dirname ${LOG_DIR})/frunssh for more details"
             # clean up any slaves it was able to reach
             killed
         fi
     fi
 
-    echo thormaster cmd : $instancedir/thormaster_$THORNAME MASTER=$THORMASTER:$THORMASTERPORT
+    log "thormaster cmd : $instancedir/thormaster_$THORNAME MASTER=$THORMASTER:$THORMASTERPORT"
     nohup $instancedir/thormaster_$THORNAME MASTER=$THORMASTER:$THORMASTERPORT 2> /dev/null 1>/dev/null &
 
     thorpid=$!
     if [[ "$thorpid" -ne "0" ]]; then
-        echo thormaster$LCR process started pid = $thorpid
+        log "thormaster$LCR process started pid = $thorpid"
 
         echo $thorpid > $PID_NAME
         wait $thorpid
         errcode=$?
         case $errcode in
         # TEC_Clean
-        0)  echo "Thormaster ($thorpid) Exited cleanly"
+        0)  log "Thormaster ($thorpid) Exited cleanly"
             rm -f $instancedir/uslaves.start $PID_NAME $INIT_PID_NAME > /dev/null 2>&1
             exit 0
             ;;
         # TEC_CtrlC
-        1)  echo "Thormaster ($thorpid) Interrupted, Ctrl-C caught"
+        1)  log "Thormaster ($thorpid) Interrupted, Ctrl-C caught"
             killed
             ;;
         # TEC_Idle, TEC_Watchdog, TEC_Swap, TEC_DaliDown
-        2|3|5|6)    [[ $errcode -eq 2 ]] && echo "Thormaster ($thorpid) Idle"
-                    [[ $errcode -eq 3 ]] && echo "Thormaster ($thorpid) Lost connection to slave(s)"
-                    [[ $errcode -eq 5 ]] && echo "Thormaster ($thorpid) Swap node required"
-                    [[ $errcode -eq 6 ]] && echo "Thormaster ($thorpid) Unable to connect to Dali"
-                    echo 'stopping thorslave(s) for restart'
+        2|3|5|6)    [[ $errcode -eq 2 ]] && log "Thormaster ($thorpid) Idle"
+                    [[ $errcode -eq 3 ]] && log "Thormaster ($thorpid) Lost connection to slave(s)"
+                    [[ $errcode -eq 5 ]] && log "Thormaster ($thorpid) Swap node required"
+                    [[ $errcode -eq 6 ]] && log "Thormaster ($thorpid) Unable to connect to Dali"
+                    log "Stopping thorslave(s) for restart"
                     kill_slaves
                     if [[ 0 != $autoSwapNode ]]; then
-                        echo "Running autoswap $THORNAME :: ($thorpid)"
-                        swapnode auto $DALISERVER $compname
+                        log "Running autoswap $THORNAME :: ($thorpid)"
+                        swapnode auto $DALISERVER $component
                         errcode=$?
                         if [[ 0 != ${errcode} ]]; then
-                            echo "auto swap node failed, errcode=${errcode}"
+                            log "auto swap node failed, errcode=${errcode}"
                             killed
                         fi
                     fi
                 # restarting thormaster
                 ;;
         # TEC_SlaveInit
-        4)  echo "Thormaster ($thorpid) Slaves failed to initialize"
-            echo "Shutting down"
+        4)  log "Thormaster ($thorpid) Slaves failed to initialize"
+            log "Shutting down"
             killed
             ;;
-        *)  echo "Thormaster ($thorpid) Unknown error code.  Stopping"
+        *)  log "Thormaster ($thorpid) Unknown error code.  Stopping"
             killed
             ;;
         esac
     else
-        echo failed to start thormaster$LCR, pausing for 30 seconds
+        log "failed to start thormaster$LCR, pausing for 30 seconds"
         sleep 30
         kill_slaves
     fi
     if [[ ! -e $SENTINEL ]]; then
-        echo $SENTINEL 'has been removed or thormaster did not fully start - script stopping'
+        log "$SENTINEL has been removed or thormaster did not fully start - script stopping"
         exit 0
     fi
 done

+ 17 - 15
initfiles/bin/init_thorslave

@@ -24,9 +24,10 @@ instancedir=$6
 deploydir=$7
 hpcc_compname=$8
 hpcc_setenv=$9
-logredirect=${10}
+export logfile="${logpth}/${hpcc_compname}/${10}"
 
-source ${hpcc_setenv}
+source "$hpcc_setenv"
+source "$(dirname $hpcc_setenv)/../etc/init.d/hpcc_common"
 
 slavename=thorslave_${hpcc_compname}
 
@@ -34,6 +35,7 @@ stop_slaves()
 {
     killall -0 $slavename > /dev/null 2>&1
     if [[ $? -eq 0 ]];then
+        log "killing slaves"
         killall -9 $slavename > /dev/null 2>&1
     fi
     rm -f $PID/${slavename}_*.pid > /dev/null 2>&1
@@ -41,6 +43,7 @@ stop_slaves()
 
 start_slaves()
 {
+
     # insuring dafilesrv is running on the machine as it is a prerequisite
     sudo /etc/init.d/dafilesrv status > /dev/null 2>&1
     if [[ $? -ne 0 ]];then
@@ -52,26 +55,25 @@ start_slaves()
 
     # insuring parent directory structure is setup properly
     mkdir -p $instancedir
-    mkdir -p $(dirname $logredirect)
-    exec >>$logredirect 2>&1
+    mkdir -p $(dirname $logfile)
 
     cd $instancedir
 
-    echo "$(date) Dependency dafilesrv is running"
+    log "dependency dafilesrv started"
 
     ulimit -Sc hard > /dev/null 2>&1
-    [[ $? -ne 0 ]] && echo "$(date) Failed to set ulimit for core file size"
+    [[ $? -ne 0 ]] && log "Failed to set ulimit for core file size"
     ulimit -Sn hard > /dev/null 2>&1
-    [[ $? -ne 0 ]] && echo "$(date) Failed to set ulimit for number of file descriptors open"
+    [[ $? -ne 0 ]] && log "Failed to set ulimit for number of file descriptors open"
 
-    echo "$(date) slave(${ip}) init"
-    echo "$(date) slave(s) starting"
+    log "slave(${ip}) init"
+    log "slave(s) starting"
 
     # create symlink for easier identification of slaves by compName
     ln -s -f $deploydir/thorslave_lcr ${slavename}
 
     # sync to current master thorgroup
-    echo "$(date) rsync -e ssh -o StrictHostKeyChecking=no ${master}:${instancedir}/thorgroup ${instancedir}/thorgroup.slave"
+    log "rsync -e ssh -o StrictHostKeyChecking=no ${master}:${instancedir}/thorgroup ${instancedir}/thorgroup.slave"
     rsync -e "ssh -o StrictHostKeyChecking=no" $master:$instancedir/thorgroup $instancedir/thorgroup.slave
 
     let "slavenum = 1";
@@ -82,13 +84,13 @@ start_slaves()
             if [[ "$slaveport" = "" ]]; then
                 slaveport=$THORSLAVEPORT
             fi
-            echo "$(date) $slavename  master=$master:$masterport slave=.:$slaveport slavenum=$slavenum logDir=$logpth"
-            ./$slavename master=$master:$masterport slave=.:$slaveport slavenum=$slavenum logDir=$logpth 2>/dev/null 1>/dev/null &
+            log "$slavename  master=$master:$masterport slave=.:$slaveport slavenum=$slavenum logDir=$logpth/$hpcc_compname"
+            ./$slavename master=$master:$masterport slave=.:$slaveport slavenum=$slavenum logDir=$logpth/$hpcc_compname 2>/dev/null 1>/dev/null &
             slavepid=$!
             if [[ "$slavepid" -eq "0" ]]; then
-                echo "$(date) failed to start"
+                log "failed to start"
             else
-                echo "$(date) slave pid $slavepid started"
+                log "slave pid $slavepid started"
                 PID_NAME="$PID/${slavename}_${slavenum}.pid"
                 echo $slavepid > $PID_NAME
             fi
@@ -99,7 +101,7 @@ start_slaves()
 
 print_usage()
 {
-  echo usage: cmd ip master masterport logdir workingdir deploydir hpcc_compname hpcc_setenv logredirect
+  log "usage: cmd ip master masterport logdir workingdir deploydir hpcc_compname hpcc_setenv logredirect"
 }
 
 ##  Main

+ 30 - 32
initfiles/componentfiles/thor/start_backupnode.in

@@ -26,15 +26,6 @@ if [ $# -lt 1 ]; then
     exit 1
 fi
 
-pid=`${PIDOF} backupnode`
-if [ -n "$pid" ]; then
-   echo stopping previous backupnode processes $pid
-   killall backupnode
-fi
-
-echo ------------------------------
-echo starting backupnode ...
-
 PATH_PRE=`type -path hpcc_setenv`
 if [ -z "$PATH_PRE" ]; then
     # assume default conf location
@@ -47,10 +38,24 @@ ENVPATH=${CONFIG_DIR}/${ENV_XML_FILE}
 RUN_DIR=`cat ${HPCC_CONFIG} | sed -n "/\[DEFAULT\]/,/\[/p" | grep "^runtime=" | sed -e 's/^runtime=//'`
 INSTANCE_DIR=$RUN_DIR/$1
 
+LOGPATH=`updtdalienv $ENVPATH -d log backupnode backupnode`
+export logpthtail="`date +%Y_%m_%d_%H_%M_%S`"
+export logfile="$LOGPATH/backupnode_${logpthtail}.log"
+mkdir -p `dirname $logfile`
+
+pid=`${PIDOF} backupnode`
+if [ -n "$pid" ]; then
+   log "stopping previous backupnode processes $pid"
+   killall backupnode
+fi
+
+log "------------------------------"
+log "starting backupnode ..."
+
 if [ ! -e $INSTANCE_DIR ] ; then
   # perhaps they gave a full path?
   if [ ! -e $1 ] ; then
-    echo Usage: $0 thor_cluster_name
+    echo "Usage: $0 thor_cluster_name"
     exit 1
   fi
   INSTANCE_DIR=$1
@@ -61,7 +66,7 @@ cd $INSTANCE_DIR
 PID_NAME="$PID/`basename $INSTANCE_DIR`.pid"
 BACKUPNODE_DATA=`updtdalienv $ENVPATH -d data backupnode backupnode`
 if [ -z "$BACKUPNODE_DATA" ]; then
-    echo cannot determine backupnode directory 
+    echo "cannot determine backupnode directory"
     exit 1
 fi
 . $INSTANCE_DIR/setvars
@@ -74,7 +79,7 @@ fi
 daliadmin server=$DALISERVER dfsgroup ${groupName} $INSTANCE_DIR/backupnode.slaves
 errcode=$?
 if [ 0 != ${errcode} ]; then
-    echo 'failed to lookup dali group for $groupName'
+    echo "failed to lookup dali group for $groupName"
     exit 1
 fi
 
@@ -89,43 +94,36 @@ mkdir -p $BACKUPNODE_DATA
 rm -f $BACKUPNODE_DATA/*.ERR
 rm -f $BACKUPNODE_DATA/*.DAT
 
-echo Using backupnode directory $BACKUPNODE_DATA
-echo Reading slaves file $INSTANCE_DIR/backupnode.slaves
-echo Scanning files from dali ...
+log "Using backupnode directory $BACKUPNODE_DATA"
+log "Reading slaves file $INSTANCE_DIR/backupnode.slaves"
+log "Scanning files from dali ..."
 
 NODEGROUP=$THORPRIMARY
 if [ -z "$THORPRIMARY" ]; then
   NODEGROUP=$THORNAME
 fi
 
-LOGPATH=`updtdalienv $ENVPATH -d log backupnode backupnode`
-LOGDATE=`date +%m_%d_%Y_%H_%M_%S`
-LOGFILE="$LOGPATH/$LOGDATE".log
-mkdir -p `dirname $LOGFILE` 
 
-$DEPLOY_DIR/backupnode -O $DALISERVER $NODEGROUP $BACKUPNODE_DATA >> $LOGFILE 2>&1
+$DEPLOY_DIR/backupnode -O $DALISERVER $NODEGROUP $BACKUPNODE_DATA >> $logfile 2>&1
 if [ $? -ne 0 ]; then
-  echo Backupnode failed - see $LOGFILE
+  echo Backupnode failed - see $logfile
   exit 1
 fi
 
 # maximum number of threads frunssh will be permitted to use (capped by # slaves)
 MAXTHREADS=1000
 
-frunssh $INSTANCE_DIR/backupnode.slaves "killall backupnode" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$MAXTHREADS -b >> $LOGFILE 2>&1
-echo frunssh $INSTANCE_DIR/backupnode.slaves "/bin/sh -c 'mkdir -p `dirname $LOGPATH/${LOGDATE}_node%n.log`; mkdir -p $INSTANCE_DIR; $DEPLOY_DIR/backupnode -T -X $BACKUPNODE_REMOTEDATA %n %c %a %x $2 > $LOGPATH/${LOGDATE}_node%n.log 2>&1'" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$MAXTHREADS -b >> $LOGFILE 2>&1
-frunssh $INSTANCE_DIR/backupnode.slaves "/bin/sh -c 'mkdir -p `dirname $LOGPATH/${LOGDATE}_node%n.log`; mkdir -p $INSTANCE_DIR; $DEPLOY_DIR/backupnode -T -X $BACKUPNODE_REMOTEDATA %n %c %a %x $2 > $LOGPATH/${LOGDATE}_node%n.log 2>&1'" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$MAXTHREADS -b >> $LOGFILE 2>&1
+frunssh $INSTANCE_DIR/backupnode.slaves "killall backupnode" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$MAXTHREADS -b >> $logfile 2>&1
+log "frunssh $INSTANCE_DIR/backupnode.slaves \"/bin/sh -c 'mkdir -p `dirname $LOGPATH/${logfile}_node%n.log`; mkdir -p $INSTANCE_DIR; $DEPLOY_DIR/backupnode -T -X $BACKUPNODE_REMOTEDATA %n %c %a %x $2 > $LOGPATH/${logfile}_node%n.log 2>&1'\" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$MAXTHREADS -b >> $logfile 2>&1"
+frunssh $INSTANCE_DIR/backupnode.slaves "/bin/sh -c 'mkdir -p `dirname $LOGPATH/${logfile}_node%n.log`; mkdir -p $INSTANCE_DIR; $DEPLOY_DIR/backupnode -T -X $BACKUPNODE_REMOTEDATA %n %c %a %x $2 > $LOGPATH/${logfile}_node%n.log 2>&1'" -i:$SSHidentityfile -u:$SSHusername -pe:$SSHpassword -t:$SSHtimeout -a:$SSHretries -n:$MAXTHREADS -b >> $logfile 2>&1
 
-echo ------------------------------
-sleep 5
-echo ------------------------------
-echo Waiting for backup to complete
+log "------------------------------"
+log "Waiting for backup to complete"
 
-nohup backupnode -W $INSTANCE_DIR/backupnode.slaves $BACKUPNODE_DATA >> $LOGFILE 2>&1 &
+nohup backupnode -W $INSTANCE_DIR/backupnode.slaves $BACKUPNODE_DATA >> $logfile 2>&1 &
 pid=`${PIDOF} backupnode`
 trap "echo start_backupnode exiting, backupnode process $pid still continuing; exit 0" 2
+sleep 5
 if [ -n "$pid" ]; then
-  tail --pid $pid -f $LOGFILE 2>/dev/null
+  tail --pid $pid -f $logfile 2>/dev/null
 fi
-
-

+ 41 - 18
roxie/ccd/ccdserver.cpp

@@ -5269,6 +5269,7 @@ IRoxieServerActivityFactory *createRoxieServerInlineTableActivityFactory(unsigne
 class CRoxieServerWorkUnitReadActivity : public CRoxieServerActivity
 {
     IHThorWorkunitReadArg &helper;
+    CriticalSection readerCrit;
     Owned<IWorkUnitRowReader> wuReader; // MORE - can we use IRoxieInput instead?
 public:
     CRoxieServerWorkUnitReadActivity(const IRoxieServerActivityFactory *_factory, IProbeManager *_probeManager)
@@ -5296,8 +5297,10 @@ public:
 
     virtual void reset() 
     {
-        CriticalBlock b(statecrit);
-        wuReader.clear();
+        {
+            CriticalBlock b(readerCrit);
+            wuReader.clear();
+        }
         CRoxieServerActivity::reset(); 
     };
 
@@ -5305,11 +5308,15 @@ public:
 
     virtual const void *nextInGroup()
     {
-        CriticalBlock b(statecrit);
         ActivityTimer t(totalCycles, timeActivities);
-        if (!wuReader)
-            return NULL;
-        const void *ret = wuReader->nextInGroup();
+        Linked<IWorkUnitRowReader> useReader;
+        {
+            CriticalBlock b(readerCrit);
+            if (!wuReader)
+                return NULL;
+            useReader.set(wuReader);
+        }
+        const void *ret = useReader->nextInGroup();
         if (ret)
             processed++;
         return ret;
@@ -5695,6 +5702,7 @@ protected:
 class CRoxieServerLocalResultReadActivity : public CRoxieServerActivity
 {
     IHThorLocalResultReadArg &helper;
+    CriticalSection iterCrit;
     Owned<IRoxieInput> iter;
     ILocalGraphEx * graph;
     unsigned graphId;
@@ -5723,18 +5731,24 @@ public:
 
     virtual void reset() 
     {
-        CriticalBlock b(statecrit);
-        iter.clear();
+        {
+            CriticalBlock b(iterCrit);
+            iter.clear();
+        }
         CRoxieServerActivity::reset(); 
     };
 
     virtual const void *nextInGroup()
     {
-        CriticalBlock b(statecrit);
         ActivityTimer t(totalCycles, timeActivities);
-        if (!iter)
-            return NULL;
-        const void * next = iter->nextInGroup();
+        Linked<IRoxieInput> useIter;
+        {
+            CriticalBlock b(iterCrit);
+            if (!iter)
+                return NULL;
+            useIter.set(iter);
+        }
+        const void * next = useIter->nextInGroup();
         if (next)
         {
             processed++;
@@ -6020,6 +6034,7 @@ class CRoxieServerGraphLoopResultReadActivity : public CRoxieServerActivity
 {
 protected:
     IHThorGraphLoopResultReadArg &helper;
+    CriticalSection iterCrit;
     Owned<IRoxieInput> iter;
     ILocalGraphEx * graph;
     unsigned graphId;
@@ -6070,18 +6085,26 @@ public:
 
     virtual void reset() 
     {
-        CriticalBlock b(statecrit);
-        if (iter)
-            iter->reset();
-        iter.clear();
+        {
+            CriticalBlock b(iterCrit);
+            if (iter)
+                iter->reset();
+            iter.clear();
+        }
         CRoxieServerActivity::reset(); 
     };
 
     virtual const void *nextInGroup()
     {
-        CriticalBlock b(statecrit);
         ActivityTimer t(totalCycles, timeActivities);
-        const void * next = iter ? iter->nextInGroup() : NULL;
+        Linked<IRoxieInput> useIter;
+        {
+            CriticalBlock b(iterCrit);
+            if (!iter)
+                return NULL;
+            useIter.set(iter);
+        }
+        const void * next = useIter->nextInGroup();
         if (next)
         {
             processed++;

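Editor's note: the recurring change in these three activities is the same lock-narrowing pattern. Instead of holding a critical section across the whole of nextInGroup(), the member pointer is copied into a link-counted local under a small dedicated lock and then used with the lock released, so a concurrent reset() can clear the member without blocking in-flight reads. A minimal sketch of the idea in standard C++, using std::shared_ptr and std::mutex as stand-ins for Linked<> and CriticalSection (RowSource and Activity are hypothetical names, not the Roxie classes):

#include <memory>
#include <mutex>

struct RowSource {                          // hypothetical stand-in for IWorkUnitRowReader/IRoxieInput
    const void *nextInGroup() { return nullptr; }
};

class Activity {
    std::mutex readerCrit;                  // guards only the member pointer, never the read itself
    std::shared_ptr<RowSource> reader;
public:
    void setReader(std::shared_ptr<RowSource> r) {
        std::lock_guard<std::mutex> lock(readerCrit);
        reader = std::move(r);
    }
    void reset() {
        std::lock_guard<std::mutex> lock(readerCrit);
        reader.reset();                     // safe even if another thread is mid-nextInGroup()
    }
    const void *nextInGroup() {
        std::shared_ptr<RowSource> useReader;
        {
            std::lock_guard<std::mutex> lock(readerCrit);
            if (!reader)
                return nullptr;
            useReader = reader;             // take a counted reference under the lock...
        }
        return useReader->nextInGroup();    // ...and call it with the lock released
    }
};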
+ 25 - 10
rtl/eclrtl/eclrtl.cpp

@@ -1226,18 +1226,28 @@ void holeIntFormat(size32_t maxlen, char * target, __int64 value, unsigned width
 
 void holeRealFormat(size32_t maxlen, char * target, double value, unsigned width, unsigned places)
 {
-    if ((int) width < 0)
+    if ((int) width <= 0)
         return;
-    char temp[500];
-    if (width > sizeof(temp)) 
+
+    const unsigned tempSize = 500;
+    char temp[tempSize*2+2];  // Space for leading digits/0, '-' and \0 terminator
+
+    //Ensure that we output at most 2*tempSize characters.
+    unsigned formatWidth = width < tempSize ? width : tempSize;
+    if (places >= formatWidth)
+        places = formatWidth-1;
+    unsigned written = sprintf(temp, "%*.*f", formatWidth, places, value);
+
+    const char * src = temp;
+    if (written > width)
     {
-        unsigned delta = width - sizeof(temp);
-        memset(target, ' ', delta);
-        target += delta;
-        width = sizeof(temp);
+        //Strip a leading 0 for very small numbers.
+        if (*src == '0')
+        {
+            written--;
+            src++;
+        }
     }
-    if (places >= width)             places = width-1;
-    unsigned written = sprintf(temp, "%*.*f", width, places, value);
     if (written > width)
     {
         memset(target, '*', width);
@@ -1245,7 +1255,12 @@ void holeRealFormat(size32_t maxlen, char * target, double value, unsigned width
             target[width-places-1] = '.';
     }
     else
-        memcpy(target, temp, width);
+    {
+        unsigned delta = width - written;
+        if (delta)
+            memset(target, ' ', delta);
+        memcpy(target+delta, src, written);
+    }
 }
 
 //=============================================================================

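Editor's note: the reworked holeRealFormat follows a simple fixed-width formatting recipe: print into a bounded temporary with sprintf's width/precision, drop a leading '0' if the result is exactly one character too wide, fill the whole field with '*' when it still does not fit, and otherwise right-justify by space-padding on the left. A rough standalone sketch of that recipe (fixedWidthReal is a hypothetical helper, not the eclrtl entry point):

#include <cstdio>
#include <string>
#include <vector>

std::string fixedWidthReal(double value, unsigned width, unsigned places)
{
    if ((int) width <= 0)
        return std::string();
    if (places >= width)
        places = width - 1;                          // keep room for at least one digit
    std::vector<char> temp(width + 32);              // bounded scratch buffer
    unsigned written = (unsigned) snprintf(temp.data(), temp.size(), "%*.*f",
                                           (int) width, (int) places, value);
    const char *src = temp.data();
    if (written > width && *src == '0') {            // "0.123" -> ".123" when one char too wide
        written--;
        src++;
    }
    if (written >= temp.size() || written > width)   // still too wide (or truncated): flag overflow
        return std::string(width, '*');
    std::string result(width - written, ' ');        // right-justify with leading spaces
    result.append(src, written);
    return result;
}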
+ 23 - 29
system/jlib/jptree.ipp

@@ -29,12 +29,6 @@
 #include "jptree.hpp"
 #include "jbuff.hpp"
 
-#ifdef __64BIT__
-#pragma pack(push,1)    // 64bit pack PTree's    (could possibly do for 32bit also but may be compatibility problems)
-#endif
-
-
-
 #define ANE_APPEND -1
 #define ANE_SET -2
 ///////////////////
@@ -254,6 +248,13 @@ struct AttrValue
 #define AM_NOCASE_FLAG (0x8000)
 #define AM_NOCASE_MASK (0x7fff)
 
+#ifdef __64BIT__
+#pragma pack(push,1)
+// Byte-Pack AttrMap and PTree because very large numbers of these objects are created.  However, this may cause problems on systems that
+// require aligned pointer access.  Without overriding the packing the structure currently wastes 8 bytes.
+// Ideally the classes would be restructured to avoid this, but it would probably require AttrMap to move into PTree
+#endif
+
 class jlib_decl AttrMap
 {
     AttrValue *attrs;
@@ -416,14 +417,21 @@ private:
 
 protected: // data
     IPropertyTree *parent; // ! currently only used if tree embedded into array, used to locate position.
-
     HashKeyElement *name;
     ChildMap *children;
     IPTArrayValue *value;
-    AttrMap attributes;
-    byte flags;
+    //The packing (#pragma pack) is overridden because very large numbers of these objects are created, and the
+    //following two members currently cause 8 bytes to be wasted.  Refactoring the contents of AttrMap into this
+    //class would allow the fields to pack cleanly.
+    AttrMap attributes;     // this has 2 "extra" bytes - which could pack into the space following the count
+    byte flags;             // this could also pack into the space following the count.
 };
 
+#ifdef __64BIT__
+#pragma pack(pop)
+#endif
+
+
 jlib_decl IPropertyTree *createPropBranch(IPropertyTree *tree, const char *xpath, bool createIntermediates=false, IPropertyTree **created=NULL, IPropertyTree **createdParent=NULL);
 
 class LocalPTree : public PTree
@@ -446,11 +454,9 @@ public:
 };
 
 class PTree;
-class SingleIdIterator : public CInterface, implements IPropertyTreeIterator
+class SingleIdIterator : public CInterfaceOf<IPropertyTreeIterator>
 {
 public:
-    IMPLEMENT_IINTERFACE;
-
     SingleIdIterator(const PTree &_tree, unsigned pos=1, unsigned _many=(unsigned)-1);
     ~SingleIdIterator();
     void setCurrent(unsigned pos);
@@ -468,11 +474,9 @@ private:
 };
 
 
-class PTLocalIteratorBase : public CInterface, implements IPropertyTreeIterator
+class PTLocalIteratorBase : public CInterfaceOf<IPropertyTreeIterator>
 {
 public:
-    IMPLEMENT_IINTERFACE;
-
     PTLocalIteratorBase(const PTree *tree, const char *_id, bool _nocase, bool sort);
 
     ~PTLocalIteratorBase();
@@ -485,9 +489,9 @@ public:
     virtual IPropertyTree & query() { return iter->query(); }
 
 protected:
+    bool nocase, sort;  // pack with the link count
     IPropertyTreeIterator *baseIter;
     StringAttr id;
-    bool nocase, sort;
 private:
     const PTree *tree;
     IPropertyTreeIterator *iter;
@@ -498,8 +502,6 @@ private:
 class PTIdMatchIterator : public PTLocalIteratorBase
 {
 public:
-    IMPLEMENT_IINTERFACE;
-
     PTIdMatchIterator(const PTree *tree, const char *id, bool nocase, bool sort) : PTLocalIteratorBase(tree, id, nocase, sort) { }
 
     virtual bool match();
@@ -507,11 +509,9 @@ public:
 
 class StackElement;
 
-class PTStackIterator : public CInterface, implements IPropertyTreeIterator
+class PTStackIterator : public CInterfaceOf<IPropertyTreeIterator>
 {
 public:
-    IMPLEMENT_IINTERFACE;
-
     PTStackIterator(IPropertyTreeIterator *_iter, const char *_xpath);
     ~PTStackIterator();
 
@@ -536,11 +536,11 @@ private: // data
     StackElement *stack;
 };
 
-class CPTreeMaker : public CInterface, implements IPTreeMaker
+class CPTreeMaker : public CInterfaceOf<IPTreeMaker>
 {
+    bool rootProvided, noRoot;  // pack into the space following the link count
     IPropertyTree *root;
     ICopyArrayOf<IPropertyTree> ptreeStack;
-    bool rootProvided, noRoot;
     IPTreeNodeCreator *nodeCreator;
     class CDefaultNodeCreator : public CInterface, implements IPTreeNodeCreator
     {
@@ -555,8 +555,6 @@ class CPTreeMaker : public CInterface, implements IPTreeMaker
 protected:
     IPropertyTree *currentNode;
 public:
-    IMPLEMENT_IINTERFACE;
-
     CPTreeMaker(byte flags=ipt_none, IPTreeNodeCreator *_nodeCreator=NULL, IPropertyTree *_root=NULL, bool _noRoot=false) : noRoot(_noRoot)
     {
         if (_nodeCreator)
@@ -643,9 +641,5 @@ public:
     }
 };
 
-#ifdef __64BIT__
-#pragma pack(pop)   
-#endif
-
 
 #endif

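Editor's note: both halves of this change address object size. #pragma pack(push,1) removes padding outright (at the cost of potentially unaligned access), while grouping the small members lets them share padding that would otherwise be wasted. A small illustration with hypothetical structs, showing typical 64-bit sizes (exact figures are implementation-defined):

#include <cstdio>

struct Loose {              // small members interleaved with pointers waste padding
    void *a;
    unsigned short count;   // 6 bytes of padding follow, to realign the next pointer
    void *b;
    unsigned char flags;    // 7 bytes of trailing padding
};                          // commonly sizeof == 32

struct Reordered {          // same members, small ones grouped at the end
    void *a;
    void *b;
    unsigned short count;
    unsigned char flags;
};                          // commonly sizeof == 24

#pragma pack(push, 1)
struct Packed {             // byte-packed: no padding, but members may be unaligned
    void *a;
    unsigned short count;
    void *b;
    unsigned char flags;
};                          // sizeof == 19
#pragma pack(pop)

int main()
{
    std::printf("Loose=%zu Reordered=%zu Packed=%zu\n",
                sizeof(Loose), sizeof(Reordered), sizeof(Packed));
    return 0;
}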
+ 44 - 49
system/jlib/jthread.cpp

@@ -1732,7 +1732,7 @@ class CLinuxPipeProcess: public CInterface, implements IPipeProcess
             }
             return 0;
         }
-        void stop() 
+        void stop()
         {
             stopsem.signal();
             Thread::join();
@@ -1780,6 +1780,27 @@ protected: friend class PipeWriterThread;
     StringArray envVars;
     StringArray envValues;
 
+    void clearUtilityThreads()
+    {
+        Owned<cForkThread> ft;
+        cStdErrorBufferThread *et;
+        {
+            CriticalBlock block(sect); // clear forkthread and stderrbufferthread
+            ft.setown(forkthread.getClear());
+            et = stderrbufferthread;
+            stderrbufferthread = NULL;
+        }
+        if (ft)
+        {
+            ft->join();
+            ft.clear();
+        }
+        if (et)
+        {
+            et->stop();
+            delete et;
+        }
+    }
 public:
     IMPLEMENT_IINTERFACE;
 
@@ -1805,22 +1826,7 @@ public:
         closeInput();
         closeOutput();
         closeError();
-
-        Owned<cForkThread> ft;
-        cStdErrorBufferThread *et;
-        {   CriticalBlock block(sect); // clear forkthread  and stderrbufferthread
-            ft.setown(forkthread.getClear());
-            et = stderrbufferthread;
-            stderrbufferthread = NULL;
-        }
-        if (ft) {
-            ft->join();
-            ft.clear();
-        }
-        if (et) {
-            et->stop();
-            delete et;
-        }
+        clearUtilityThreads();
     }
 
 
@@ -2083,51 +2089,40 @@ public:
             pipeProcess = (HANDLE)-1;
         }
     }
-    
-    
+
     unsigned wait()
     {
-        CriticalBlock block(sect); 
-        if (stderrbufferthread)
-            stderrbufferthread->stop();
-        if (forkthread) {
-            {
-                CriticalUnblock unblock(sect);
-                forkthread->join();
-            }
-            if (pipeProcess != (HANDLE)-1) {
-                if (title.length())
-                    PROGLOG("%s: Pipe: process %d complete %d",title.get(),pipeProcess,retcode);
-                pipeProcess = (HANDLE)-1;
-            }
-            forkthread.clear();
-        }
-        return retcode;
+        bool timedout;
+        return wait(INFINITE, timedout);
     }
 
     unsigned wait(unsigned timeoutms, bool &timedout)
     {
-        CriticalBlock block(sect); 
         timedout = false;
-        if (forkthread) {
+        if (INFINITE != timeoutms)
+        {
+            CriticalBlock block(sect);
+            if (forkthread)
             {
-                CriticalUnblock unblock(sect);
-                if (!forkthread->join(timeoutms)) {
-                    timedout = true;
-                    return retcode;
+                {
+                    CriticalUnblock unblock(sect);
+                    if (!forkthread->join(timeoutms))
+                    {
+                        timedout = true;
+                        return retcode;
+                    }
                 }
-
-            }
-            if (pipeProcess != (HANDLE)-1) {
-                if (title.length())
-                    PROGLOG("%s: Pipe: process %d complete %d",title.get(),pipeProcess,retcode);
-                pipeProcess = (HANDLE)-1;
             }
-            forkthread.clear();
+        }
+        clearUtilityThreads(); // NB: will call forkthread->join() again, but that is harmless
+        if (pipeProcess != (HANDLE)-1)
+        {
+            if (title.length())
+                PROGLOG("%s: Pipe: process %d complete %d", title.get(), pipeProcess, retcode);
+            pipeProcess = (HANDLE)-1;
         }
         return retcode;
     }
-    
 
     void closeOutput()
     {

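Editor's note: the wait() rework removes a duplicated code path: the untimed wait() now simply calls the timed overload with an infinite timeout, and the post-join cleanup lives in one place (clearUtilityThreads). A condensed sketch of that delegation shape in portable C++ (ChildProcess and markFinished are hypothetical, not the jlib IPipeProcess API):

#include <chrono>
#include <condition_variable>
#include <mutex>

class ChildProcess {
    static constexpr unsigned INFINITE_MS = 0xFFFFFFFFu;  // sentinel mirroring INFINITE
    std::mutex sect;
    std::condition_variable finished;
    bool done = false;
    unsigned retcode = 0;
public:
    void markFinished(unsigned code) {                     // called when the child exits
        std::lock_guard<std::mutex> lock(sect);
        retcode = code;
        done = true;
        finished.notify_all();
    }
    unsigned wait() {                                      // untimed wait is now a thin wrapper...
        bool timedout;
        return wait(INFINITE_MS, timedout);                // ...so the cleanup logic exists only once
    }
    unsigned wait(unsigned timeoutms, bool &timedout) {
        std::unique_lock<std::mutex> lock(sect);
        if (timeoutms == INFINITE_MS) {
            finished.wait(lock, [this] { return done; });
            timedout = false;
        } else {
            timedout = !finished.wait_for(lock, std::chrono::milliseconds(timeoutms),
                                          [this] { return done; });
        }
        return retcode;                                    // shared teardown would follow here
    }
};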
+ 40 - 0
testing/regress/ecl/issue13588.ecl

@@ -0,0 +1,40 @@
+/*##############################################################################
+
+    HPCC SYSTEMS software Copyright (C) 2015 HPCC Systems.
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+############################################################################## */
+
+
+xRec := RECORD
+    integer x;
+END;
+
+idRecord := RECORD
+    integer         id;
+    string          name;
+    DATASET(xRec)   x;
+END;
+
+rowRecord := RECORD
+    integer id;
+    idRecord r;
+END;
+
+ds := DATASET([
+        {1,{2,'gavin',[{1},{2}]}},
+        {2,{3,'john',[{-1},{-5}]}}], rowRecord);
+
+i := INDEX(ds, { id }, { ds }, 'REGRESS::TEMP::ISSUE13588');
+BUILD(i,OVERWRITE);
+OUTPUT(i,OVERWRITE);

+ 6 - 0
testing/regress/ecl/key/issue13588.xml

@@ -0,0 +1,6 @@
+<Dataset name='Result 1'>
+</Dataset>
+<Dataset name='Result 2'>
+ <Row><id>1</id><r><id>2</id><name>gavin</name><x><Row><x>1</x></Row><Row><x>2</x></Row></x></r><__internal_fpos__>0</__internal_fpos__></Row>
+ <Row><id>2</id><r><id>3</id><name>john</name><x><Row><x>-1</x></Row><Row><x>-5</x></Row></x></r><__internal_fpos__>0</__internal_fpos__></Row>
+</Dataset>

+ 378 - 0
testing/regress/ecl/key/realformat.xml

@@ -0,0 +1,378 @@
+<Dataset name='Result 1'>
+ <Row><Result_1></Result_1></Row>
+</Dataset>
+<Dataset name='Result 2'>
+ <Row><Result_2></Result_2></Row>
+</Dataset>
+<Dataset name='Result 3'>
+ <Row><Result_3>0</Result_3></Row>
+</Dataset>
+<Dataset name='Result 4'>
+ <Row><Result_4>0</Result_4></Row>
+</Dataset>
+<Dataset name='Result 5'>
+ <Row><Result_5>.0</Result_5></Row>
+</Dataset>
+<Dataset name='Result 6'>
+ <Row><Result_6>0.0</Result_6></Row>
+</Dataset>
+<Dataset name='Result 7'>
+ <Row><Result_7>.00</Result_7></Row>
+</Dataset>
+<Dataset name='Result 8'>
+ <Row><Result_8>  0.0</Result_8></Row>
+</Dataset>
+<Dataset name='Result 9'>
+ <Row><Result_9>true</Result_9></Row>
+</Dataset>
+<Dataset name='Result 10'>
+ <Row><Result_10>true</Result_10></Row>
+</Dataset>
+<Dataset name='Result 11'>
+ <Row><Result_11>true</Result_11></Row>
+</Dataset>
+<Dataset name='Result 12'>
+ <Row><Result_12>true</Result_12></Row>
+</Dataset>
+<Dataset name='Result 13'>
+ <Row><Result_13>true</Result_13></Row>
+</Dataset>
+<Dataset name='Result 14'>
+ <Row><Result_14>true</Result_14></Row>
+</Dataset>
+<Dataset name='Result 15'>
+ <Row><Result_15></Result_15></Row>
+</Dataset>
+<Dataset name='Result 16'>
+ <Row><Result_16></Result_16></Row>
+</Dataset>
+<Dataset name='Result 17'>
+ <Row><Result_17>0</Result_17></Row>
+</Dataset>
+<Dataset name='Result 18'>
+ <Row><Result_18>0</Result_18></Row>
+</Dataset>
+<Dataset name='Result 19'>
+ <Row><Result_19>.1</Result_19></Row>
+</Dataset>
+<Dataset name='Result 20'>
+ <Row><Result_20>0.1</Result_20></Row>
+</Dataset>
+<Dataset name='Result 21'>
+ <Row><Result_21>.10</Result_21></Row>
+</Dataset>
+<Dataset name='Result 22'>
+ <Row><Result_22>  0.1</Result_22></Row>
+</Dataset>
+<Dataset name='Result 23'>
+ <Row><Result_23>true</Result_23></Row>
+</Dataset>
+<Dataset name='Result 24'>
+ <Row><Result_24>true</Result_24></Row>
+</Dataset>
+<Dataset name='Result 25'>
+ <Row><Result_25>true</Result_25></Row>
+</Dataset>
+<Dataset name='Result 26'>
+ <Row><Result_26>true</Result_26></Row>
+</Dataset>
+<Dataset name='Result 27'>
+ <Row><Result_27>true</Result_27></Row>
+</Dataset>
+<Dataset name='Result 28'>
+ <Row><Result_28>true</Result_28></Row>
+</Dataset>
+<Dataset name='Result 29'>
+ <Row><Result_29></Result_29></Row>
+</Dataset>
+<Dataset name='Result 30'>
+ <Row><Result_30></Result_30></Row>
+</Dataset>
+<Dataset name='Result 31'>
+ <Row><Result_31>*</Result_31></Row>
+</Dataset>
+<Dataset name='Result 32'>
+ <Row><Result_32>*</Result_32></Row>
+</Dataset>
+<Dataset name='Result 33'>
+ <Row><Result_33>.*</Result_33></Row>
+</Dataset>
+<Dataset name='Result 34'>
+ <Row><Result_34>*.*</Result_34></Row>
+</Dataset>
+<Dataset name='Result 35'>
+ <Row><Result_35>.**</Result_35></Row>
+</Dataset>
+<Dataset name='Result 36'>
+ <Row><Result_36> -0.1</Result_36></Row>
+</Dataset>
+<Dataset name='Result 37'>
+ <Row><Result_37>true</Result_37></Row>
+</Dataset>
+<Dataset name='Result 38'>
+ <Row><Result_38>true</Result_38></Row>
+</Dataset>
+<Dataset name='Result 39'>
+ <Row><Result_39>true</Result_39></Row>
+</Dataset>
+<Dataset name='Result 40'>
+ <Row><Result_40>true</Result_40></Row>
+</Dataset>
+<Dataset name='Result 41'>
+ <Row><Result_41>true</Result_41></Row>
+</Dataset>
+<Dataset name='Result 42'>
+ <Row><Result_42>true</Result_42></Row>
+</Dataset>
+<Dataset name='Result 43'>
+ <Row><Result_43></Result_43></Row>
+</Dataset>
+<Dataset name='Result 44'>
+ <Row><Result_44></Result_44></Row>
+</Dataset>
+<Dataset name='Result 45'>
+ <Row><Result_45>2</Result_45></Row>
+</Dataset>
+<Dataset name='Result 46'>
+ <Row><Result_46>2</Result_46></Row>
+</Dataset>
+<Dataset name='Result 47'>
+ <Row><Result_47>.*</Result_47></Row>
+</Dataset>
+<Dataset name='Result 48'>
+ <Row><Result_48>2.1</Result_48></Row>
+</Dataset>
+<Dataset name='Result 49'>
+ <Row><Result_49>.**</Result_49></Row>
+</Dataset>
+<Dataset name='Result 50'>
+ <Row><Result_50>  2.1</Result_50></Row>
+</Dataset>
+<Dataset name='Result 51'>
+ <Row><Result_51>true</Result_51></Row>
+</Dataset>
+<Dataset name='Result 52'>
+ <Row><Result_52>true</Result_52></Row>
+</Dataset>
+<Dataset name='Result 53'>
+ <Row><Result_53>true</Result_53></Row>
+</Dataset>
+<Dataset name='Result 54'>
+ <Row><Result_54>true</Result_54></Row>
+</Dataset>
+<Dataset name='Result 55'>
+ <Row><Result_55>true</Result_55></Row>
+</Dataset>
+<Dataset name='Result 56'>
+ <Row><Result_56>true</Result_56></Row>
+</Dataset>
+<Dataset name='Result 57'>
+ <Row><Result_57></Result_57></Row>
+</Dataset>
+<Dataset name='Result 58'>
+ <Row><Result_58></Result_58></Row>
+</Dataset>
+<Dataset name='Result 59'>
+ <Row><Result_59>*</Result_59></Row>
+</Dataset>
+<Dataset name='Result 60'>
+ <Row><Result_60>*</Result_60></Row>
+</Dataset>
+<Dataset name='Result 61'>
+ <Row><Result_61>.*</Result_61></Row>
+</Dataset>
+<Dataset name='Result 62'>
+ <Row><Result_62>*.*</Result_62></Row>
+</Dataset>
+<Dataset name='Result 63'>
+ <Row><Result_63>.**</Result_63></Row>
+</Dataset>
+<Dataset name='Result 64'>
+ <Row><Result_64> -2.1</Result_64></Row>
+</Dataset>
+<Dataset name='Result 65'>
+ <Row><Result_65>true</Result_65></Row>
+</Dataset>
+<Dataset name='Result 66'>
+ <Row><Result_66>true</Result_66></Row>
+</Dataset>
+<Dataset name='Result 67'>
+ <Row><Result_67>true</Result_67></Row>
+</Dataset>
+<Dataset name='Result 68'>
+ <Row><Result_68>true</Result_68></Row>
+</Dataset>
+<Dataset name='Result 69'>
+ <Row><Result_69>true</Result_69></Row>
+</Dataset>
+<Dataset name='Result 70'>
+ <Row><Result_70>true</Result_70></Row>
+</Dataset>
+<Dataset name='Result 71'>
+ <Row><Result_71></Result_71></Row>
+</Dataset>
+<Dataset name='Result 72'>
+ <Row><Result_72></Result_72></Row>
+</Dataset>
+<Dataset name='Result 73'>
+ <Row><Result_73>*</Result_73></Row>
+</Dataset>
+<Dataset name='Result 74'>
+ <Row><Result_74>*</Result_74></Row>
+</Dataset>
+<Dataset name='Result 75'>
+ <Row><Result_75>.*</Result_75></Row>
+</Dataset>
+<Dataset name='Result 76'>
+ <Row><Result_76>*.*</Result_76></Row>
+</Dataset>
+<Dataset name='Result 77'>
+ <Row><Result_77>.**</Result_77></Row>
+</Dataset>
+<Dataset name='Result 78'>
+ <Row><Result_78> 23.1</Result_78></Row>
+</Dataset>
+<Dataset name='Result 79'>
+ <Row><Result_79>true</Result_79></Row>
+</Dataset>
+<Dataset name='Result 80'>
+ <Row><Result_80>true</Result_80></Row>
+</Dataset>
+<Dataset name='Result 81'>
+ <Row><Result_81>true</Result_81></Row>
+</Dataset>
+<Dataset name='Result 82'>
+ <Row><Result_82>true</Result_82></Row>
+</Dataset>
+<Dataset name='Result 83'>
+ <Row><Result_83>true</Result_83></Row>
+</Dataset>
+<Dataset name='Result 84'>
+ <Row><Result_84>true</Result_84></Row>
+</Dataset>
+<Dataset name='Result 85'>
+ <Row><Result_85></Result_85></Row>
+</Dataset>
+<Dataset name='Result 86'>
+ <Row><Result_86></Result_86></Row>
+</Dataset>
+<Dataset name='Result 87'>
+ <Row><Result_87>*</Result_87></Row>
+</Dataset>
+<Dataset name='Result 88'>
+ <Row><Result_88>*</Result_88></Row>
+</Dataset>
+<Dataset name='Result 89'>
+ <Row><Result_89>.*</Result_89></Row>
+</Dataset>
+<Dataset name='Result 90'>
+ <Row><Result_90>*.*</Result_90></Row>
+</Dataset>
+<Dataset name='Result 91'>
+ <Row><Result_91>.**</Result_91></Row>
+</Dataset>
+<Dataset name='Result 92'>
+ <Row><Result_92>-23.1</Result_92></Row>
+</Dataset>
+<Dataset name='Result 93'>
+ <Row><Result_93>true</Result_93></Row>
+</Dataset>
+<Dataset name='Result 94'>
+ <Row><Result_94>true</Result_94></Row>
+</Dataset>
+<Dataset name='Result 95'>
+ <Row><Result_95>true</Result_95></Row>
+</Dataset>
+<Dataset name='Result 96'>
+ <Row><Result_96>true</Result_96></Row>
+</Dataset>
+<Dataset name='Result 97'>
+ <Row><Result_97>true</Result_97></Row>
+</Dataset>
+<Dataset name='Result 98'>
+ <Row><Result_98>true</Result_98></Row>
+</Dataset>
+<Dataset name='Result 99'>
+ <Row><Result_99></Result_99></Row>
+</Dataset>
+<Dataset name='Result 100'>
+ <Row><Result_100></Result_100></Row>
+</Dataset>
+<Dataset name='Result 101'>
+ <Row><Result_101>*</Result_101></Row>
+</Dataset>
+<Dataset name='Result 102'>
+ <Row><Result_102>*</Result_102></Row>
+</Dataset>
+<Dataset name='Result 103'>
+ <Row><Result_103>.*</Result_103></Row>
+</Dataset>
+<Dataset name='Result 104'>
+ <Row><Result_104>*.*</Result_104></Row>
+</Dataset>
+<Dataset name='Result 105'>
+ <Row><Result_105>.**</Result_105></Row>
+</Dataset>
+<Dataset name='Result 106'>
+ <Row><Result_106>***.*</Result_106></Row>
+</Dataset>
+<Dataset name='Result 107'>
+ <Row><Result_107>true</Result_107></Row>
+</Dataset>
+<Dataset name='Result 108'>
+ <Row><Result_108>true</Result_108></Row>
+</Dataset>
+<Dataset name='Result 109'>
+ <Row><Result_109>true</Result_109></Row>
+</Dataset>
+<Dataset name='Result 110'>
+ <Row><Result_110>true</Result_110></Row>
+</Dataset>
+<Dataset name='Result 111'>
+ <Row><Result_111>true</Result_111></Row>
+</Dataset>
+<Dataset name='Result 112'>
+ <Row><Result_112>true</Result_112></Row>
+</Dataset>
+<Dataset name='Result 113'>
+ <Row><Result_113></Result_113></Row>
+</Dataset>
+<Dataset name='Result 114'>
+ <Row><Result_114></Result_114></Row>
+</Dataset>
+<Dataset name='Result 115'>
+ <Row><Result_115>*</Result_115></Row>
+</Dataset>
+<Dataset name='Result 116'>
+ <Row><Result_116>*</Result_116></Row>
+</Dataset>
+<Dataset name='Result 117'>
+ <Row><Result_117>.*</Result_117></Row>
+</Dataset>
+<Dataset name='Result 118'>
+ <Row><Result_118>*.*</Result_118></Row>
+</Dataset>
+<Dataset name='Result 119'>
+ <Row><Result_119>.**</Result_119></Row>
+</Dataset>
+<Dataset name='Result 120'>
+ <Row><Result_120>***.*</Result_120></Row>
+</Dataset>
+<Dataset name='Result 121'>
+ <Row><Result_121>true</Result_121></Row>
+</Dataset>
+<Dataset name='Result 122'>
+ <Row><Result_122>true</Result_122></Row>
+</Dataset>
+<Dataset name='Result 123'>
+ <Row><Result_123>true</Result_123></Row>
+</Dataset>
+<Dataset name='Result 124'>
+ <Row><Result_124>true</Result_124></Row>
+</Dataset>
+<Dataset name='Result 125'>
+ <Row><Result_125>true</Result_125></Row>
+</Dataset>
+<Dataset name='Result 126'>
+ <Row><Result_126>true</Result_126></Row>
+</Dataset>

+ 52 - 0
testing/regress/ecl/realformat.ecl

@@ -0,0 +1,52 @@
+/*##############################################################################
+
+    HPCC SYSTEMS software Copyright (C) 2015 HPCC Systems.
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+############################################################################## */
+
+//Windows and linux format floating point numbers with > 15 places differently, so only check the significant digits
+checkSignificant(real value, unsigned width, unsigned places, string expected) := FUNCTION
+    formatted := REALFORMAT(value, width, places);
+    RETURN PARALLEL(
+        OUTPUT(LENGTH(formatted) = width);
+        OUTPUT(TRIM(formatted, LEFT, RIGHT)[1..LENGTH(expected)] = expected);
+    );
+END;
+
+
+doFormat(real value, string expected1, string expected2, string expected3) := PARALLEL(
+    REALFORMAT(value, 0, 0);
+    REALFORMAT(value, 0, 1);
+    REALFORMAT(value, 1, 0);
+    REALFORMAT(value, 1, 1);
+    REALFORMAT(value, 2, 1);
+    REALFORMAT(value, 3, 1);
+    REALFORMAT(value, 3, 4);
+    REALFORMAT(value, 5, 1);
+    checkSignificant(value, 99, 10, expected1);
+    checkSignificant(value, 2000, 10, expected2);
+    checkSignificant(value, 2000, 1000, expected3);
+);
+
+doFormat(0.0,                   '0.0000000000', '0.0000000000',     '0.000000000000000');
+doFormat(0.1,                   '0.1000000000', '0.1000000000',     '0.100000000000000');
+doFormat(-0.1,                  '-0.1000000000', '-0.1000000000',  '-0.100000000000000');
+doFormat(2.1,                   '2.1000000000', '2.1000000000',     '2.10000000000000');
+doFormat(-2.1,                  '-2.1000000000', '-2.1000000000',  '-2.10000000000000');
+doFormat(23.1,                  '23.1000000000', '23.1000000000',  '23.1000000000000');
+doFormat(-23.1,                 '-23.1000000000', '-23.100000000','-23.1000000000000');
+
+// only compare 14 significant digits since linux outputs exactly 12345678901234499... and does not round up the 15th digit
+doFormat(1.23456789012345e200,  '**************', '12345678901234', '12345678901234');
+doFormat(-1.23456789012345E200, '**************', '-12345678901234', '-12345678901234');