Browse Source

Merge branch 'candidate-8.4.x'

Signed-off-by: Richard Chapman <rchapman@hpccsystems.com>
Richard Chapman 3 years ago
parent
commit
d3e0f52bf3

+ 5 - 1
.devcontainer/devcontainer.json

@@ -15,7 +15,11 @@
 	],
 	],
 	// "postCreateCommand": "git config oh-my-zsh.hide-info 1",
 	// "postCreateCommand": "git config oh-my-zsh.hide-info 1",
 	// Set *default* container specific settings.json values on container create.
 	// Set *default* container specific settings.json values on container create.
-	"settings": {},
+	"settings": {
+		"cmake.configureArgs": [
+			"-DUSE_SHLIBDEPS=ON"
+		]
+	},
 	// Add the IDs of extensions you want installed when the container is created.
 	// Add the IDs of extensions you want installed when the container is created.
 	"extensions": [
 	"extensions": [
 		"ms-vscode.cpptools",
 		"ms-vscode.cpptools",

+ 154 - 0
.github/workflows/build-containers-pr.yml

@@ -0,0 +1,154 @@
+name: Docker smoketest build
+on:
+  pull_request:
+    branches:
+      - "master"
+      - "candidate-*.x"
+      - "!candidate-8.2.*"
+      - "!candidate-8.0.*"
+      - "!candidate-7.12.*"
+      - "!candidate-7.10.*"
+      - "!candidate-7.8.*"
+      - "!candidate-7.6.*"
+      - "!candidate-7.4.*"
+      - "!candidate-7.2.*"
+      - "!candidate-7.0.*"
+      - "!candidate-6.*"
+
+jobs:
+  check-skip:
+    # continue-on-error: true # Uncomment once integration is finished
+    runs-on: ubuntu-latest
+    # Map a step output to a job output
+    outputs:
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+    steps:
+      - id: skip_check
+        uses: hpcc-systems/skip-duplicate-actions@master
+        with:
+          github_token: ${{ github.token }}
+          paths_ignore: '["clienttools/**", "devdoc/**", "docs/**", "helm/**", "initfiles/**" ]'
+
+  build-images:
+    needs: check-skip
+    if: ${{ needs.check-skip.outputs.should_skip != 'true' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: vars
+        id: vars
+        run: |
+          echo ::set-output name=base_ver::7.12.1
+          # echo ::set-output name=container_registry::ghcr.io
+          # echo ::set-output name=cr_user::${{ github.repository_owner }}
+          echo ::set-output name=container_registry::docker.io
+          echo ::set-output name=cr_user::hpccsystems
+          echo ::set-output name=build_prbase_sha::${{ github.event.pull_request.base.sha }}
+          echo ::set-output name=build_prbase_label::${{ github.base_ref }}
+          echo ::set-output name=build_pr_label::pr-${{ github.event.number }}-${{ github.sha }}
+          echo ::set-output name=build_user::${{ github.actor }}
+          echo ::set-output name=build_type::RelWithDebInfo
+          echo ::set-output name=use_cppunit::1
+          echo ::set-output name=platform_build_base::smoketest-platform-build-base
+          echo ::set-output name=platform_build::smoketest-platform-build
+
+      - name: tracing
+        run: |
+          echo "Base ref   = ${{ github.ref }}"
+          echo "Action     = ${{ github.action }}"
+          echo "Event      = ${{ github.event_name }}"
+          echo "Actor      = ${{ github.actor }}"
+          echo "Ref        = ${{ github.ref }}"
+          echo "base sha   = ${{ github.event.pull_request.base.sha }}"
+          echo "Sha        = ${{ github.sha }}"
+          echo "github.repository = ${{ github.repository }}"
+          echo "repository_owner = ${{ github.repository_owner }}"
+          echo "github.workspace = ${{ github.workspace }}"
+          echo "runner.workspace = ${{ runner.workspace }}"
+          echo "github.event.pull_request.head.repo.owner.login = ${{ github.event.pull_request.head.repo.owner.login }}"
+          echo "build_prbase_label = ${{ steps.vars.outputs.build_prbase_label }}"
+          echo "build_pr_label = ${{ steps.vars.outputs.build_pr_label }}"
+
+      - name: Checkout PR
+        uses: actions/checkout@v2
+
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+        with:
+          driver: docker
+
+      - name: Check if PR-Base prebuilt
+        id: check-images
+        run: |
+          build_base_missing=$(docker manifest inspect ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build_base }}:${{ steps.vars.outputs.base_ver }} > /dev/null ; echo $?)
+          prbase_missing=$(docker manifest inspect ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_prbase_label }}-${{ github.event.pull_request.base.sha }} > /dev/null ; echo $?)
+          echo build_base_missing=${build_base_missing}
+          echo prbase_missing=${prbase_missing}
+          echo ::set-output name=build_base_missing::${build_base_missing}
+          if [[ "${prbase_missing}" -eq 1 ]]
+          then
+            echo "Current PR target branch image cannot be found, using latest"
+            prbase_missing=$(docker manifest inspect ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_prbase_label }}-latest > /dev/null ; echo $?)
+            if [[ "${prbase_missing}" -eq 1 ]]
+            then
+              echo "Cannot find the 'latest' target branch image"
+              echo ::set-output name=prbase_missing::${prbase_missing}
+            fi
+            echo ::set-output name=platform_prbase_ver::${{ steps.vars.outputs.build_prbase_label }}-latest
+          else
+            echo ::set-output name=platform_prbase_ver::${{ steps.vars.outputs.build_prbase_label }}-${{ github.event.pull_request.base.sha }}
+          fi
+
+      # Normal expectation is that the following 2 steps will be skipped
+
+      # NB: This is only for the case where the build-base image doesn't already exist.
+      # The build-containers-target-branch.yml action should normally have built/published this image
+      - name: HPCC build-base image
+        if: ${{ steps.check-images.outputs.build_base_missing == '1' && steps.check-images.outputs.prbase_missing == '1' }}
+        uses: docker/build-push-action@v2
+        with:
+          context: dockerfiles/platform-build-base
+          builder: ${{ steps.buildx.outputs.name }}
+          tags: |
+            ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build_base }}:${{ steps.vars.outputs.base_ver }}
+          build-args: |
+            BASE_VER=${{ steps.vars.outputs.base_ver }}
+
+      # NB: This is only for the case where the target branch image doesn't already exist.
+      # The build-containers-target-branch.yml action would normally have built/published this image,
+      # or there'll be a "latest" which will have been picked up by the check-images step.
+      - name: branch image
+        if: ${{ steps.check-images.outputs.prbase_missing == '1' }}
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: ./dockerfiles/platform-build/Dockerfile
+          builder: ${{ steps.buildx.outputs.name }}
+          tags: |
+            ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_prbase_label }}-${{ steps.vars.outputs.build_prbase_sha }}
+            ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_prbase_label }}-latest
+          build-args: |
+            CR_USER=${{ steps.vars.outputs.cr_user }}
+            CR_REPO=${{ steps.vars.outputs.container_registry }}
+            BASE_VER=${{ steps.vars.outputs.base_ver }}
+            BUILD_USER=${{ github.repository_owner }}
+            BUILD_TAG=${{ steps.vars.outputs.build_prbase_sha }}
+            BUILD_TYPE=${{ steps.vars.outputs.build_type }}
+            USE_CPPUNIT=${{ steps.vars.outputs.use_cppunit }}
+            BUILD_THREADS=${{ steps.vars.outputs.build_threads }}
+
+      - name: PR image
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: ./dockerfiles/platform-build-incremental-container/Dockerfile
+          builder: ${{ steps.buildx.outputs.name }}
+          tags: |
+            ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.build_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_pr_label }}
+          build-args: |
+            CR_USER=${{ steps.vars.outputs.cr_user }}
+            CR_REPO=${{ steps.vars.outputs.container_registry }}
+            PLATFORM_PRBASE_VER=${{ steps.check-images.outputs.platform_prbase_ver }}
+            GITHUB_REPO=${{ github.repository }}
+            GITHUB_PRREF=${{ github.ref }}
+            BUILD_THREADS=${{ steps.vars.outputs.build_threads }}

+ 127 - 0
.github/workflows/build-containers-target-branch.yml

@@ -0,0 +1,127 @@
+name: Docker target branch build
+on:
+  push:
+    branches:
+      - "master"
+      - "candidate-*.x"
+      - "!candidate-8.2.*"
+      - "!candidate-8.0.*"
+      - "!candidate-7.12.*"
+      - "!candidate-7.10.*"
+      - "!candidate-7.8.*"
+      - "!candidate-7.6.*"
+      - "!candidate-7.4.*"
+      - "!candidate-7.2.*"
+      - "!candidate-7.0.*"
+      - "!candidate-6.*"
+
+jobs:
+  check-skip:
+    # continue-on-error: true # Uncomment once integration is finished
+    runs-on: ubuntu-latest
+    # Map a step output to a job output
+    outputs:
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+    steps:
+      - id: skip_check
+        uses: hpcc-systems/skip-duplicate-actions@master
+        with:
+          github_token: ${{ github.token }}
+          paths_ignore: '["clienttools/**", "devdoc/**", "docs/**", "helm/**", "initfiles/**" ]'
+
+  build-images:
+    needs: check-skip
+    if: ${{ needs.check-skip.outputs.should_skip != 'true' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: vars
+        id: vars
+        run: |
+          echo ::set-output name=base_ver::7.12.1
+          # echo ::set-output name=container_registry::ghcr.io
+          # echo ::set-output name=cr_user::${{ github.repository_owner }}
+          echo ::set-output name=container_registry::docker.io
+          echo ::set-output name=cr_user::${{ secrets.DOCKER_USERNAME }}
+          echo ::set-output name=build_base_sha::${{ github.sha }}
+          echo ::set-output name=build_base_label::${GITHUB_REF##*/}
+          echo ::set-output name=build_user::${{ github.actor }}
+          echo ::set-output name=build_type::RelWithDebInfo
+          echo ::set-output name=use_cppunit::1
+          echo ::set-output name=platform_build_base::smoketest-platform-build-base
+          echo ::set-output name=platform_build::smoketest-platform-build
+
+      - name: tracing
+        run: |
+          echo "Action     = ${{ github.action }}"
+          echo "Event      = ${{ github.event_name }}"
+          echo "Actor      = ${{ github.actor }}"
+          echo "Ref        = ${{ github.ref }}"
+          echo "Sha        = ${{ github.sha }}"
+          echo "github.repository = ${{ github.repository }}"
+          echo "repository_owner = ${{ github.repository_owner }}"
+          echo "github.workspace = ${{ github.workspace }}"
+          echo "runner.workspace = ${{ runner.workspace }}"
+          echo "build_base_sha = ${{ steps.vars.outputs.build_base_sha }}"
+          echo "build_base_label = ${{ steps.vars.outputs.build_base_label }}"
+
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+
+      # - name: Login to GitHub Container Registry
+      #   uses: docker/login-action@v1
+      #   with:
+      #     registry: ghcr.io
+      #     username: ${{ github.repository_owner }}
+      #     password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Check if build-base image prebuilt
+        id: check-images
+        run: |
+          build_base_missing=$(docker manifest inspect ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build_base }}:${{ steps.vars.outputs.base_ver }} > /dev/null ; echo $?)
+          echo build_base_missing=${build_base_missing}
+          echo ::set-output name=build_base_missing::${build_base_missing}
+
+      # build build-base only if missing
+      - name: HPCC build-base image
+        if: ${{ steps.check-images.outputs.build_base_missing == '1' }}
+        uses: docker/build-push-action@v2
+        with:
+          context: dockerfiles/platform-build-base
+          builder: ${{ steps.buildx.outputs.name }}
+          tags: |
+            ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build_base }}:${{ steps.vars.outputs.base_ver }}
+          push: true
+          build-args: |
+            BASE_VER=${{ steps.vars.outputs.base_ver }}
+
+      # build branch image
+      - name: branch image
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: ./dockerfiles/platform-build/Dockerfile
+          builder: ${{ steps.buildx.outputs.name }}
+          tags: |
+            ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_base_label }}-${{ steps.vars.outputs.build_base_sha }}
+            ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_base_label }}-latest
+          push: true
+          build-args: |
+            CR_USER=${{ steps.vars.outputs.cr_user }}
+            CR_REPO=${{ steps.vars.outputs.container_registry }}
+            CR_CONTAINER_NAME=${{ steps.vars.outputs.platform_build_base }}
+            BASE_VER=${{ steps.vars.outputs.base_ver }}
+            BUILD_USER=${{ github.repository_owner }}
+            BUILD_TAG=${{ steps.vars.outputs.build_base_sha }}
+            BUILD_TYPE=${{ steps.vars.outputs.build_type }}
+            USE_CPPUNIT=${{ steps.vars.outputs.use_cppunit }}
+            BUILD_THREADS=${{ steps.vars.outputs.build_threads }}

+ 3 - 1
dali/base/dafdesc.cpp

@@ -3516,7 +3516,9 @@ void initializeStorageGroups(bool createPlanesFromGroups)
         PROGLOG("initializeStorageGroups update");
         PROGLOG("initializeStorageGroups update");
         doInitializeStorageGroups(createPlanesFromGroups);
         doInitializeStorageGroups(createPlanesFromGroups);
     };
     };
-    configUpdateHook.installOnce(updateFunc, true);
+
+    doInitializeStorageGroups(createPlanesFromGroups);
+    configUpdateHook.installOnce(updateFunc, false);
 }
 }
 
 
 bool getDefaultStoragePlane(StringBuffer &ret)
 bool getDefaultStoragePlane(StringBuffer &ret)

+ 1 - 0
dali/server/daserver.cpp

@@ -475,6 +475,7 @@ int main(int argc, const char* argv[])
 
 
         for (unsigned i=1;i<(unsigned)argc;i++) {
         for (unsigned i=1;i<(unsigned)argc;i++) {
             if (streq(argv[i],"--daemon") || streq(argv[i],"-d")) {
             if (streq(argv[i],"--daemon") || streq(argv[i],"-d")) {
+                i++; // consumed within checkCreateDaemon(), bump up here
             }
             }
             else if (streq(argv[i],"--server") || streq(argv[i],"-s"))
             else if (streq(argv[i],"--server") || streq(argv[i],"-s"))
                 server = argv[++i];
                 server = argv[++i];

+ 50 - 0
dockerfiles/platform-build-incremental-container/Dockerfile

@@ -0,0 +1,50 @@
+##############################################################################
+#
+#    HPCC SYSTEMS software Copyright (C) 2021 HPCC Systems®.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+##############################################################################
+
+# Base container image that builds all HPCC platform components
+
+ARG CR_USER=hpccsystems
+ARG CR_REPO=docker.io
+ARG PLATFORM_PRBASE_VER
+FROM ${CR_REPO}/${CR_USER}/smoketest-platform-build:${PLATFORM_PRBASE_VER}
+
+USER hpcc
+
+WORKDIR /hpcc-dev/HPCC-Platform
+ARG GITHUB_REPO
+RUN git remote add upstream https://github.com/${GITHUB_REPO}
+ARG GITHUB_PRREF
+RUN echo GITHUB_PRREF=${GITHUB_PRREF}
+RUN echo git fetch upstream +${GITHUB_PRREF}
+RUN git fetch upstream +${GITHUB_PRREF}
+RUN git checkout FETCH_HEAD
+RUN git submodule update --init --recursive
+
+WORKDIR /hpcc-dev/build
+
+ARG BUILD_THREADS
+RUN if [ -n "${BUILD_THREADS}" ] ; then echo ${BUILD_THREADS} > ~/build_threads; else echo $(nproc) > ~/build_threads ; fi
+RUN echo Building with $(cat ~/build_threads) threads
+RUN make -j$(cat ~/build_threads) jlib
+RUN make -j$(cat ~/build_threads) esp
+RUN make -j$(cat ~/build_threads) roxie
+RUN make -j$(cat ~/build_threads) ws_workunits ecl
+RUN make -j$(cat ~/build_threads)
+
+USER root
+
+RUN make -j$(cat ~hpcc/build_threads) install

+ 4 - 1
dockerfiles/platform-build/Dockerfile

@@ -18,7 +18,10 @@
 # Base container image that builds all HPCC platform components
 # Base container image that builds all HPCC platform components
 
 
 ARG BASE_VER=8.6 
 ARG BASE_VER=8.6 
-FROM hpccsystems/platform-build-base:${BASE_VER}
+ARG CR_USER=hpccsystems
+ARG CR_REPO=docker.io
+ARG CR_CONTAINER_NAME=platform-build-base
+FROM ${CR_REPO}/${CR_USER}/${CR_CONTAINER_NAME}:${BASE_VER}
 
 
 RUN groupadd -g 10001 hpcc
 RUN groupadd -g 10001 hpcc
 RUN useradd -s /bin/bash -r -m -N -c "hpcc runtime User" -u 10000 -g hpcc hpcc
 RUN useradd -s /bin/bash -r -m -N -c "hpcc runtime User" -u 10000 -g hpcc hpcc

+ 24 - 8
esp/src/src-react/components/Title.tsx

@@ -1,8 +1,8 @@
 import * as React from "react";
 import * as React from "react";
-import { ContextualMenuItemType, DefaultButton, IconButton, IIconProps, Image, IPanelProps, IPersonaSharedProps, IRenderFunction, Link, Panel, PanelType, Persona, PersonaSize, SearchBox, Stack, Text, useTheme } from "@fluentui/react";
+import { ContextualMenuItemType, DefaultButton, IconButton, IIconProps, Image, IPanelProps, IPersonaSharedProps, IRenderFunction, Link, mergeStyleSets, Panel, PanelType, Persona, PersonaSize, SearchBox, Stack, Text, useTheme } from "@fluentui/react";
+import { useBoolean } from "@fluentui/react-hooks";
 import { About } from "./About";
 import { About } from "./About";
 import { MyAccount } from "./MyAccount";
 import { MyAccount } from "./MyAccount";
-import { useBoolean } from "@fluentui/react-hooks";
 
 
 import * as WsAccount from "src/ws_account";
 import * as WsAccount from "src/ws_account";
 import * as cookie from "dojo/cookie";
 import * as cookie from "dojo/cookie";
@@ -58,11 +58,9 @@ export const DevTitle: React.FunctionComponent<DevTitleProps> = ({
             items: [
             items: [
                 { key: "legacy", text: nlsHPCC.OpenLegacyECLWatch, href: "/esp/files/stub.htm" },
                 { key: "legacy", text: nlsHPCC.OpenLegacyECLWatch, href: "/esp/files/stub.htm" },
                 { key: "divider_0", itemType: ContextualMenuItemType.Divider },
                 { key: "divider_0", itemType: ContextualMenuItemType.Divider },
-                { key: "errors", href: "#/log", text: `${nlsHPCC.ErrorWarnings} ${log.length > 0 ? `(${log.length})` : ""}`, },
-                { key: "divider_1", itemType: ContextualMenuItemType.Divider },
                 { key: "banner", text: nlsHPCC.SetBanner },
                 { key: "banner", text: nlsHPCC.SetBanner },
                 { key: "toolbar", text: nlsHPCC.SetToolbar },
                 { key: "toolbar", text: nlsHPCC.SetToolbar },
-                { key: "divider_2", itemType: ContextualMenuItemType.Divider },
+                { key: "divider_1", itemType: ContextualMenuItemType.Divider },
                 { key: "docs", href: "https://hpccsystems.com/training/documentation/", text: nlsHPCC.Documentation, target: "_blank" },
                 { key: "docs", href: "https://hpccsystems.com/training/documentation/", text: nlsHPCC.Documentation, target: "_blank" },
                 { key: "downloads", href: "https://hpccsystems.com/download", text: nlsHPCC.Downloads, target: "_blank" },
                 { key: "downloads", href: "https://hpccsystems.com/download", text: nlsHPCC.Downloads, target: "_blank" },
                 { key: "releaseNotes", href: "https://hpccsystems.com/download/release-notes", text: nlsHPCC.ReleaseNotes, target: "_blank" },
                 { key: "releaseNotes", href: "https://hpccsystems.com/download/release-notes", text: nlsHPCC.ReleaseNotes, target: "_blank" },
@@ -75,7 +73,7 @@ export const DevTitle: React.FunctionComponent<DevTitleProps> = ({
                         ]
                         ]
                     }
                     }
                 },
                 },
-                { key: "divider_3", itemType: ContextualMenuItemType.Divider },
+                { key: "divider_2", itemType: ContextualMenuItemType.Divider },
                 {
                 {
                     key: "lock", text: nlsHPCC.Lock, disabled: !currentUser?.username, onClick: () => {
                     key: "lock", text: nlsHPCC.Lock, disabled: !currentUser?.username, onClick: () => {
                         fetch("esp/lock", {
                         fetch("esp/lock", {
@@ -98,16 +96,29 @@ export const DevTitle: React.FunctionComponent<DevTitleProps> = ({
                         });
                         });
                     }
                     }
                 },
                 },
-                { key: "divider_4", itemType: ContextualMenuItemType.Divider },
+                { key: "divider_3", itemType: ContextualMenuItemType.Divider },
                 { key: "config", href: "#/config", text: nlsHPCC.Configuration },
                 { key: "config", href: "#/config", text: nlsHPCC.Configuration },
                 { key: "about", text: nlsHPCC.About, onClick: () => setShowAbout(true) }
                 { key: "about", text: nlsHPCC.About, onClick: () => setShowAbout(true) }
             ],
             ],
             directionalHintFixed: true
             directionalHintFixed: true
         };
         };
-    }, [currentUser?.username, log.length]);
+    }, [currentUser?.username]);
 
 
     const theme = useTheme();
     const theme = useTheme();
 
 
+    const btnStyles = mergeStyleSets({
+        errorsWarnings: {
+            border: "none",
+            background: "transparent",
+            minWidth: 48,
+            padding: "0 10px 0 4px",
+            color: theme.semanticColors.link
+        },
+        errorsWarningsCount: {
+            margin: "-3px 0 0 -3px"
+        }
+    });
+
     React.useEffect(() => {
     React.useEffect(() => {
         WsAccount.MyAccount({})
         WsAccount.MyAccount({})
             .then(({ MyAccountResponse }) => {
             .then(({ MyAccountResponse }) => {
@@ -139,6 +150,11 @@ export const DevTitle: React.FunctionComponent<DevTitleProps> = ({
                         </Stack.Item>
                         </Stack.Item>
                     }
                     }
                     <Stack.Item align="center">
                     <Stack.Item align="center">
+                        <DefaultButton href="#/log" title={nlsHPCC.ErrorWarnings} iconProps={{ iconName: log.length > 0 ? "RingerSolid" : "Ringer" }} className={btnStyles.errorsWarnings}>
+                            <span className={btnStyles.errorsWarningsCount}>{`(${log.length})`}</span>
+                        </DefaultButton>
+                    </Stack.Item>
+                    <Stack.Item align="center">
                         <IconButton title={nlsHPCC.Advanced} iconProps={collapseMenuIcon} menuProps={advMenuProps} />
                         <IconButton title={nlsHPCC.Advanced} iconProps={collapseMenuIcon} menuProps={advMenuProps} />
                     </Stack.Item>
                     </Stack.Item>
                 </Stack>
                 </Stack>

+ 109 - 61
esp/src/src/ECLArchiveWidget.ts

@@ -1,7 +1,7 @@
 import { ECLEditor } from "@hpcc-js/codemirror";
 import { ECLEditor } from "@hpcc-js/codemirror";
 import { extent, Palette } from "@hpcc-js/common";
 import { extent, Palette } from "@hpcc-js/common";
 import { Workunit } from "@hpcc-js/comms";
 import { Workunit } from "@hpcc-js/comms";
-import { HTMLTooltip } from "@hpcc-js/html";
+import { Table } from "@hpcc-js/dgrid";
 import { SplitPanel } from "@hpcc-js/phosphor";
 import { SplitPanel } from "@hpcc-js/phosphor";
 import { DirectoryTree } from "@hpcc-js/tree";
 import { DirectoryTree } from "@hpcc-js/tree";
 import { xml2json } from "@hpcc-js/util";
 import { xml2json } from "@hpcc-js/util";
@@ -45,9 +45,11 @@ export class ECLArchiveWidget {
 
 
     private borderContainer = null;
     private borderContainer = null;
     private editor: ECLEditor = null;
     private editor: ECLEditor = null;
+    private leftPanel: SplitPanel;
+    private summaryTable: Table;
     private archiveViewer: SplitPanel;
     private archiveViewer: SplitPanel;
     private directoryTree: DirectoryTreeEx;
     private directoryTree: DirectoryTreeEx;
-    private tooltip: HTMLTooltip;
+    private selectedMarker: number;
 
 
     buildRendering(args) {
     buildRendering(args) {
         this.inherited(arguments);
         this.inherited(arguments);
@@ -86,14 +88,12 @@ export class ECLArchiveWidget {
             .textFileIcon("fa fa-file-code-o")
             .textFileIcon("fa fa-file-code-o")
             .omitRoot(true)
             .omitRoot(true)
             ;
             ;
+        this.summaryTable = new Table()
+            .sortable(true)
+            ;
         this.editor = new ECLEditor().readOnly(true);
         this.editor = new ECLEditor().readOnly(true);
         this.archiveViewer = new SplitPanel("horizontal");
         this.archiveViewer = new SplitPanel("horizontal");
-        this.tooltip = new HTMLTooltip()
-            .target(document.body)
-            .direction("e")
-            .visible(false)
-            .render()
-            ;
+        this.leftPanel = new SplitPanel("vertical");
 
 
         const tableDataTransformer = d => {
         const tableDataTransformer = d => {
             const ret = d.map((n: any) => {
             const ret = d.map((n: any) => {
@@ -132,10 +132,16 @@ export class ECLArchiveWidget {
                     .iconSize(20)
                     .iconSize(20)
                     .rowItemPadding(2)
                     .rowItemPadding(2)
                     ;
                     ;
-                context.archiveViewer
+                context.leftPanel
                     .addWidget(context.directoryTree)
                     .addWidget(context.directoryTree)
+                    .addWidget(context.summaryTable)
+                    .relativeSizes([0.38, 0.62])
+                    .lazyRender()
+                    ;
+                context.archiveViewer
+                    .addWidget(context.leftPanel)
                     .addWidget(context.editor)
                     .addWidget(context.editor)
-                    .relativeSizes([0.1, 0.9])
+                    .relativeSizes([0.2, 0.8])
                     .lazyRender()
                     .lazyRender()
                     ;
                     ;
                 const scopesOptions = {
                 const scopesOptions = {
@@ -365,6 +371,7 @@ export class ECLArchiveWidget {
                         addMarkers(markers);
                         addMarkers(markers);
                     });
                     });
                 }
                 }
+                updateSummary(markers);
             }
             }
 
 
             function recursiveSort(n) {
             function recursiveSort(n) {
@@ -402,6 +409,73 @@ export class ECLArchiveWidget {
                 return label;
                 return label;
             }
             }
         }
         }
+        function updateSummary(markers) {
+            const propCounts = {};
+            const propFormats = {};
+            const propSums = markers.reduce((ret, n)=>{
+                n.properties.forEach(prop=>{
+                    if(prop.Measure !== undefined){
+                        if(!propCounts[prop.Name]){
+                            propCounts[prop.Name] = 0;
+                            propFormats[prop.Name] = prop.Measure;
+                            ret[prop.Name] = 0;
+                        }
+                        propCounts[prop.Name]++;
+                        ret[prop.Name] += Number(prop.RawValue);
+                    }
+                });
+                return ret;
+            }, {});
+            const propAvgs = Object.keys(propSums).reduce((ret, k)=>{
+                ret[k] = propSums[k] / propCounts[k];
+                return ret;
+            }, {});
+            context.summaryTable
+                .columns(["Name", "Cnt", "Avg", "Sum"])
+                .data([
+                    ...Object.keys(propSums).map(k=>{
+                        let avg = propAvgs[k];
+                        let sum = propSums[k];
+
+                        const isTime = propFormats[k] === "ns";
+                        const isSize = propFormats[k] === "sz";
+
+                        if(isTime) {
+                            avg = _formatTime(avg);
+                            sum = _formatTime(sum);
+                        } else if (isSize) {
+                            avg = _formatSize(avg);
+                            sum = _formatSize(sum);
+                        } else {
+                            avg = avg.toFixed(3);
+                            sum = sum.toFixed(3);
+                        }
+                        return [
+                            k,
+                            propCounts[k],
+                            avg,
+                            sum,
+                        ];
+                    })
+                ])
+                .lazyRender()
+                ;
+            function _formatTime(v){
+                if(v > 1000000000) {
+                    return (v / 1000000000).toFixed(3) + "s";
+                }
+                return (v / 1000000).toFixed(3) + "ms";
+            }
+            function _formatSize(v){
+                if(v > 1000000000) {
+                    return (v * 0.000000000931).toFixed(3) + "Gb";
+                }
+                else if(v > 1000000) {
+                    return (v * 0.0000009537).toFixed(3) + "Mb";
+                }
+                return (v * 0.000977).toFixed(3) + "Kb";
+            }
+        }
         function buildMarkerData(scopesArr) {
         function buildMarkerData(scopesArr) {
             const markers = {};
             const markers = {};
 
 
@@ -460,10 +534,7 @@ export class ECLArchiveWidget {
 
 
             return markers;
             return markers;
         }
         }
-        function markerTooltipTable(marker) {
-            const table = document.createElement("table");
-            const thead = document.createElement("thead");
-            const tbody = document.createElement("tbody");
+        function markerTableData(marker) {
             const labels = [];
             const labels = [];
             const tableDataArr = marker.tableData.map((_table, tableIdx) => {
             const tableDataArr = marker.tableData.map((_table, tableIdx) => {
                 const tableData = JSON.parse(_table);
                 const tableData = JSON.parse(_table);
@@ -489,36 +560,7 @@ export class ECLArchiveWidget {
                     })
                     })
                 ];
                 ];
             });
             });
-
-            _data
-                .filter(row => row[0] === "Label")
-                .forEach(row => {
-                    appendRow(row, thead, () => true);
-                });
-            _data
-                .filter(row => row[0] !== "Label")
-                .forEach(row => {
-                    appendRow(row, tbody, idx => idx === 0);
-                });
-            table.appendChild(thead);
-            table.appendChild(tbody);
-            table.style.maxWidth = "500px";
-            return table;
-
-            function appendRow(cellArr, parentNode, thCondition) {
-                const tr = document.createElement("tr");
-                tr.style.maxHeight = "200px";
-                cellArr.forEach((cellText, i) => {
-                    const td = document.createElement(thCondition(i) ? "th" : "td");
-                    td.style.maxWidth = "180px";
-                    td.style.textAlign = i === 0 ? "right" : "left";
-                    td.style.overflow = "hidden";
-                    td.style.textOverflow = "ellipsis";
-                    td.textContent = cellText;
-                    tr.appendChild(td);
-                });
-                parentNode.appendChild(tr);
-            }
+            return _data;
         }
         }
         function addMarkers(markers) {
         function addMarkers(markers) {
             const palette = Palette.rainbow("YlOrRd");
             const palette = Palette.rainbow("YlOrRd");
@@ -537,24 +579,30 @@ export class ECLArchiveWidget {
                     marker.color,
                     marker.color,
                     "Verdana",
                     "Verdana",
                     "12px",
                     "12px",
+                    () => {},
+                    () => {},
                     () => {
                     () => {
-                        //onmouseenter
-                        const _content = markerTooltipTable(marker);
-                        context.tooltip._cursorLoc = [
-                            (event as MouseEvent).clientX,
-                            (event as MouseEvent).clientY
-                        ];
-                        context.tooltip
-                            .followCursor(true)
-                            .visible(true)
-                            .fitContent(true)
-                            .tooltipContent(_content)
-                            .render()
-                            ;
-                    },
-                    () => {
-                        //onmouseleave
-                        context.tooltip.visible(false);
+                        if(context.selectedMarker === marker.lineNum) {
+                            updateSummary(markers);
+                            context.selectedMarker = -1;
+                            const columnArr = context.summaryTable.columns();
+                            columnArr[0] = "Name";
+                            context.summaryTable
+                                .columns(columnArr)
+                                .lazyRender()
+                                ;
+                        } else {
+
+                            const _data = markerTableData(marker);
+                            
+                            context.summaryTable
+                                .columns(["Line: "+marker.lineNum, ...Array(_data[0].length).fill("")])
+                                .data(_data)
+                                .lazyRender()
+                                ;
+
+                            context.selectedMarker = marker.lineNum;
+                        }
                     }
                     }
                 );
                 );
             });
             });

+ 1 - 1
initfiles/bash/etc/systemd/system/CMakeLists.txt

@@ -32,7 +32,7 @@ set(componentList
     backupnode
     backupnode
     toposerver)
     toposerver)
 
 
-set(dafilesrv "dafilesrv" "-L ${LOG_PATH} -I %i -D" "")
+set(dafilesrv "dafilesrv" "--logDir=${LOG_PATH} --name=%i --daemon" "")
 set(dali "daserver" "--daemon %i" "dafilesrv.service")
 set(dali "daserver" "--daemon %i" "dafilesrv.service")
 set(dfuserver "dfuserver" "--daemon %i" "dafilesrv.service")
 set(dfuserver "dfuserver" "--daemon %i" "dafilesrv.service")
 set(eclagent "agentexec" "--daemon %i" "dafilesrv.service")
 set(eclagent "agentexec" "--daemon %i" "dafilesrv.service")

+ 2 - 1
roxie/ccd/ccdqueue.cpp

@@ -2862,10 +2862,11 @@ public:
         if (udpResendLostPackets && udpMaxSlotsPerClient > TRACKER_BITS)
         if (udpResendLostPackets && udpMaxSlotsPerClient > TRACKER_BITS)
             udpMaxSlotsPerClient = TRACKER_BITS;
             udpMaxSlotsPerClient = TRACKER_BITS;
         unsigned serverFlowPort = topology->getPropInt("@serverFlowPort", CCD_SERVER_FLOW_PORT);
         unsigned serverFlowPort = topology->getPropInt("@serverFlowPort", CCD_SERVER_FLOW_PORT);
+        bool sendFlowOnDataPort = topology->getPropBool("@sendFlowOnDataPort", true);
         unsigned dataPort = topology->getPropInt("@dataPort", CCD_DATA_PORT);
         unsigned dataPort = topology->getPropInt("@dataPort", CCD_DATA_PORT);
         unsigned clientFlowPort = topology->getPropInt("@clientFlowPort", CCD_CLIENT_FLOW_PORT);
         unsigned clientFlowPort = topology->getPropInt("@clientFlowPort", CCD_CLIENT_FLOW_PORT);
         receiveManager.setown(createReceiveManager(serverFlowPort, dataPort, clientFlowPort, udpQueueSize, udpMaxSlotsPerClient, encryptionInTransit));
         receiveManager.setown(createReceiveManager(serverFlowPort, dataPort, clientFlowPort, udpQueueSize, udpMaxSlotsPerClient, encryptionInTransit));
-        sendManager.setown(createSendManager(serverFlowPort, dataPort, clientFlowPort, udpSendQueueSize, fastLaneQueue ? 3 : 2, bucket, encryptionInTransit));
+        sendManager.setown(createSendManager(sendFlowOnDataPort ? dataPort : serverFlowPort, dataPort, clientFlowPort, udpSendQueueSize, fastLaneQueue ? 3 : 2, bucket, encryptionInTransit));
     }
     }
 
 
     virtual void abortPendingData(const SocketEndpoint &ep) override
     virtual void abortPendingData(const SocketEndpoint &ep) override

+ 7 - 1
roxie/udplib/udpmsgpk.cpp

@@ -563,9 +563,15 @@ unsigned CMessageCollator::queryResends() const
 
 
 bool CMessageCollator::attach_databuffer(DataBuffer *dataBuff)
 bool CMessageCollator::attach_databuffer(DataBuffer *dataBuff)
 {
 {
-    activity = true;
     UdpPacketHeader *pktHdr = (UdpPacketHeader*) dataBuff->data;
     UdpPacketHeader *pktHdr = (UdpPacketHeader*) dataBuff->data;
     totalBytesReceived += pktHdr->length;
     totalBytesReceived += pktHdr->length;
+    if (pktHdr->node.isNull())   // Indicates a packet that has been identified as a duplicate to be logged and discarded
+    {
+        noteDuplicate((pktHdr->pktSeq & UDP_PACKET_RESENT) != 0);
+        dataBuff->Release();
+        return true;
+    }
+    activity = true;
     if (memLimitExceeded || roxiemem::memPoolExhausted())
     if (memLimitExceeded || roxiemem::memPoolExhausted())
     {
     {
         DBGLOG("UdpCollator: mem limit exceeded");
         DBGLOG("UdpCollator: mem limit exceeded");

+ 1 - 1
roxie/udplib/udpsha.hpp

@@ -40,7 +40,7 @@ struct UdpPacketHeader
 {
 {
     unsigned short length;      // total length of packet including the header, data, and meta
     unsigned short length;      // total length of packet including the header, data, and meta
     unsigned short metalength;  // length of metadata (comes after header and data)
     unsigned short metalength;  // length of metadata (comes after header and data)
-    ServerIdentifier  node;        // Node this message came from
+    ServerIdentifier  node;     // Node this message came from
     unsigned       msgSeq;      // sequence number of messages ever sent from given node, used with ruid to tell which packets are from same message
     unsigned       msgSeq;      // sequence number of messages ever sent from given node, used with ruid to tell which packets are from same message
     unsigned       pktSeq;      // sequence number of this packet within the message (top bit signifies final packet)
     unsigned       pktSeq;      // sequence number of this packet within the message (top bit signifies final packet)
     sequence_t     sendSeq;     // sequence number of this packet among all those send from this node to this target
     sequence_t     sendSeq;     // sequence number of this packet among all those send from this node to this target

+ 18 - 28
roxie/udplib/udptrr.cpp

@@ -54,6 +54,9 @@ static unsigned lastFlowPermitsSent = 0;
 static unsigned lastFlowRequestsReceived = 0;
 static unsigned lastFlowRequestsReceived = 0;
 static unsigned lastDataPacketsReceived = 0;
 static unsigned lastDataPacketsReceived = 0;
 
 
+// The code that redirects flow messages from data socket to flow socket relies on the assumption tested here
+static_assert(sizeof(UdpRequestToSendMsg) < sizeof(UdpPacketHeader), "Expected UDP rts size to be less than packet header");
+
 class CReceiveManager : implements IReceiveManager, public CInterface
 class CReceiveManager : implements IReceiveManager, public CInterface
 {
 {
     /*
     /*
@@ -573,7 +576,8 @@ class CReceiveManager : implements IReceiveManager, public CInterface
     class receive_data : public Thread 
     class receive_data : public Thread 
     {
     {
         CReceiveManager &parent;
         CReceiveManager &parent;
-        ISocket *receive_socket;
+        ISocket *receive_socket = nullptr;
+        ISocket *selfFlowSocket = nullptr;
         std::atomic<bool> running = { false };
         std::atomic<bool> running = { false };
         Semaphore started;
         Semaphore started;
         
         
@@ -585,6 +589,7 @@ class CReceiveManager : implements IReceiveManager, public CInterface
             if (check_max_socket_read_buffer(ip_buffer) < 0) 
             if (check_max_socket_read_buffer(ip_buffer) < 0) 
                 throw MakeStringException(ROXIE_UDP_ERROR, "System socket max read buffer is less than %u", ip_buffer);
                 throw MakeStringException(ROXIE_UDP_ERROR, "System socket max read buffer is less than %u", ip_buffer);
             receive_socket = ISocket::udp_create(parent.data_port);
             receive_socket = ISocket::udp_create(parent.data_port);
+            selfFlowSocket = ISocket::udp_connect(SocketEndpoint(parent.receive_flow_port, myNode.getIpAddress()));
             receive_socket->set_receive_buffer_size(ip_buffer);
             receive_socket->set_receive_buffer_size(ip_buffer);
             size32_t actualSize = receive_socket->get_receive_buffer_size();
             size32_t actualSize = receive_socket->get_receive_buffer_size();
             DBGLOG("UdpReceiver: rcv_data_socket created port=%d requested sockbuffsize=%d actual sockbuffsize=%d", parent.data_port, ip_buffer, actualSize);
             DBGLOG("UdpReceiver: rcv_data_socket created port=%d requested sockbuffsize=%d actual sockbuffsize=%d", parent.data_port, ip_buffer, actualSize);
@@ -603,8 +608,11 @@ class CReceiveManager : implements IReceiveManager, public CInterface
             running = false;
             running = false;
             if (receive_socket)
             if (receive_socket)
                 receive_socket->close();
                 receive_socket->close();
+            if (selfFlowSocket)
+                selfFlowSocket->close();
             join();
             join();
             ::Release(receive_socket);
             ::Release(receive_socket);
+            ::Release(selfFlowSocket);
         }
         }
 
 
         virtual int run() 
         virtual int run() 
@@ -625,7 +633,13 @@ class CReceiveManager : implements IReceiveManager, public CInterface
                 {
                 {
                     unsigned int res;
                     unsigned int res;
                     b = bufferManager->allocate();
                     b = bufferManager->allocate();
-                    receive_socket->read(b->data, 1, DATA_PAYLOAD, res, 5);
+                    while (true)
+                    {
+                        receive_socket->read(b->data, 1, DATA_PAYLOAD, res, 5);
+                        if (res!=sizeof(UdpRequestToSendMsg))
+                            break;
+                        selfFlowSocket->write(b->data, res);
+                    }
                     dataPacketsReceived++;
                     dataPacketsReceived++;
                     UdpPacketHeader &hdr = *(UdpPacketHeader *) b->data;
                     UdpPacketHeader &hdr = *(UdpPacketHeader *) b->data;
                     assert(hdr.length == res && hdr.length > sizeof(hdr));
                     assert(hdr.length == res && hdr.length > sizeof(hdr));
@@ -637,8 +651,7 @@ class CReceiveManager : implements IReceiveManager, public CInterface
                             StringBuffer s;
                             StringBuffer s;
                             DBGLOG("UdpReceiver: discarding unwanted resent packet %" SEQF "u %x from %s", hdr.sendSeq, hdr.pktSeq, hdr.node.getTraceText(s).str());
                             DBGLOG("UdpReceiver: discarding unwanted resent packet %" SEQF "u %x from %s", hdr.sendSeq, hdr.pktSeq, hdr.node.getTraceText(s).str());
                         }
                         }
-                        parent.noteDuplicate(b);
-                        ::Release(b);
+                        hdr.node.clear();  // Used to indicate a duplicate that collate thread should discard. We don't discard on this thread as don't want to do anything that requires locks...
                     }
                     }
                     else
                     else
                     {
                     {
@@ -647,8 +660,8 @@ class CReceiveManager : implements IReceiveManager, public CInterface
                             StringBuffer s;
                             StringBuffer s;
                             DBGLOG("UdpReceiver: %u bytes received packet %" SEQF "u %x from %s", res, hdr.sendSeq, hdr.pktSeq, hdr.node.getTraceText(s).str());
                             DBGLOG("UdpReceiver: %u bytes received packet %" SEQF "u %x from %s", res, hdr.sendSeq, hdr.pktSeq, hdr.node.getTraceText(s).str());
                         }
                         }
-                        parent.input_queue->pushOwn(b);
                     }
                     }
+                    parent.input_queue->pushOwn(b);
                     b = NULL;
                     b = NULL;
                 }
                 }
                 catch (IException *e) 
                 catch (IException *e) 
@@ -790,29 +803,6 @@ class CReceiveManager : implements IReceiveManager, public CInterface
             collatePacket(dataBuff);
             collatePacket(dataBuff);
         }
         }
     }
     }
-    void noteDuplicate(DataBuffer *dataBuff)
-    {
-        const UdpPacketHeader *pktHdr = (UdpPacketHeader*) dataBuff->data;
-        Linked <CMessageCollator> msgColl;
-        SpinBlock b(collatorsLock);
-        try
-        {
-            msgColl.set(collators[pktHdr->ruid]);
-        }
-        catch (IException *E)
-        {
-            EXCLOG(E);
-            E->Release();
-        }
-        catch (...)
-        {
-            IException *E = MakeStringException(ROXIE_INTERNAL_ERROR, "Unexpected exception caught in CPacketCollator::run");
-            EXCLOG(E);
-            E->Release();
-        }
-        if (msgColl)
-            msgColl->noteDuplicate((pktHdr->pktSeq & UDP_PACKET_RESENT) != 0);
-    }
 
 
     void collatePacket(DataBuffer *dataBuff)
     void collatePacket(DataBuffer *dataBuff)
     {
     {

+ 1 - 1
system/jlib/jmutex.hpp

@@ -31,7 +31,7 @@ extern jlib_decl void spinUntilReady(std::atomic_uint &value);
 
 
 #ifdef _DEBUG
 #ifdef _DEBUG
 //#define SPINLOCK_USE_MUTEX // for testing
 //#define SPINLOCK_USE_MUTEX // for testing
-//#define SPINLOCK_RR_CHECK     // checks for realtime threads
+#define SPINLOCK_RR_CHECK     // checks for realtime threads
 #define _ASSERT_LOCK_SUPPORT
 #define _ASSERT_LOCK_SUPPORT
 #endif
 #endif
 
 

+ 4 - 1
system/metrics/sinks/prometheus/prometheusSink.cpp

@@ -104,7 +104,10 @@ void PrometheusMetricSink::toPrometheusMetrics(const std::vector<std::shared_ptr
 
 
     for (auto &pMetric: reportMetrics)
     for (auto &pMetric: reportMetrics)
     {
     {
-        const std::string & name = pMetric->queryName();
+        std::string name = pMetric->queryName();
+
+        //'.' is a known char used in HPCC metric names but invalid in Prometheus
+        std::replace(name.begin(), name.end(), '.', '_');
         if (verbose)
         if (verbose)
         {
         {
             if (!pMetric->queryDescription().empty())
             if (!pMetric->queryDescription().empty())