
HPCC-20643 Move spark build to a plugin

Signed-off-by: Michael Gardner <michael.gardner@lexisnexisrisk.com>

HPCC-20643 Move componentfile, scripts and init

Signed-off-by: Michael Gardner <michael.gardner@lexisnexisrisk.com>

HPCC-20643 Moved init for spark into plugin, default xml if SPARK=ON kept in platform

Signed-off-by: Michael Gardner <michael.gardner@lexisnexisrisk.com>

HPCC-20643 Changes in review

Signed-off-by: Michael Gardner <michael.gardner@lexisnexisrisk.com>

HPCC-20643 Genenvrules change for envgen functionality

Signed-off-by: Michael Gardner <michael.gardner@lexisnexisrisk.com>
Michael Gardner, 6 years ago
commit c770c94ade

+ 1 - 4
CMakeLists.txt

@@ -163,6 +163,7 @@ if ( PLUGIN )
     HPCC_ADD_SUBDIRECTORY (plugins/mysql "MYSQLEMBED")
     HPCC_ADD_SUBDIRECTORY (plugins/exampleplugin "EXAMPLEPLUGIN")
     HPCC_ADD_SUBDIRECTORY (plugins/couchbase "COUCHBASEEMBED")
+    HPCC_ADD_SUBDIRECTORY (plugins/spark "SPARK")
 elseif ( NOT MAKE_DOCS_ONLY )
     HPCC_ADD_SUBDIRECTORY (system)
     HPCC_ADD_SUBDIRECTORY (initfiles)
@@ -201,10 +202,6 @@ if(APPLE)
     HPCC_ADD_SUBDIRECTORY(package)
 endif(APPLE)
 
-if(USE_SPARK)
-    HPCC_ADD_SUBDIRECTORY(spark "PLATFORM")
-endif(USE_SPARK)
-
 ###
 ## CPack install and packaging setup.
 ###
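
The plugin is now pulled in through the same HPCC_ADD_SUBDIRECTORY gating used for the other embed plugins. The macro itself lives in the platform's cmake_modules and is not part of this diff; the sketch below only illustrates the assumed behaviour, with FEATURE_ADD_SUBDIRECTORY as a hypothetical stand-in:

    # Hypothetical stand-in for HPCC_ADD_SUBDIRECTORY(dir "FEATURE"): configure
    # the subdirectory only when the named feature option is enabled.
    macro(FEATURE_ADD_SUBDIRECTORY dir feature)
        if(${feature})
            add_subdirectory(${dir})
        endif()
    endmacro()

    # With -DSPARK=ON the plugin directory is configured; otherwise it is skipped.
    FEATURE_ADD_SUBDIRECTORY(plugins/spark SPARK)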

+ 1 - 1
cmake_modules/commonSetup.cmake

@@ -99,7 +99,6 @@ IF ("${COMMONSETUP_DONE}" STREQUAL "")
   option(USE_PYTHON3 "Enable python3 language support for platform build" ON)
   option(USE_OPTIONAL "Automatically disable requested features with missing dependencies" ON)
   option(JLIB_ONLY  "Build JLIB for other projects such as Configurator, Ganglia Monitoring, etc" OFF)
-  option(USE_SPARK  "Packaging Spark with HPCC" OFF)
   # Generates code that is more efficient, but will cause problems if target platforms do not support it.
   if (CMAKE_SIZEOF_VOID_P EQUAL 8)
     option(USE_INLINE_TSC "Inline calls to read TSC (time stamp counter)" ON)
@@ -150,6 +149,7 @@ IF ("${COMMONSETUP_DONE}" STREQUAL "")
     SQLITE3EMBED
     KAFKA
     COUCHBASEEMBED
+    SPARK
     EXAMPLEPLUGIN)
     foreach(plugin ${PLUGINS_LIST})
         option(${plugin} "Create a package with ONLY the ${plugin} plugin" OFF)
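
For context, the foreach above turns every PLUGINS_LIST entry into its own OFF-by-default cache option, so adding SPARK here is what makes a stand-alone Spark plugin package selectable. A minimal reproduction of that pattern (option text taken from this diff; the -DPLUGIN=ON flag is inferred from the top-level if ( PLUGIN ) branch):

    # Each plugin name becomes a cache option; a Spark-only package build would
    # then be requested with roughly: cmake -DPLUGIN=ON -DSPARK=ON <source-dir>
    set(PLUGINS_LIST
        COUCHBASEEMBED
        SPARK
        EXAMPLEPLUGIN)
    foreach(plugin ${PLUGINS_LIST})
        option(${plugin} "Create a package with ONLY the ${plugin} plugin" OFF)
    endforeach()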

+ 1 - 2
initfiles/bash/etc/systemd/system/CMakeLists.txt

@@ -29,8 +29,7 @@ set(componentList
     sasha
     thor
     thorslave
-    backupnode
-    sparkthor)
+    backupnode)
 
 set(dafilesrv "dafilesrv" "-L ${LOG_PATH} -I %i -D" "")
 set(dali "daserver" "--daemon %i" "dafilesrv.service")

+ 0 - 8
initfiles/bin/CMakeLists.txt

@@ -39,11 +39,3 @@ FOREACH( iFILES
 )
     install ( PROGRAMS ${iFILES} DESTINATION ${EXEC_DIR} COMPONENT Runtime )
 ENDFOREACH ( iFILES )
-
-if(USE_SPARK)
-    configure_file(init_sparkthor.in init_sparkthor @ONLY)
-    install(
-        PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/init_sparkthor
-        DESTINATION ${EXEC_DIR}
-        COMPONENT Runtime)
-endif()

+ 0 - 34
initfiles/componentfiles/configxml/CMakeLists.txt

@@ -25,40 +25,6 @@ CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/RoxieTopology.xsl ${CMAKE_CURRENT_BIN
 CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/thor.xsd.in ${CMAKE_CURRENT_BINARY_DIR}/thor.xsd)
 CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/thor.xsl ${CMAKE_CURRENT_BINARY_DIR}/thor.xsl)
 
-if(USE_SPARK)
-    set(SPARKTHOR_ENV_ENTRY "|.//SparkThorProcess")
-    set(SPARKTHOR_BUILDSET_ENTRY
-"            <BuildSet installSet=\"deploy_map.xml\"
-                name=\"sparkthor\"
-                path=\"componentfiles/sparkthor\"
-                processName=\"SparkThorProcess\"
-                schema=\"sparkThor.xsd\"/>")
-    set(SPARKTHOR_CGENCOMPLIST_LINUX_ENTRY
-"  <Component name=\"sparkthor\" processName=\"SparkThor\" schema=\"sparkThor.xsd\">
-     <File name=\"sparkThor.xsl\" method=\"xslt\" destName=\"spark-env.sh\"/>
-     <File name=\"spark-defaults.xsl\" method=\"xslt\" destName=\"spark-defaults.conf\"/>
-   </Component>")
-    set(SPARKTHOR_CGENCOMPLIST_WIN_ENTRY
-"  <Component name=\"sparkthor\" processName=\"SparkThor\" schema=\"sparkThor.xsd\">
-    <File name=\"sparkThor.xsl\" method=\"xslt\" destName=\"spark-env.conf\"/>
-    <File name=\"spark-defaults.xsl\" method=\"xslt\" destName=\"spark-defaults.conf\"/>
-  </Component>")
-    configure_file(
-        ${CMAKE_CURRENT_SOURCE_DIR}/sparkThor.xsl.in
-        ${CMAKE_CURRENT_BINARY_DIR}/sparkThor.xsl
-        @ONLY)
-    configure_file(
-        ${CMAKE_CURRENT_SOURCE_DIR}/spark-defaults.xsl.in
-        ${CMAKE_CURRENT_BINARY_DIR}/spark-defaults.xsl
-        @ONLY)
-    install(
-        FILES ${CMAKE_CURRENT_SOURCE_DIR}/sparkThor.xsd
-              ${CMAKE_CURRENT_BINARY_DIR}/sparkThor.xsl
-              ${CMAKE_CURRENT_BINARY_DIR}/spark-defaults.xsl
-        DESTINATION componentfiles/configxml
-        COMPONENT Runtime)
-endif()
-
 configure_file(environment.xsd.in environment.xsd @ONLY)
 configure_file(cgencomplist_linux.xml.in cgencomplist_linux.xml @ONLY)
 configure_file(cgencomplist_win.xml.in cgencomplist_win.xml @ONLY)

+ 5 - 1
initfiles/componentfiles/configxml/buildsetCC.xml.in

@@ -48,7 +48,11 @@
                 path="componentfiles/backupnode"
                 processName="BackupNodeProcess"
                 schema="backupnode.xsd"/>
-            @SPARKTHOR_BUILDSET_ENTRY@
+"            <BuildSet installSet="deploy_map.xml"
+                name="sparkthor"
+                path="componentfiles/sparkthor"
+                processName="SparkThorProcess"
+                schema="sparkThor.xsd"/>
             <BuildSet installSet="deploy_map.xml"
                 name="daliplugin"
                 path="componentfiles/daliplugin"

+ 4 - 1
initfiles/componentfiles/configxml/cgencomplist_linux.xml.in

@@ -51,7 +51,10 @@
   <Component name="backupnode" processName="BackupNode" schema="backupnode.xsd">
     <File name="backupnode_vars.xsl" method="xslt" destName="backupnode.conf"/>
   </Component>
-  @SPARKTHOR_CGENCOMPLIST_LINUX_ENTRY@
+  <Component name="sparkthor" processName="SparkThor" schema="sparkThor.xsd">
+    <File name="sparkThor.xsl" method="xslt" destName="spark-env.sh"/>
+    <File name="spark-defaults.xsl" method="xslt" destName="spark-defaults.conf"/>
+  </Component>
   <Component name="ldapServer" processName="LDAPServerProcess">
     <File name="ldapserver.xsl" method="xslt" destName=".ldaprc"/>
   </Component>

+ 4 - 1
initfiles/componentfiles/configxml/cgencomplist_win.xml.in

@@ -70,7 +70,10 @@
   <Component name="backupnode" processName="BackupNode" schema="backupnode.xsd">
     <File name="backupnode.xsl" method="xslt" destName="backupnode.xml"/>
   </Component>
-  @SPARKTHOR_CGENCOMPLIST_WIN_ENTRY@ 
+  <Component name="sparkthor" processName="SparkThor" schema="sparkThor.xsd">
+    <File name="sparkThor.xsl" method="xslt" destName="spark-env.conf"/>
+    <File name="spark-defaults.xsl" method="xslt" destName="spark-defaults.conf"/>
+  </Component>
   <Component name="thor" processName='ThorCluster' schema='thor.xsd'>
     <File name="thor.xsl" method="xslt" destName="thor.xml"/>
     <File name="setvars.xsl" method="xslt" destName="setvars.bat"/>

+ 1 - 1
initfiles/componentfiles/configxml/environment.xsd.in

@@ -795,7 +795,7 @@
             <xs:field xpath="@program"/>
         </xs:keyref>
         <xs:key name="processKey">
-		<xs:selector xpath=".//EclServerProcess|.//EclAgentProcess|.//SybaseProcess|.//SpareProcess|.//HoleCluster|.//ThorCluster|.//DaliServer|.//BackupNodeProcess@SPARKTHOR_ENV_ENTRY"/>
+            <xs:selector xpath=".//EclServerProcess|.//EclAgentProcess|.//SybaseProcess|.//SpareProcess|.//HoleCluster|.//ThorCluster|.//DaliServer|.//BackupNodeProcess|.//SparkThorProcess"/>
             <xs:field xpath="@name"/>
         </xs:key>
         <xs:key name="hmonKey">

+ 8 - 6
initfiles/etc/DIR_NAME/CMakeLists.txt

@@ -20,18 +20,20 @@ if(NOT WSSQL_SERVICE)
     set(WSSQL_SERVICE_BLOCK_E "-->")
 endif(NOT WSSQL_SERVICE)
 
+if(NOT SPARK)
+    set(SPARK_ENV_S "<!-- Built with -DSPARK=OFF ")
+    set(SPARK_ENV_F "-->")
+    set(DNG_SPARKTHOR ",sparkthor")
+endif(NOT SPARK)
+
 if(NOT "${CUSTOM_PACKAGE_SUFFIX}" STREQUAL "")
     set(version_file_prefix "${CUSTOM_PACKAGE_SUFFIX}_")
 endif()
 
-if(NOT USE_SPARK)
-    set(SPARK_ENV_S "<!-- Built with -DUSE_SPARK=OFF ")
-    set(SPARK_ENV_E "-->")
-endif(NOT USE_SPARK)
-
 configure_file("version.in" "version")
 configure_file("environment.conf.in" "environment.conf" IMMEDIATE)
 configure_file("environment.xml.in" "environment.xml" IMMEDIATE)
+configure_file("genenvrules.conf.in" "genenvrules.conf" @ONLY)
 foreach(iFILES
         ${CMAKE_CURRENT_BINARY_DIR}/environment.conf
         ${CMAKE_CURRENT_BINARY_DIR}/version)
@@ -40,7 +42,7 @@ endforeach(iFILES)
 
 install(FILES ${CMAKE_CURRENT_BINARY_DIR}/environment.xml DESTINATION etc/${DIR_NAME}/rpmnew COMPONENT Runtime)
 install(FILES ${CMAKE_CURRENT_BINARY_DIR}/environment.conf DESTINATION etc/${DIR_NAME}/rpmnew COMPONENT Runtime)
-install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/genenvrules.conf DESTINATION etc/${DIR_NAME}/rpmnew COMPONENT Runtime)
+install(FILES ${CMAKE_CURRENT_BINARY_DIR}/genenvrules.conf DESTINATION etc/${DIR_NAME}/rpmnew COMPONENT Runtime)
 
 add_subdirectory(configmgr)
 add_subdirectory(config2mgr)

+ 3 - 3
initfiles/etc/DIR_NAME/environment.xml.in

@@ -906,8 +906,8 @@
              directory="/var/lib/HPCCSystems/mysparkthor"
              name="s1"
              netAddress="."/>
-  </SparkThorProcess>
-  @SPARK_ENV_E@
+   </SparkThorProcess>
+   @SPARK_ENV_F@
  </Software>
  <EnvSettings>
   <pid>${PID_PATH}</pid>
@@ -1248,7 +1248,7 @@
              path="componentfiles/sparkthor"
              processName="SparkThorProcess"
              schema="sparkThor.xsd"/>
-   @SPARK_ENV_E@
+   @SPARK_ENV_F@
   </Build>
  </Programs>
 </Environment>
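
The @SPARK_ENV_S@/@SPARK_ENV_F@ placeholders are set in the initfiles/etc/DIR_NAME/CMakeLists.txt change above. A minimal sketch of how they gate the SparkThorProcess sections of the generated environment.xml, assuming only what that change shows:

    # With SPARK=OFF the placeholders expand to an XML comment opener/closer,
    # commenting the SparkThorProcess sections out of environment.xml; with
    # SPARK=ON they expand to empty strings and the sections remain active.
    if(NOT SPARK)
        set(SPARK_ENV_S "<!-- Built with -DSPARK=OFF ")
        set(SPARK_ENV_F "-->")
    endif()
    configure_file("environment.xml.in" "environment.xml")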

+ 1 - 1
initfiles/etc/DIR_NAME/genenvrules.conf

@@ -1,7 +1,7 @@
 
 [Algorithm]
 max_comps_per_node=4
-do_not_generate=SiteCertificate,dfuplus,soapplus,eclplus,ldapServer,ws_account,eclserver,DynamicESDL,cassandraloggingagent,esploggingagent,loggingmanager,wslogging,daliplugin
+do_not_generate=SiteCertificate,dfuplus,soapplus,eclplus,ldapServer,ws_account,eclserver,DynamicESDL,cassandraloggingagent,esploggingagent,loggingmanager,wslogging,daliplugin@DNG_SPARKTHOR@
 avoid_combo=dali-eclagent,dali-sasha
 comps_on_all_nodes=dafilesrv,ftslave
 exclude_from_comps_on_all_nodes=ldapServer
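
genenvrules.conf is now generated at configure time so that envgen skips sparkthor when the plugin is not built. A sketch of the substitution, combining the if(NOT SPARK) block and the new configure_file() call from the DIR_NAME CMakeLists change (the template is referenced there as genenvrules.conf.in):

    # DNG_SPARKTHOR is only defined when the Spark plugin is absent, so the
    # @DNG_SPARKTHOR@ placeholder either appends ",sparkthor" to the
    # do_not_generate list or expands to nothing.
    if(NOT SPARK)
        set(DNG_SPARKTHOR ",sparkthor")
    endif()
    configure_file("genenvrules.conf.in" "genenvrules.conf" @ONLY)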

+ 0 - 9
initfiles/sbin/CMakeLists.txt

@@ -37,15 +37,6 @@ ADD_CUSTOM_TARGET(ProcessFiles-initfiles-sbin ALL DEPENDS ${outFiles} ${noInstal
 configure_file("${CMAKE_CURRENT_SOURCE_DIR}/regex.awk.in.cmake" "${CMAKE_CURRENT_BINARY_DIR}/regex.awk")
 configure_file("${CMAKE_CURRENT_SOURCE_DIR}/backup.sh.in" "${CMAKE_CURRENT_BINARY_DIR}/backup.sh" @ONLY)
 
-if(USE_SPARK)
-    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/sparkthor.sh.in" "${CMAKE_CURRENT_BINARY_DIR}/sparkthor.sh" @ONLY)
-    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/sparkthor-worker.sh.in" "${CMAKE_CURRENT_BINARY_DIR}/sparkthor-worker.sh" @ONLY)
-    install(PROGRAMS 
-        ${CMAKE_CURRENT_BINARY_DIR}/sparkthor.sh
-        ${CMAKE_CURRENT_BINARY_DIR}/sparkthor-worker.sh
-        DESTINATION sbin COMPONENT Runtime)
-endif(USE_SPARK)
-
 FOREACH( oFILES
     ${outFiles}
     ${CMAKE_CURRENT_BINARY_DIR}/backup.sh

+ 1 - 0
plugins/CMakeLists.txt

@@ -39,3 +39,4 @@ add_subdirectory (kafka)
 add_subdirectory (exampleplugin)
 add_subdirectory (couchbase)
 add_subdirectory (sqs)
+add_subdirectory (spark)

+ 33 - 2
spark/CMakeLists.txt

@@ -26,7 +26,8 @@ cmake_minimum_required(VERSION 3.3)
 
 project(spark-integration)
 
-if(USE_SPARK)
+if(SPARK)
+    ADD_PLUGIN(spark PACKAGES Java MINVERSION 1.8.0)
 
     if(SPARK_URL)
         string( REPLACE "\/" ";" SPARK_URL_LIST ${SPARK_URL} )
@@ -131,6 +132,7 @@ if(USE_SPARK)
         DESTINATION "jars/spark/"
         )
 
+    
     configure_file(spark-defaults.conf.in spark-defaults.conf @ONLY)
     configure_file(spark-env.sh.in spark-env.sh @ONLY)
     install(
@@ -141,4 +143,33 @@ if(USE_SPARK)
         DESTINATION "externals/spark-hadoop/conf"
         )
 
-endif(USE_SPARK)
+    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/sparkthor.sh.in" "${CMAKE_CURRENT_BINARY_DIR}/sparkthor.sh" @ONLY)
+    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/sparkthor-worker.sh.in" "${CMAKE_CURRENT_BINARY_DIR}/sparkthor-worker.sh" @ONLY)
+    install(PROGRAMS 
+        ${CMAKE_CURRENT_BINARY_DIR}/sparkthor.sh
+        ${CMAKE_CURRENT_BINARY_DIR}/sparkthor-worker.sh
+        DESTINATION sbin COMPONENT Runtime)
+
+    configure_file(init_sparkthor.in init_sparkthor @ONLY)
+    install(
+        PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/init_sparkthor
+        DESTINATION ${EXEC_DIR}
+        COMPONENT Runtime)
+    
+    configure_file(${CMAKE_CURRENT_SOURCE_DIR}/sparkThor.xsl.in ${CMAKE_CURRENT_BINARY_DIR}/sparkThor.xsl @ONLY)
+    configure_file(${CMAKE_CURRENT_SOURCE_DIR}/spark-defaults.xsl.in ${CMAKE_CURRENT_BINARY_DIR}/spark-defaults.xsl @ONLY)
+    install(FILES
+        ${CMAKE_CURRENT_SOURCE_DIR}/sparkThor.xsd
+        ${CMAKE_CURRENT_BINARY_DIR}/sparkThor.xsl
+        ${CMAKE_CURRENT_BINARY_DIR}/spark-defaults.xsl
+        DESTINATION componentfiles/configxml
+        COMPONENT Runtime)
+        
+    configure_file(sparkthor@instance.service.in sparkthor@.service @ONLY)
+    install(FILES ${CMAKE_CURRENT_BINARY_DIR}/sparkthor@.service DESTINATION etc/systemd/system COMPONENT Systemd)
+
+    if(PLATFORM)
+        install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/sparkthor-service.install DESTINATION etc/init.d/install COMPONENT Systemd)
+        install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/sparkthor-service.uninstall DESTINATION etc/init.d/uninstall COMPONENT Systemd)
+    endif(PLATFORM)
+endif(SPARK)
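
Taken together, this file now consolidates everything the platform build previously installed for Spark, gated on the new SPARK plugin option. A condensed skeleton of the structure (ADD_PLUGIN is a platform macro; its exact behaviour is assumed here, not shown in this diff):

    project(spark-integration)

    if(SPARK)
        # Assumed to register the plugin package and verify the Java >= 1.8.0
        # dependency.
        ADD_PLUGIN(spark PACKAGES Java MINVERSION 1.8.0)

        # Spark download/unpack and jar install, the sbin and init scripts,
        # the configxml templates and the systemd template unit are all
        # installed from here; the init.d install/uninstall hooks are only
        # packaged for a full platform build (if(PLATFORM)).
    endif(SPARK)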

initfiles/bin/init_sparkthor.in → plugins/spark/init_sparkthor.in


spark/spark-defaults.conf.in → plugins/spark/spark-defaults.conf.in


initfiles/componentfiles/configxml/spark-defaults.xsl.in → plugins/spark/spark-defaults.xsl.in


spark/spark-env.sh.in → plugins/spark/spark-env.sh.in


initfiles/componentfiles/configxml/sparkThor.xsd → plugins/spark/sparkThor.xsd


initfiles/componentfiles/configxml/sparkThor.xsl.in → plugins/spark/sparkThor.xsl.in


+ 3 - 0
plugins/spark/sparkthor-service.install

@@ -0,0 +1,3 @@
+if [[ -e /bin/systemctl ]]; then
+    installFile "$path/etc/systemd/system/sparkthor@.service" "/etc/systemd/system/sparkthor@.service" 1 || exit 1
+fi

+ 3 - 0
plugins/spark/sparkthor-service.uninstall

@@ -0,0 +1,3 @@
+if [[ -e /bin/systemctl ]]; then
+    removeSymlink "/etc/systemd/system/sparkthor@*.service"
+fi

initfiles/sbin/sparkthor-worker.sh.in → plugins/spark/sparkthor-worker.sh.in


initfiles/sbin/sparkthor.sh.in → plugins/spark/sparkthor.sh.in


initfiles/bash/etc/systemd/system/sparkthor@instance.service.in → plugins/spark/sparkthor@instance.service.in