
Merge pull request #2904 from rpastrana/gh-2859-Remove_h2h

gh-2859 Remove h2h from HPCC-Platform/plugins/

Reviewed-by: Gavin Halliday <ghalliday@hpccsystems.com>
Gavin Halliday committed 13 years ago
commit 780026d946

+ 0 - 79
cmake_modules/FindLIBHDFS.cmake

@@ -1,79 +0,0 @@
-################################################################################
-#    Copyright (C) 2011 HPCC Systems.
-#
-#    All rights reserved. This program is free software: you can redistribute it and/or modify
-#    it under the terms of the GNU Affero General Public License as
-#    published by the Free Software Foundation, either version 3 of the
-#    License, or (at your option) any later version.
-#
-#    This program is distributed in the hope that it will be useful,
-#    but WITHOUT ANY WARRANTY; without even the implied warranty of
-#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#    GNU Affero General Public License for more details.
-#
-#    You should have received a copy of the GNU Affero General Public License
-#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
-################################################################################
-
-
-# - Try to find the LIBHDFS xml library
-# Once done this will define
-#
-#  LIBHDFS_FOUND - system has the LIBHDFS library
-#  LIBHDFS_INCLUDE_DIR - the LIBHDFS include directory
-#  LIBHDFS_LIBRARIES - The libraries needed to use LIBHDFS
-
-if (NOT LIBHDFS_FOUND)
-  IF (WIN32)
-    SET (libhdfs_libs "hdfs" "libhdfs")
-  ELSE()
-    SET (libhdfs_libs "hdfs" "libhdfs")
-  ENDIF()
-
-  IF (NOT ${EXTERNALS_DIRECTORY} STREQUAL "")
-    IF (UNIX)
-      IF (${ARCH64BIT} EQUAL 1)
-        SET (osdir "Linux-amd64-64")
-      ELSE()
-        SET (osdir "Linux-i386-32")
-      ENDIF()
-    ELSEIF(WIN32)
-      SET (osdir "lib")
-    ELSE()
-      SET (osdir "unknown")
-    ENDIF()
-    IF (NOT ("${osdir}" STREQUAL "unknown"))
-      FIND_PATH (LIBHDFS_INCLUDE_DIR NAMES libhdfs/hdfs.h PATHS "${EXTERNALS_DIRECTORY}/${HADOOP_PATH}/src/c++" NO_DEFAULT_PATH)
-      FIND_LIBRARY (LIBHDFS_LIBRARIES NAMES ${libhdfs_libs} PATHS "${EXTERNALS_DIRECTORY}/${HADOOP_PATH}/c++/${osdir}" NO_DEFAULT_PATH)
-    ENDIF()
-  ENDIF()
-
-  if (USE_NATIVE_LIBRARIES)
-    # if we didn't find in externals, look in system include path
-    FIND_PATH (LIBHDFS_INCLUDE_DIR NAMES hdfs.h PATHS "${HADOOP_PATH}/src/c++/libhdfs" )
-
-    IF (UNIX)
-      IF (${ARCH64BIT} EQUAL 1)
-        SET (hdosdir "Linux-amd64-64")
-      ELSE()
-        SET (hdosdir "Linux-i386-32")
-      ENDIF()
-    ELSEIF(WIN32)
-      SET (hdosdir "lib")
-    ELSE()
-      SET (hdosdir "unknown")
-    ENDIF()
-
-    IF (NOT ("${hdosdir}" STREQUAL "unknown"))
-     FIND_LIBRARY (LIBHDFS_LIBRARIES NAMES ${libhdfs_libs} PATHS "${HADOOP_PATH}/c++/${hdosdir}/lib" )
-    ENDIF()
-
-  endif()
-
-  include(FindPackageHandleStandardArgs)
-  find_package_handle_standard_args(Libhdfs DEFAULT_MSG
-    LIBHDFS_LIBRARIES
-    LIBHDFS_INCLUDE_DIR
-  )
-  MARK_AS_ADVANCED(LIBHDFS_INCLUDE_DIR LIBHDFS_LIBRARIES )
-ENDIF()
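
For context on the removed find module: it resolved libhdfs either from an externals tree or from the system Hadoop install. A minimal sketch of a configure invocation that would have driven those search paths (the directory values are hypothetical):

    # Hypothetical flags; with EXTERNALS_DIRECTORY set, the module looked for
    # headers under ${EXTERNALS_DIRECTORY}/${HADOOP_PATH}/src/c++ and for
    # libraries under ${EXTERNALS_DIRECTORY}/${HADOOP_PATH}/c++/<osdir>.
    cmake -DEXTERNALS_DIRECTORY=/opt/externals -DHADOOP_PATH=hadoop-1.0.3 ..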

+ 0 - 3
plugins/CMakeLists.txt

@@ -24,6 +24,3 @@ add_subdirectory (parselib)
 add_subdirectory (stringlib)
 add_subdirectory (unicodelib)
 add_subdirectory (workunitservices)
-if (USE_HDFSCONNECTOR)
-  add_subdirectory (dataconnectors)
-endif()

+ 0 - 142
plugins/dataconnectors/CMakeLists.txt

@@ -1,142 +0,0 @@
-project (hpccsystems-dataconnector)
-cmake_minimum_required (VERSION 2.6)
-
-set ( HPCC_DATACONNECTOR_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
-set ( HPCC_SOURCE_DIR ${HPCC_DATACONNECTOR_SOURCE_DIR}/../../)
-include(${HPCC_SOURCE_DIR}/version.cmake)
-
-set ( CMAKE_MODULE_PATH "${HPCC_SOURCE_DIR}/cmake_modules")
-set ( EXECUTABLE_OUTPUT_PATH "${CMAKE_BINARY_DIR}/${CMAKE_BUILD_TYPE}/bin" )
-set ( PRODUCT_PREFIX "hpccsystems" )
-
-SET(CMAKE_SKIP_BUILD_RPATH  FALSE)
-SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
-SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${OSSDIR}/lib")
-SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
-
-#include(${HPCC_SOURCE_DIR}/cmake_modules/optionDefaults.cmake)
-
-if ("${CMAKE_BUILD_TYPE}" STREQUAL "")
-    set ( CMAKE_BUILD_TYPE "Release" )
-elseif (NOT "${CMAKE_BUILD_TYPE}" MATCHES "Debug|Release")
-    message (FATAL_ERROR "Unknown build type ${CMAKE_BUILD_TYPE}")
-endif ()
-message ("-- Making ${CMAKE_BUILD_TYPE} system")
-
-if (CMAKE_SIZEOF_VOID_P EQUAL 8)
-   set ( ARCH64BIT 1 )
-else ()
-   set ( ARCH64BIT 0 )
-endif ()
-message ("-- 64bit architecture is ${ARCH64BIT}")
-
-set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_DEBUG -DDEBUG")
-
-include(${HPCC_SOURCE_DIR}/cmake_modules/optionDefaults.cmake)
-include(${HPCC_SOURCE_DIR}/cmake_modules/commonSetup.cmake)
-
-INCLUDE(InstallRequiredSystemLibraries)
-SET(CPACK_PACKAGE_VERSION_MAJOR ${majorver})
-SET(CPACK_PACKAGE_VERSION_MINOR ${minorver})
-SET(CPACK_PACKAGE_VERSION_PATCH ${point}${stagever})
-set ( CPACK_PACKAGE_CONTACT "HPCCSystems <ossdevelopment@lexisnexis.com>" )
-set( CPACK_SOURCE_GENERATOR TGZ )
-set ( CPACK_RPM_PACKAGE_VERSION "${projname}")
-SET(CPACK_RPM_PACKAGE_RELEASE "${version}${stagever}")
-if ( ${ARCH64BIT} EQUAL 1 )
-    set ( CPACK_RPM_PACKAGE_ARCHITECTURE "x86_64")
-else( ${ARCH64BIT} EQUAL 1 )
-    set ( CPACK_RPM_PACKAGE_ARCHITECTURE "i386")
-endif ( ${ARCH64BIT} EQUAL 1 )
-set(CPACK_SYSTEM_NAME "${CMAKE_SYSTEM_NAME}-${CPACK_RPM_PACKAGE_ARCHITECTURE}")
-
-
-if ("${CMAKE_BUILD_TYPE}" STREQUAL "Release")
-    set(CPACK_STRIP_FILES TRUE)
-endif()
-
-set ( CPACK_INSTALL_CMAKE_PROJECTS "${CMAKE_CURRENT_BINARY_DIR};hdfsconnector;ALL;/")
-
-if ( CMAKE_SYSTEM MATCHES Linux )
-    EXECUTE_PROCESS (
-                COMMAND ${HPCC_SOURCE_DIR}/cmake_modules/distrocheck.sh
-                    OUTPUT_VARIABLE packageManagement
-                        ERROR_VARIABLE  packageManagement
-                )
-    EXECUTE_PROCESS (
-                COMMAND ${HPCC_SOURCE_DIR}/cmake_modules/getpackagerevisionarch.sh
-                    OUTPUT_VARIABLE packageRevisionArch
-                        ERROR_VARIABLE  packageRevisionArch
-                )
-    EXECUTE_PROCESS (
-                COMMAND ${HPCC_SOURCE_DIR}/cmake_modules/getpackagerevisionarch.sh --noarch
-                    OUTPUT_VARIABLE packageRevision
-                        ERROR_VARIABLE  packageRevision
-                )
-
-    message ( "-- Auto Detecting Packaging type")
-    message ( "-- distro uses ${packageManagement}, revision is ${packageRevisionArch}" )
-
-    if ( ${packageManagement} STREQUAL "DEB" )
-        set(CPACK_PACKAGE_FILE_NAME     "${CMAKE_PROJECT_NAME}_${CPACK_RPM_PACKAGE_VERSION}-${version}-${stagever}${packageRevisionArch}")
-    elseif ( ${packageManagement} STREQUAL "RPM" )
-        set(CPACK_PACKAGE_FILE_NAME     "${CMAKE_PROJECT_NAME}_${CPACK_RPM_PACKAGE_VERSION}-${version}-${stagever}.${packageRevisionArch}")
-    else()
-        set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}_${CPACK_RPM_PACKAGE_VERSION}_${version}-${stagever}${CPACK_SYSTEM_NAME}")
-    endif ()
-endif ( CMAKE_SYSTEM MATCHES Linux )
-
-MESSAGE ("-- Current release version is ${CPACK_PACKAGE_FILE_NAME}")
-
-set( CPACK_SOURCE_GENERATOR TGZ )
-
-###
-## CPack commands in this section require cpack 2.8.1 to function.
-## When using cpack 2.8.1, the command "make package" will create
-## an RPM.
-###
-
-if (NOT "${CMAKE_VERSION}" VERSION_LESS "2.8.1")
-    if ( CMAKE_SYSTEM MATCHES Linux )
-        if ( ${packageManagement} STREQUAL "DEB" )
-            if ("${CMAKE_VERSION}" VERSION_EQUAL "2.8.2")
-                message("WARNING: CMAKE 2.8.2  would not build DEB package")
-            else ()
-                set ( CPACK_GENERATOR "${packageManagement}" )
-                message("-- Will build DEB package")
-                ###
-                ## CPack instruction required for Debian
-                ###
-                message ("-- Packing BASH installation files")
-                set ( CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA "${CMAKE_CURRENT_BINARY_DIR}/initfiles/bash/sbin/deb/postinst;${CMAKE_CURRENT_BINARY_DIR}/initfiles/sbin/prerm;${CMAKE_CURRENT_BINARY_DIR}/initfiles/bash/sbin/deb/postrm" )
-            endif ()
-
-        elseif ( ${packageManagement} STREQUAL "RPM" )
-            set ( CPACK_GENERATOR "${packageManagement}" )
-            ###
-            ## CPack instruction required for RPM
-            ###
-            message("-- Will build RPM package")
-            message ("-- Packing BASH installation files")
-            set ( CPACK_RPM_POST_INSTALL_SCRIPT_FILE "${CMAKE_CURRENT_BINARY_DIR}/initfiles/bash/sbin/deb/postinst" )
-
-            set ( CPACK_RPM_PRE_UNINSTALL_SCRIPT_FILE "${CMAKE_CURRENT_BINARY_DIR}/initfiles/sbin/prerm" )
-            set ( CPACK_RPM_POST_UNINSTALL_SCRIPT_FILE "${CMAKE_CURRENT_BINARY_DIR}/initfiles/bash/sbin/deb/postrm" )
-        else()
-            message("WARNING: Unsupported package ${packageManagement}.")
-        endif ()
-
-    endif ( CMAKE_SYSTEM MATCHES Linux )
-    if ( EXISTS ${HPCC_SOURCE_DIR}/cmake_modules/dependencies/${packageRevision}.cmake )
-        include( ${HPCC_SOURCE_DIR}/cmake_modules/dependencies/${packageRevision}.cmake )
-    else()
-        message("-- WARNING: DEPENDENCY FILE FOR ${packageRevision} NOT FOUND, Using deps template.")
-        include( ${HPCC_SOURCE_DIR}/cmake_modules/dependencies/template.cmake )
-    endif()
-else()
-    message("WARNING: CMAKE 2.8.1 or later required to create RPMs from this project")
-endif()
-
-add_subdirectory (hdfsconnector)
-
-INCLUDE(CPack)
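
Note on the removed packaging logic: it let the connector be built and packaged standalone via CPack. A sketch of that flow, assuming a Linux host and an out-of-source build directory (paths hypothetical):

    # Configure, build, and package the standalone connector; DEB vs. RPM is
    # selected automatically via distrocheck.sh, as in the logic above.
    mkdir build && cd build
    cmake ../HPCC-Platform/plugins/dataconnectors
    make
    make package   # requires CMake/CPack 2.8.1 or later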

+ 0 - 44
plugins/dataconnectors/hdfsconnector/CMakeLists.txt

@@ -1,44 +0,0 @@
-project(hdfsconnector)
-
-option(USE_HDFSCONNECTOR "Configure use of hdfs data connector" OFF)
-if ( USE_HDFSCONNECTOR )
-	add_subdirectory (ecl)
-	option(HADOOP_PATH "Set the Hadoop path.")
-	if( NOT HADOOP_PATH )
-		set(HADOOP_PATH "/usr/local/hadoop")
-	endif()
-
-	#set other paths
-	#generate config for script.
-	#add script processor for vars.
-
-	set(HPCC_ETC_DIR "${CMAKE_INSTALL_PREFIX}/${OSSDIR}/etc")
-	set(HPCC_CONF_DIR "${CMAKE_INSTALL_PREFIX}/${OSSDIR}${CONFIG_DIR}")
-	set(HDFSCONN_CONF_FILE "hdfsconnector.conf")
-
-	configure_file("hdfsconnector.conf.in" "hdfsconnector.conf")
-	configure_file("hdfspipe.in" "hdfspipe" @ONLY )
-
-	find_package(JNI REQUIRED)
-	find_package(LIBHDFS REQUIRED)
-
-	set( SRC hdfsconnector.cpp )
-
-	include_directories (
-					${CMAKE_BINARY_DIR}
-					${CMAKE_BINARY_DIR}/oss
-					${JNI_INCLUDE_DIRS}
-					${JAVA_INCLUDE_PATH}
-					${JAVA_INCLUDE_PATH2}
-					${LIBHDFS_INCLUDE_DIR}	)
-
-	HPCC_ADD_EXECUTABLE( hdfsconnector ${SRC} )
-
-	set ( INSTALLDIR "${OSSDIR}/bin")
-	Install ( TARGETS hdfsconnector DESTINATION ${INSTALLDIR} COMPONENT Runtime)
-	Install ( PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/hdfspipe DESTINATION ${INSTALLDIR} COMPONENT Runtime )
-	Install ( FILES ${CMAKE_CURRENT_BINARY_DIR}/hdfsconnector.conf DESTINATION ${HPCC_CONF_DIR} COMPONENT Runtime )
-	target_link_libraries ( hdfsconnector
-					${JAVA_JVM_LIBRARY}
-					${LIBHDFS_LIBRARIES})
-endif()
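
Since USE_HDFSCONNECTOR defaulted to OFF, this file only took effect when the option was enabled explicitly; HADOOP_PATH fell back to /usr/local/hadoop when left unset. A sketch of the configure step (values hypothetical):

    # Hypothetical invocation enabling the connector build.
    cmake -DUSE_HDFSCONNECTOR=ON -DHADOOP_PATH=/usr/local/hadoop ../HPCC-Platform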

+ 0 - 1
plugins/dataconnectors/hdfsconnector/ecl/CMakeLists.txt

@@ -1 +0,0 @@
-Install ( FILES HDFSConnector.ecl DESTINATION "${OSSDIR}/share/DataConnectors" COMPONENT Runtime )

+ 0 - 226
plugins/dataconnectors/hdfsconnector/ecl/HDFSConnector.ecl

@@ -1,226 +0,0 @@
-/* HDFSConnector
-Pipe data to and from Hadoop
-
-It is necessary to add this option to your workunit:
-#option('pickBestEngine', 0);
-
-This will force your HadoopPipe job to run on the target cluster (as opposed to the optimizer
-picking hThor when you've selected Thor, for instance) so that the data lands where you want
-it.
-
-For HadoopOut to work HDFS must have append support enabled.  By default it's disabled.  To
-enable it, add this to hdfs-site.xml:
-		<property>
-			<name>dfs.support.append</name>
-			<value>true</value>
-			<final>true</final>
-		</property>
-*/
-
-import std;
-
-EXPORT HDFSConnector := MODULE
-
-    /*
-     * HDFSConnector.PipeIn - a macro called by the user to pipe in data from the Hadoop file system (HDFS).
-     *
-     * @param ECL_RS            The ECL recordset to pipe into.
-     * @param HadoopFileName    The fully qualified target HDFS file name.
-     * @param Layout            The structure which describes the ECL_RS recordset.
-     * @param HadoopFileFormat  The Hadoop data file format : FLAT | CSV | XML.
-     * @param HDFSHost          The Hadoop DFS host name or IP address.
-     * @param HDSFPort          The Hadoop DFS port number.
-     *                              If targeting a local HDFS, HDFSHost='default' and HDSFPort=0 will work
-     *                              as long as the local hadoop conf folder is visible to the 'hdfspipe' script.
-    */
-
-	export PipeIn(ECL_RS, HadoopFileName, Layout, HadoopFileFormat, HDFSHost, HDSFPort) := MACRO
-	#uniquename(formatstr)
-		%formatstr% := STD.Str.FilterOut(#TEXT(HadoopFileFormat), ' \t\n\r');
-		#IF(%formatstr%[1..3] = 'XML')
-			#IF (LENGTH(%formatstr%) > 3)
-				#uniquename(rowtagcont)
-				#uniquename(firsttok)
-				%firsttok% := STD.Str.Extract(%formatstr%[4..],1);
-				%rowtagcont% := %firsttok%[STD.Str.Find(%firsttok%, '\'',1)+1..STD.Str.Find(%firsttok%, '\'',2)-1];
-
-				#uniquename(headingpos)
-				%headingpos% := STD.Str.Find(%formatstr%, 'HEADING');
-				#IF (%headingpos% > 0)
-					#uniquename(headingcont)
-					#uniquename(headingcont2)
-					#uniquename(headertext)
-					#uniquename(footertext)
-					%headingcont% := %formatstr%[%headingpos%+SIZEOF('HEADING')..];
-					%headingcont2%:= %headingcont%[STD.Str.Find(%headingcont%, '(')+1..STD.Str.Find(%headingcont%, ')')-1];
-
-					%headertext% := 	STD.Str.Extract(%headingcont2%,1);
-					%footertext% := 	STD.Str.Extract(%headingcont2%,2);
-				#END
-			#ELSE
-				%rowtagcont% := 'Row';
-			#END
-			ECL_RS:= PIPE('hdfspipe -si '
-				+ ' -nodeid ' + STD.system.Thorlib.node()
-				+ ' -clustercount ' + STD.system.Thorlib.nodes()
-				+ ' -filename ' + HadoopFileName
-				+ ' -format '	+  %formatstr%[1..3]
-				+ ' -rowtag ' + %rowtagcont%
-				// + ' -headertext ' + '???'
-				// + ' -footertext ' + '???'
-				+ ' -host ' + HDFSHost + ' -port ' + HDSFPort,
-				Layout, HadoopFileFormat);
-
-		#ELSEIF (%formatstr%[1..3] = 'CSV')
-		 #uniquename(termpos)
-		 %termpos% := STD.Str.Find(%formatstr%, 'TERMINATOR');
-
-			#IF(%termpos% > 0)
-				#uniquename(termcont)
-				#uniquename(termcont2)
-				%termcont% := %formatstr%[%termpos%+11..];
-				%termcont2%:= %termcont%[..STD.Str.Find(%termcont%, ')')-1];
-
-				ECL_RS:= PIPE('hdfspipe -si '
-				+ ' -nodeid ' + STD.system.Thorlib.node()
-				+ ' -clustercount ' + STD.system.Thorlib.nodes()
-				+ ' -maxlen ' + sizeof(Layout, MAX)
-				+ ' -filename ' + HadoopFileName
-				+ ' -format '	+  %formatstr%[1..3]
-				+ ' -terminator ' + %termcont2%
-				//+ ' -outputterminator 1'
-				+ ' -host ' + HDFSHost	+ ' -port ' + HDSFPort,
-				Layout, HadoopFileFormat);
-			#ELSE
-				ECL_RS:= PIPE('hdfspipe -si '
-				+ ' -nodeid ' + STD.system.Thorlib.node()
-				+ ' -clustercount ' + STD.system.Thorlib.nodes()
-				+ ' -maxlen ' + sizeof(Layout, MAX)
-				+ ' -filename ' + HadoopFileName
-				+ ' -format '	+  %formatstr%[1..3]
-				+ ' -host ' + HDFSHost	+ ' -port ' + HDSFPort,
-				Layout, HadoopFileFormat);
-			#END
-		#ELSE
-				ECL_RS:= PIPE('hdfspipe -si'
-				+ ' -nodeid ' + STD.system.Thorlib.node()
-				+ ' -clustercount ' + STD.system.Thorlib.nodes()
-				+ ' -reclen ' + sizeof(Layout)
-				+ ' -filename ' + HadoopFileName
-				+ ' -format '	+  %formatstr%
-				+ ' -host ' + HDFSHost 	+ ' -port ' + HDSFPort,
-				Layout);
-		#END
-	ENDMACRO;
-
-    /*
-    HDFSConnector.PipeOut - writes the given recordset 'ECL_RS' to the target HDFS system
-                            in file parts, one file part for each HPCC node.
-
-    ECL_RS              - The ECL recordset to pipe out.
-    HadoopFileName      - The fully qualified target HDFS file name.
-    Layout              - The structure which describes the ECL_RS recordset.
-    HadoopFileFormat    - The Hadoop data file format : FLAT | CSV
-    HDFSHost            - The Hadoop DFS host name or IP address.
-    HDSFPort            - The Hadoop DFS port number.
-    HDFSUser            - The HDFS username used to log in to HDFS in order to write the file;
-                            must have permission to write to the target HDFS location.
-
-    Example:
-
-    HDFSConnector.PipeOut(sue, '/user/hadoop/HDFSAccounts', Layout_CSV_Accounts, CSV, '192.168.56.102', '54310', 'hadoop');
-    HDFSConnector.PipeOut(sue, '/user/hadoop/HDFSPersons', Layout_Flat_Persons, FLAT, '192.168.56.102', '54310', 'hadoop');
-    */
-
-	export PipeOut(ECL_RS, HadoopFileName, Layout, HadoopFileFormat, HDFSHost, HDSFPort, HDFSUser) := MACRO
-	#uniquename(formatstr)
-	#uniquename(outpartaction)
-	#uniquename(mergepartsaction)
-		%formatstr% := STD.Str.FilterOut(#TEXT(HadoopFileFormat), ' \t\n\r');
-		#IF(%formatstr%[1..4] != 'FLAT')
-		OUTPUT(ECL_RS,,
-				PIPE('hdfspipe -sop '
-				+ ' -host ' + HDFSHost
-				+ ' -port ' + HDSFPort
-				+ ' -filename ' + HadoopFileName
-				+ ' -nodeid ' + STD.system.Thorlib.node()
-				+ ' -clustercount ' + STD.system.Thorlib.nodes()
-				+ ' -hdfsuser ' + HDFSUser, HadoopFileFormat));
-		#ELSE
-		OUTPUT(ECL_RS,,
-				PIPE('hdfspipe -sop '
-				+ ' -host ' + HDFSHost
-				+ ' -port ' + HDSFPort
-				+ ' -filename ' + HadoopFileName
-				+ ' -nodeid ' + STD.system.Thorlib.node()
-				+ ' -clustercount ' + STD.system.Thorlib.nodes()
-				+ ' -hdfsuser ' + HDFSUser));
-		#END
-	ENDMACRO;
-
-    /*
-    HDFSConnector.PipeOutAndMerge - writes the given recordset 'ECL_RS' to the target HDFS
-                                    system in file parts and merges them together to form a
-                                    single target file on the HDFS system.
-
-    ECL_RS          - The ECL recordset to pipe out.
-    HadoopFileName  - The fully qualified target HDFS file name.
-    Layout          - The structure which describes the ECL_RS recordset.
-    HadoopFileFormat- The Hadoop data file format : FLAT | CSV
-    HDFSHost        - The Hadoop DFS host name or IP address.
-    HDSFPort        - The Hadoop DFS port number.
-    HDFSUser        - The HDFS username used to log in to HDFS in order to write the file;
-                        must have permission to write to the target HDFS location.
-
-    Example:
-
-    HDFSConnector.PipeOutAndMerge(sue, '/user/hadoop/HDFSAccounts', Layout_CSV_Accounts, CSV, '192.168.56.102', '54310', 'hadoop');
-    HDFSConnector.PipeOutAndMerge(sue, '/user/hadoop/HDFSPersons', Layout_Flat_Persons, FLAT, '192.168.56.102', '54310', 'hadoop');
-    */
-
-	export PipeOutAndMerge(ECL_RS, HadoopFileName, Layout, HadoopFileFormat, HDFSHost, HDSFPort, HDFSUser) := MACRO
-	#uniquename(formatstr)
-	#uniquename(outpartaction)
-	#uniquename(mergepartsaction)
-		%formatstr% := STD.Str.FilterOut(#TEXT(HadoopFileFormat), ' \t\n\r');
-		#IF(%formatstr%[1..4] != 'FLAT')
-		//%mergepartsaction% :=DISTRIBUTE(ECL_RS , 1);
-		%outpartaction%:=OUTPUT(ECL_RS,,
-				PIPE('hdfspipe -sop '
-				+ ' -host ' + HDFSHost
-				+ ' -port ' + HDSFPort
-				+ ' -filename ' + HadoopFileName
-				+ ' -nodeid ' + STD.system.Thorlib.node()
-				+ ' -clustercount ' + STD.system.Thorlib.nodes()
-				+ ' -hdfsuser ' + HDFSUser, HadoopFileFormat));
-
-				%mergepartsaction%:=OUTPUT(PIPE('hdfspipe -mf'
-				 + ' -nodeid ' + STD.system.Thorlib.node()
-				 + ' -clustercount ' + STD.system.Thorlib.nodes()
-				 + ' -filename ' + HadoopFileName
-				 + ' -cleanmerge  1'
-				 + ' -hdfsuser ' + HDFSUser
-				 + ' -host ' + HDFSHost 	+ ' -port ' + HDSFPort, Layout));
-				 SEQUENTIAL(%outpartaction%, %mergepartsaction%);
-		#ELSE
-		%outpartaction%:=OUTPUT(ECL_RS,,
-				PIPE('hdfspipe -sop '
-				+ ' -host ' + HDFSHost
-				+ ' -port ' + HDSFPort
-				+ ' -filename ' + HadoopFileName
-				+ ' -nodeid ' + STD.system.Thorlib.node()
-				+ ' -clustercount ' + STD.system.Thorlib.nodes()
-				+ ' -hdfsuser ' + HDFSUser));
-
-				%mergepartsaction%:=OUTPUT(PIPE('hdfspipe -mf'
-				 + ' -nodeid ' + STD.system.Thorlib.node()
-				 + ' -clustercount ' + STD.system.Thorlib.nodes()
-				 + ' -filename ' + HadoopFileName
-				 + ' -cleanmerge  1'
-				 + ' -hdfsuser ' + HDFSUser
-				 + ' -host ' + HDFSHost 	+ ' -port ' + HDSFPort, Layout));
-				 SEQUENTIAL(%outpartaction%, %mergepartsaction%);
-		#END
-	ENDMACRO;
-
-END;
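
The comments above include PipeOut examples but none for PipeIn; a hedged sketch of what a PipeIn call looked like, mirroring those examples (Layout_CSV_Accounts, the host, and the port are assumptions carried over from them):

    // Hypothetical PipeIn usage; the macro defines 'accounts' as a PIPE read,
    // so it is not declared beforehand. Assumes HDFSConnector is importable.
    IMPORT std;
    #option('pickBestEngine', 0); // per the header comment above
    HDFSConnector.PipeIn(accounts, '/user/hadoop/HDFSAccounts',
                         Layout_CSV_Accounts, CSV, '192.168.56.102', '54310');
    OUTPUT(accounts);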

+ 0 - 2
plugins/dataconnectors/hdfsconnector/hdfsconnector.conf.in

@@ -1,2 +0,0 @@
-HADOOP_LOCATION=${HADOOP_PATH}
-LOGS_LOCATION=$log

+ 0 - 1171
plugins/dataconnectors/hdfsconnector/hdfsconnector.cpp

File diff suppressed because it is too large


+ 0 - 112
plugins/dataconnectors/hdfsconnector/hdfspipe.in

@@ -1,112 +0,0 @@
-#!/bin/bash
-
-source @HPCC_ETC_DIR@/init.d/hpcc_common
-set_environmentvars
-
-source @HPCC_CONF_DIR@/@HDFSCONN_CONF_FILE@ 
-
-CLASSPATH=$CLASSPATH:$HADOOP_LOCATION/conf
-
-for f in $HADOOP_LOCATION/*.jar ; do
-	CLASSPATH=$CLASSPATH:$f
-done
-
-for f in $HADOOP_LOCATION/lib/*.jar ; do
-	CLASSPATH=$CLASSPATH:$f
-done
-
-export CLASSPATH=$CLASSPATH
-
-PID=$$
-
-idfound=0;
-nodeid=0;
-
-for p in $*;
- do
-   if [ "$idfound" = "1" ];
-   then
-        nodeid=$p;
-        idfound=0;
-   elif [ "$p" = "-nodeid" ];
-   then
-	idfound=1;
-   fi
-done;
-
-#the log variable is read from the HPCC Platform config
-LOGS_LOCATION=$log
-HDFSCONNLOGLOC=$LOGS_LOCATION/mydataconnectors
-LOG=$HDFSCONNLOGLOC/HDFSCONNECTOR.$nodeid.$PID.log
-
-if [ -e $HDFSCONNLOGLOC ]
-  then
-    echo "log file found"	>> $LOG
-  else
-    mkdir $HDFSCONNLOGLOC
-fi
-
-echo "Script starting"		>> $LOG
-echo "Running as user: $USER"   >> $LOG
-echo "Running mode: $run_mode"  >> $LOG
-echo "Incoming params: ${@}"    >> $LOG
-echo "nodeid: $nodeid" 		>> $LOG
-
-if [ "$1" = "" ];
-then
-	echo "Running without input params" >> $LOG
-	exit 1;
-elif [ $1 = "-mf" ];
-then
-	/opt/HPCCSystems/bin/hdfsconnector "${@}" 2>> $LOG;
-elif [ $1 = "-si" ];
-then
-	/opt/HPCCSystems/bin/hdfsconnector  "${@}" 2>> $LOG;
-elif [ $1 = "-so" ];
-then
-
-	HPCCTMPFILE=/tmp/HPCCTMPFILE_$nodeid;
-	if [ -f $HPCCTMPFILE ]
-	then
-		rm "$HPCCTMPFILE" 			2>> $LOG
-		echo "deleted tmp file" 		>> $LOG
-	else
-		echo "$HPCCTMPFILE does not exist" 	>> $LOG
-	fi
-
-	echo "redirecting stdin to tmp file ... " 	>> $LOG
-	cat /dev/stdin > "$HPCCTMPFILE"
-
-	ls -l "$HPCCTMPFILE" 				>> $LOG
-
-	echo "calling hdfsconnector..." 		>> $LOG
-
-	/opt/HPCCSystems/bin/hdfsconnector "${@}" -pipepath $HPCCTMPFILE  	2>> $LOG
-
-	echo "write exited with: $?" 			>> $LOG
-elif [ $1 = "-sop" ];
-then
-	pipepath=/tmp/HPCCPIPE_$nodeid;
-	mkfifo $pipepath 2> /tmp/HPCC-FIFO.err.$PID;
-	chmod 666 $pipepath 2> /tmp/HPCC-FIFO.err.$PID;
-
-	echo "mkfifo $pipepath setup ..." 		>> $LOG
-
-	if test ! -s "/tmp/HPCC-FIFO.err.$PID"
-	then
-		rm -f /tmp/HPCC-FIFO.err.$PID 2> /dev/null
-	else
-		echo "  WARNING (hdfsconnector mkfifo) error registered in file: /tmp/HPCC-FIFO.err.$PID " >> $LOG
-	fi
-	/opt/HPCCSystems/bin/hdfsconnector  "${@}" -pipepath $pipepath	2>> $LOG &
-	echo "redirecting stdin to named pipe ... " 	>> $LOG
-	cat < /dev/stdin > "$pipepath"			2>> $LOG
-
-	echo "write exited with: $?" 			>> $LOG
-
-else
-	echo "Error: check your params." 		>> $LOG;
-	exit 1;
-fi
-
-exit 0;
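
The script is normally driven by the ECL macros above rather than invoked by hand; for illustration, the kind of command line PipeIn assembled (all values hypothetical):

    # One node of a 3-node cluster reading a CSV file; diagnostics go to
    # $LOGS_LOCATION/mydataconnectors/HDFSCONNECTOR.<nodeid>.<pid>.log.
    hdfspipe -si -nodeid 0 -clustercount 3 \
             -filename /user/hadoop/HDFSAccounts \
             -format CSV -maxlen 4096 \
             -host 192.168.56.102 -port 54310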

+ 0 - 11
plugins/dbconnectors/hpccjdbc/src/com/hpccsystems/jdbcdriver/HPCCVersionTracker.java

@@ -1,11 +0,0 @@
-package com.hpccsystems.jdbcdriver;
-
-public class HPCCVersionTracker
-{
-	static final String HPCCProject 	= "PLACEHOLDER";
-	static final int HPCCMajor 			= 0;
-	static final int HPCCMinor 			= 0;
-	static final int HPCCPoint 			= 0;
-	static final String HPCCPMaturity 	= "PLACEHOLDER";
-	static final int HPCCSequence 		= 0;
-}