
Merge pull request #5252 from ghalliday/issue10412

HPCC-10412 Fix problems with #elseif and catch multiple #else

Reviewed-By: Jamie Noss <james.noss@lexisnexis.com>
Reviewed-By: Richard Chapman <rchapman@hpccsystems.com>
Richard Chapman 11 years ago
commit 5b94949dea
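
In outline: the old lexer tracked each open #IF with a depth counter (hashendDepths) and counted every #ELSEIF as one extra level, so #ELSEIF chains confused the skip logic and a second #ELSE inside the same #IF was never diagnosed. This commit replaces the counter with a flag word per open #IF (hashendFlags): HEFhadtrue records that some branch has already been taken, HEFhadelse that an #ELSE has been seen. Below is a minimal, self-contained sketch of that selection logic for a single #IF/#ELSEIF/#ELSE chain; it is an illustration only, and every name not in the diff (IfState, onIf, onElseIf, onElse) is invented here, not part of HqlLex.

    #include <cstdio>

    // Flag values mirror the enum added to hqlgram.hpp in this commit.
    enum { HEFhadtrue = 0x0001, HEFhadelse = 0x0002 };

    // Illustrative model of one open #IF: which branch gets expanded, and
    // when a duplicate #ELSE should be reported.
    struct IfState
    {
        unsigned flags = 0;        // per-#IF flag word (one hashendFlags entry)
        bool skipping = false;     // stands in for skipNesting != 0

        void onIf(bool cond)
        {
            flags = cond ? HEFhadtrue : 0;
            skipping = !cond;
        }
        // Returns true if this #ELSEIF branch is expanded.
        bool onElseIf(bool cond)
        {
            if (flags & HEFhadtrue)            // a branch was already taken
                { skipping = true; return false; }
            if (cond)
                { flags |= HEFhadtrue; skipping = false; return true; }
            skipping = true;
            return false;
        }
        // Returns false if this is a duplicate #ELSE (the case now reported).
        bool onElse()
        {
            bool duplicate = (flags & HEFhadelse) != 0;
            flags |= HEFhadelse;
            if (flags & HEFhadtrue)
                skipping = true;               // an earlier branch was expanded
            else
                { flags |= HEFhadtrue; skipping = false; }
            return !duplicate;
        }
    };

    int main()
    {
        // Roughly the shape of ecl/regress/issue10412f.ecl below:
        IfState s;
        s.onIf(false);                         // #IF (FALSE)   -> skip
        if (!s.onElse())                       // #ELSE         -> expanded
            printf("unexpected\n");
        if (!s.onElse())                       // second #ELSE  -> diagnosed
            printf("Multiple #ELSE for the same #IF\n");
        return 0;
    }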

+ 16 - 6
ecl/hql/hqlgram.hpp

@@ -998,6 +998,12 @@ protected:
 typedef void* yyscan_t;
 #endif
 
+enum
+{
+    HEFhadtrue = 0x0001,
+    HEFhadelse = 0x0002,
+};
+
 class HqlLex
 {
     public:
@@ -1097,9 +1103,10 @@ class HqlLex
         void reportError(const YYSTYPE & returnToken, int errNo, const char *format, ...) __attribute__((format(printf, 4, 5)));
         void reportWarning(const YYSTYPE & returnToken, int warnNo, const char *format, ...) __attribute__((format(printf, 4, 5)));
 
-        void beginNestedHash(unsigned kind) { hashendKinds.append(kind); hashendDepths.append(1); }
-        unsigned endNestedHash() { hashendKinds.pop(); return hashendDepths.pop(); }
-        void clearNestedHash() { hashendKinds.kill(); hashendDepths.kill(); }
+        void beginNestedHash(unsigned kind) { hashendKinds.append(kind); hashendFlags.append(0); }
+        void endNestedHash() { hashendKinds.pop(); hashendFlags.pop(); }
+        void clearNestedHash() { hashendKinds.kill(); hashendFlags.kill(); }
+        void setHashEndFlags(unsigned i) { if (hashendFlags.ordinality()) { hashendFlags.pop(); hashendFlags.append(i); } }
 
         inline bool parserExpecting(int tok, const short * activeState)
         {
@@ -1131,6 +1138,7 @@ class HqlLex
         void doPreprocessorLookup(const YYSTYPE & errpos, bool stringify, int extra);
         void doApply(YYSTYPE & returnToken);
         int doElse(YYSTYPE & returnToken, bool lookup, const short * activeState, bool isElseIf);
+        int doEnd(YYSTYPE & returnToken, bool lookup, const short * activeState);
         void doExpand(YYSTYPE & returnToken);
         void doTrace(YYSTYPE & returnToken);
         void doError(YYSTYPE & returnToken, bool isError);
@@ -1138,7 +1146,7 @@ class HqlLex
         void doFor(YYSTYPE & returnToken, bool doAll);
         int doHashText(YYSTYPE & returnToken);
         void doLoop(YYSTYPE & returnToken);
-        void doIf(YYSTYPE & returnToken);
+        void doIf(YYSTYPE & returnToken, bool isElseIf);
         void doSet(YYSTYPE & returnToken, bool _append);
         void doLine(YYSTYPE & returnToken);
         void doDeclare(YYSTYPE & returnToken);
@@ -1149,6 +1157,8 @@ class HqlLex
         void doInModule(YYSTYPE & returnToken);
         void doMangle(YYSTYPE & returnToken, bool de);
         void doUniqueName(YYSTYPE & returnToken);
+        void doSkipUntilEnd(YYSTYPE & returnToken, const char * forwhat);
+
         void processEncrypted();
 
         void declareUniqueName(const char* name, const char * pattern);
@@ -1179,8 +1189,8 @@ private:
         enum { HashStmtNone, HashStmtFor, HashStmtForAll, HashStmtLoop, HashStmtIf };
         int lastToken;
         int macroGathering;
-        int skipping;
-        UnsignedArray hashendDepths;
+        int skipNesting;
+        UnsignedArray hashendFlags;
         UnsignedArray hashendKinds;
         bool hasHashbreak;
         int loopTimes;

+ 50 - 61
ecl/hql/hqllex.l

@@ -87,7 +87,7 @@ int HqlLex::lookupIdentifierToken(YYSTYPE & returnToken, HqlLex * lexer, bool lo
     IHqlExpression *expr; 
     //fprintf(stderr, "\nLookupSymbol %s\n",name->str());
     //Lookup expr last otherwise it leaks.
-    if ( !lookup  || lexer->macroGathering || lexer->skipping|| !(expr=lexer->lookupSymbol(cname,returnToken)))
+    if ( !lookup  || lexer->macroGathering || lexer->skipNesting|| !(expr=lexer->lookupSymbol(cname,returnToken)))
     {
         TraceReturnToken(UNKNOWN_ID);
         returnToken.setId(cname);
@@ -299,14 +299,14 @@ xpathseq      ([^}\r\n])+
 
 #ERROR              {
                         setupdatepos; 
-                        if (lexer->macroGathering || lexer->skipping)
+                        if (lexer->macroGathering || lexer->skipNesting)
                             return SKIPPED;
                         lexer->doError(returnToken, true); 
                         return lexer->yyLex(returnToken, lookup, activeState);
                     }
 #WARNING            {
                         setupdatepos; 
-                        if (lexer->macroGathering || lexer->skipping)
+                        if (lexer->macroGathering || lexer->skipNesting)
                             return SKIPPED;
                         lexer->doError(returnToken, false); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
@@ -316,9 +316,9 @@ xpathseq      ([^}\r\n])+
                         if (lexer->macroGathering)
                             return SKIPPED;
                         lexer->beginNestedHash(HashStmtFor);
-                        if (lexer->skipping)
+                        if (lexer->skipNesting)
                         {
-                            lexer->skipping++;
+                            lexer->skipNesting++;
                             return SKIPPED;
                         }
                         lexer->doFor(returnToken, false); 
@@ -329,9 +329,9 @@ xpathseq      ([^}\r\n])+
                         if (lexer->macroGathering)
                             return SKIPPED;
                         lexer->beginNestedHash(HashStmtForAll);
-                        if (lexer->skipping)
+                        if (lexer->skipNesting)
                         {
-                            lexer->skipping++;
+                            lexer->skipNesting++;
                             return SKIPPED;
                         }
                         lexer->doFor(returnToken, true); 
@@ -342,9 +342,9 @@ xpathseq      ([^}\r\n])+
                         if (lexer->macroGathering)
                             return SKIPPED;
                         lexer->beginNestedHash(HashStmtLoop);
-                        if (lexer->skipping)
+                        if (lexer->skipNesting)
                         {
-                            lexer->skipping++;
+                            lexer->skipNesting++;
                             return SKIPPED;
                         }
                         lexer->doLoop(returnToken); 
@@ -355,7 +355,7 @@ xpathseq      ([^}\r\n])+
                         if (lexer->macroGathering)
                             return SKIPPED;
                         lexer->hasHashbreak = true;
-                        if (lexer->skipping)
+                        if (lexer->skipNesting)
                             return SKIPPED;
                         return HASHBREAK; 
                     }
@@ -364,130 +364,130 @@ xpathseq      ([^}\r\n])+
                         if (lexer->macroGathering)
                             return SKIPPED;
                         lexer->beginNestedHash(HashStmtIf);
-                        if (lexer->skipping)
+                        if (lexer->skipNesting)
                         {
-                            lexer->skipping++;
+                            lexer->skipNesting++;
                             return SKIPPED;
                         }
-                        lexer->doIf(returnToken);
+                        lexer->doIf(returnToken, false);
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #EXPAND             {
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doExpand(returnToken); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #DECLARE            { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doDeclare(returnToken); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #SET                { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doSet(returnToken, false); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #TRACE              { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doTrace(returnToken); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #EXPORT             { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doExport(returnToken, false); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #EXPORTXML              { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doExport(returnToken, true); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #MANGLE             { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doMangle(returnToken, false); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #DEMANGLE           { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doMangle(returnToken, true); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #APPLY              { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doApply(returnToken); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #APPEND             {
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doSet(returnToken, true); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #CONSTANT           { 
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         return(HASH_CONSTANT);
                     }
 #IFDEFINED          {
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doDefined(returnToken);
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #ELSE               {
                         setupdatepos;
-                        if (lexer->macroGathering) 
+                        if (lexer->macroGathering)
                             return SKIPPED;
                         return lexer->doElse(returnToken, lookup, activeState, false);
                     }
 #ELSEIF             {
                         setupdatepos;
-                        if (lexer->macroGathering) 
+                        if (lexer->macroGathering)
                             return SKIPPED;
                         return lexer->doElse(returnToken, lookup, activeState, true);
                     }
 #ELSIF              {
                         setupdatepos;
-                        if (lexer->macroGathering) 
+                        if (lexer->macroGathering)
                             return SKIPPED;
                         return lexer->doElse(returnToken, lookup, activeState, true);
                     }
 #ELIF               {
                         setupdatepos;
-                        if (lexer->macroGathering) 
+                        if (lexer->macroGathering)
                             return SKIPPED;
                         return lexer->doElse(returnToken, lookup, activeState, true);
                     }
 #DEBUG              { 
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         return(HASH_OPTION); 
                     }
 #GETDATATYPE        {
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doGetDataType(returnToken); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
@@ -495,21 +495,21 @@ xpathseq      ([^}\r\n])+
 
 #INMODULE           {
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doInModule(returnToken); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #ISVALID            {
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         lexer->doIsValid(returnToken); 
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     }
 #ISDEFINED          {
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         bool defined = lexer->doIsDefined(returnToken);
 //                      RETURNHARD(defined ? TOK_TRUE : TOK_FALSE);
@@ -523,39 +523,39 @@ xpathseq      ([^}\r\n])+
                     }
 #LINK               {
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         return(HASH_LINK); 
                     }
 #ONWARNING          { 
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         return(HASH_ONWARNING); 
                     }
 #OPTION             { 
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         return(HASH_OPTION); 
                     }
 #STORED             { 
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         return(HASH_STORED); 
                     }
 #TEXT               { 
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering || !lookup) 
+                        if (lexer->skipNesting || lexer->macroGathering || !lookup)
                             return SKIPPED; 
                         return lexer->doHashText(returnToken); 
                     }
 #UNIQUENAME         {
                         setupdatepos; 
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                         {   
-                            //PrintLog("lexer->skipping #UNIQUENAME");
+                            //PrintLog("lexer->skipNesting #UNIQUENAME");
                             return SKIPPED; 
                         }
                         lexer->doUniqueName(returnToken); 
@@ -563,7 +563,7 @@ xpathseq      ([^}\r\n])+
                     }
 #WORKUNIT           { 
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED; 
                         return(HASH_WORKUNIT); 
                     }
@@ -583,26 +583,15 @@ __LINE__            {
 #END                { 
 //Place #END last so the specialised versions are hit first
                         setupdatepos; 
-                        if (lexer->macroGathering) 
-                            return SKIPPED; 
-                        unsigned depth = 1;
-                        if (lexer->hashendDepths.ordinality() == 0)
-                            lexer->reportError(returnToken, ERR_TMPLT_EXTRAEND,"#END doesn't match a # command");
-                        else
-                            depth = lexer->endNestedHash();
-                        if (lexer->skipping)
-                        {
-                            lexer->skipping -= depth;
-                            return(HASHEND);  
-                        }
-                        
-                        return lexer->yyLex(returnToken, lookup, activeState); 
+                        if (lexer->macroGathering)
+                            return SKIPPED;
+                        return lexer->doEnd(returnToken, lookup, activeState);
                     }
 #{letter}{alphanum}* {
 //Trap any unknown #commands
                         /* otherwise, unknown # command */
                         setupdatepos; 
-                        if (lexer->skipping) 
+                        if (lexer->skipNesting)
                             return SKIPPED; 
                         StringBuffer msg("Unknown # command: ");
                         msg.append(CUR_TOKEN_TEXT);
@@ -1009,7 +998,7 @@ __STAND_ALONE__     {
                         
 {percent}{alphanumcolon}*{percent} {
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED;
                         lexer->doPreprocessorLookup(returnToken, false, 0);
                         return lexer->yyLex(returnToken, lookup, activeState); 
@@ -1017,14 +1006,14 @@ __STAND_ALONE__     {
 
 {percent}'{alphanumcolon}*'{percent} {
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED;
                         lexer->doPreprocessorLookup(returnToken, true, 1);
                         return lexer->yyLex(returnToken, lookup, activeState); 
                     };
 {percent}\{{xpathseq}\}{percent} {
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED;
                         lexer->doPreprocessorLookup(returnToken, false, 1);
                         return lexer->yyLex(returnToken, lookup, activeState); 
@@ -1032,7 +1021,7 @@ __STAND_ALONE__     {
 
 {percent}'\{{xpathseq}\}'{percent} {
                         setupdatepos;
-                        if (lexer->skipping || lexer->macroGathering) 
+                        if (lexer->skipNesting || lexer->macroGathering)
                             return SKIPPED;
                         lexer->doPreprocessorLookup(returnToken, true, 2);
                         return lexer->yyLex(returnToken, lookup, activeState); 

+ 72 - 46
ecl/hql/hqlparse.cpp

@@ -172,7 +172,7 @@ void HqlLex::init(IFileContents * _text)
     hasHashbreak = false;
     encrypted = false;
     loopTimes = 0;
-    skipping = 0;
+    skipNesting = 0;
     macroGathering = 0;
     forLoop = NULL;
 
@@ -666,7 +666,24 @@ bool HqlLex::getParameter(StringBuffer &curParam, const char* for_what, int* sta
     }
 }
 
-void HqlLex::doIf(YYSTYPE & returnToken)
+void HqlLex::doSkipUntilEnd(YYSTYPE & returnToken, const char * forwhat)
+{
+    while (skipNesting)
+    {
+        int tok = yyLex(returnToken, false,0);
+        returnToken.release();
+        if (tok == EOF)
+        {
+            StringBuffer msg;
+            msg.appendf("Unexpected EOF in %s: #END expected",forwhat);
+            reportError(returnToken, ERR_TMPLT_HASHENDEXPECTED, "%s", msg.str());
+            clearNestedHash();      // prevent unnecessary more error messages
+            break;
+        }
+    }
+}
+
+void HqlLex::doIf(YYSTYPE & returnToken, bool isElseIf)
 {
     StringBuffer forwhat; 
     int line = returnToken.pos.lineno, col = returnToken.pos.column;
@@ -684,25 +701,16 @@ void HqlLex::doIf(YYSTYPE & returnToken)
             ;
     }
     curParam.append(')');
-    IValue *value = parseConstExpression(returnToken, curParam, queryTopXmlScope(),line,col);
+    Owned<IValue> value = parseConstExpression(returnToken, curParam, queryTopXmlScope(),line,col);
     if (value && !value->getBoolValue())
     {
-        skipping = 1;
-        while (skipping)
-        {
-            int tok = yyLex(returnToken, false,0);
-            returnToken.release();
-            if (tok == EOF)
-            {
-                StringBuffer msg;
-                msg.appendf("Unexpected EOF in %s: #END expected",forwhat.str());
-                reportError(returnToken, ERR_TMPLT_HASHENDEXPECTED, "%s", msg.str());
-                clearNestedHash();      // prevent unnecessary more error messages
-                break;
-            }
-        }
+        setHashEndFlags(0);
+        skipNesting = 1;
+        if (!isElseIf)
+            doSkipUntilEnd(returnToken, forwhat);
     }
-    ::Release(value);
+    else
+        setHashEndFlags(HEFhadtrue);
 }
 
 int HqlLex::doElse(YYSTYPE & returnToken, bool lookup, const short * activeState, bool isElseIf)
@@ -716,38 +724,56 @@ int HqlLex::doElse(YYSTYPE & returnToken, bool lookup, const short * activeState
         return SKIPPED;
     }
 
-    if (isElseIf)
-        hashendDepths.append(hashendDepths.pop()+1);
+    unsigned flags = hashendFlags.tos();
+    if (!isElseIf)
+    {
+        if (flags & HEFhadelse)
+            reportError(returnToken, ERR_TMPLT_EXTRAELSE,"Multiple #ELSE for the same #IF");
+        setHashEndFlags(flags|HEFhadelse);
+    }
 
-    switch (skipping)
+    switch (skipNesting)
     {
     case 0:
-        skipping = hashendDepths.tos();
-        while (skipping)
-        {
-            int tok = yyLex(returnToken, lookup, activeState);
-            returnToken.release();
-            if (tok == EOF)
-            {
-                forwhat.insert(0,"Unexpected EOF in ").append(": #END expected");
-                reportError(returnToken, ERR_TMPLT_HASHENDEXPECTED, "%s", forwhat.str());
-                clearNestedHash();      // prevent unnecessary more error messages
-                return tok;
-            }
-        }
+        skipNesting = 1;
+        doSkipUntilEnd(returnToken, forwhat);
         return yyLex(returnToken, lookup, activeState);
     case 1:
-        skipping = 0;
-        if (isElseIf)
-            doIf(returnToken);
+        if (flags & HEFhadtrue)
+        {
+            //Don't need to do anything
+        }
+        else
+        {
+            skipNesting = 0;
+            if (isElseIf)
+                doIf(returnToken, true);
+            else
+                setHashEndFlags(HEFhadtrue|HEFhadelse);
+        }
         return SKIPPED;     // looks wrong, but called within a doIf() loop, and first return is ignored
     default:
-        if (isElseIf)
-            skipping++;
         return SKIPPED;
     }
 }
 
+int HqlLex::doEnd(YYSTYPE & returnToken, bool lookup, const short * activeState)
+{
+    if (hashendKinds.ordinality() != 0)
+    {
+        endNestedHash();
+        if (skipNesting)
+        {
+            skipNesting -= 1;
+            return(HASHEND);
+        }
+    }
+    else
+        reportError(returnToken, ERR_TMPLT_EXTRAEND,"#END doesn't match a # command");
+
+    return yyLex(returnToken, lookup, activeState);
+}
+
 void HqlLex::doDeclare(YYSTYPE & returnToken)
 {
     StringBuffer forwhat;
@@ -1098,7 +1124,7 @@ void HqlLex::doFor(YYSTYPE & returnToken, bool doAll)
 
     StringBuffer forFilterText;
     // Note - we gather the for filter and body in skip mode (deferring evaluation of #if etc) since the context will be different each time...
-    skipping = 1;
+    skipNesting = 1;
     int tok = yyLex(returnToken, false,0);
     if (tok == '(')
     {
@@ -1134,7 +1160,7 @@ void HqlLex::doFor(YYSTYPE & returnToken, bool doAll)
             clearNestedHash();      // prevent unnecessary more error messages
             return;
         }
-        if (tok == HASHEND && !skipping)
+        if (tok == HASHEND && !skipNesting)
             break;
         forBodyText.append(' ');
         getTokenText(forBodyText);
@@ -1162,7 +1188,7 @@ void HqlLex::doLoop(YYSTYPE & returnToken)
     // Now gather the tokens we are going to repeat...
     StringBuffer forBodyText;
     // Note - we gather the for filter and body in skip mode (deferring evaluation of #if etc) since the context will be different each time...
-    skipping = 1;
+    skipNesting = 1;
     hasHashbreak = false;
     for (;;)
     {
@@ -1179,7 +1205,7 @@ void HqlLex::doLoop(YYSTYPE & returnToken)
             clearNestedHash();      // prevent unnecessary more error messages
             return;
         }
-        if (tok == HASHEND && !skipping)
+        if (tok == HASHEND && !skipNesting)
             break;
         forBodyText.append(' ');
         getTokenText(forBodyText);
@@ -2148,12 +2174,12 @@ int HqlLex::yyLex(YYSTYPE & returnToken, bool lookup, const short * activeState)
                 reportError(returnToken, ERR_COMMENT_UNENDED,"Comment is not terminated");
             else if (inCpp)
                 reportError(returnToken, ERR_COMMENT_UNENDED,"BEGINC++ or EMBED is not terminated");
-            if (hashendDepths.ordinality())
+            if (hashendKinds.ordinality())
             {
                 StringBuffer msg("Unexpected EOF: ");
-                msg.append(hashendDepths.ordinality()).append(" more #END needed");
+                msg.append(hashendKinds.ordinality()).append(" more #END needed");
                 reportError(returnToken, ERR_TMPLT_HASHENDEXPECTED, "%s", msg.str());
-                hashendDepths.kill(); // prevent unnecessary more error messages
+                clearNestedHash();
             }
         }
 

+ 9 - 0
ecl/regress/issue10412a.ecl

@@ -0,0 +1,9 @@
+T := 0
+#IF (FALSE)
+  + 1
+#ELSEIF (FALSE)
+  + 2
+#ELSE
+#END
+;
+OUTPUT(T);

+ 8 - 0
ecl/regress/issue10412b.ecl

@@ -0,0 +1,8 @@
+T := 0
+#IF (FALSE)
+  + 1
+#ELSEIF (FALSE)
+  + 2
+#END
+;
+OUTPUT(T);

+ 12 - 0
ecl/regress/issue10412c.ecl

@@ -0,0 +1,12 @@
+T := 0
+#IF (FALSE)
+  + 1
+#ELSEIF (FALSE)
+  + 2
+#ELSEIF (FALSE)
+  + 4
+#ELSEIF (FALSE)
+  + 8
+#END
+;
+OUTPUT(T);

+ 19 - 0
ecl/regress/issue10412d.ecl

@@ -0,0 +1,19 @@
+T := 0
+#IF (FALSE)
+  + 1
+#ELSEIF (FALSE)
+  + 2
+#ELSEIF (FALSE)
+  + 4
+#ELSEIF (FALSE)
+  + 8
+#IF (FALSE)
+  + 16
+#ELSEIF (FALSE)
+  + 32
+#ELSEIF (FALSE)
+  + 64
+#END
+#END
+;
+OUTPUT(T);

+ 20 - 0
ecl/regress/issue10412e.ecl

@@ -0,0 +1,20 @@
+T := 0
+#IF (FALSE)
+  + 1
+#ELSEIF (FALSE)
+  + 2
+#ELSEIF (FALSE)
+  + 4
+#ELSEIF (FALSE)
+  + 8
+#ELSE
+#IF (FALSE)
+  + 16
+#ELSEIF (FALSE)
+  + 32
+#ELSEIF (FALSE)
+  + 64
+#END
+#END
+;
+OUTPUT(T);

+ 12 - 0
ecl/regress/issue10412f.ecl

@@ -0,0 +1,12 @@
+T := 1
+#IF (FALSE)
+  + 1
+#ELSE
+  + 10
+#ELSE
+  + 100
+#ELSE
+  + 1000
+#END
+;
+OUTPUT(T);

+ 12 - 0
ecl/regress/issue10412g.ecl

@@ -0,0 +1,12 @@
+T := 0
+#IF (TRUE)
+  + 1
+#ELSEIF (TRUE)
+  + 2
+#ELSEIF (TRUE)
+  + 4
+#ELSEIF (TRUE)
+  + 8
+#END
+;
+OUTPUT(T);

+ 12 - 0
ecl/regress/issue10412h.ecl

@@ -0,0 +1,12 @@
+T := 0
+#IF (FALSE)
+  + 1
+#ELSEIF (TRUE)
+  + 2
+#ELSEIF (TRUE)
+  + 4
+#ELSEIF (TRUE)
+  + 8
+#END
+;
+OUTPUT(T);
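
The last two tests (issue10412g.ecl and issue10412h.ecl) check that only the first true branch of a chain is expanded: once HEFhadtrue is recorded, later #ELSEIF branches are skipped even when their conditions are also true. A hypothetical walk-through using the IfState sketch from the top of this page:

    IfState s;
    s.onIf(true);          // #IF (TRUE)     -> expanded, HEFhadtrue set
    s.onElseIf(true);      // #ELSEIF (TRUE) -> skipped: a branch was already taken
    s.onElseIf(true);      // #ELSEIF (TRUE) -> skipped
    s.onElseIf(true);      // #ELSEIF (TRUE) -> skipped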