From 179bdfd91d10bb8a0c56307a7675a3a97f8a324c Mon Sep 17 00:00:00 2001 From: Karim Taha Date: Fri, 28 Jun 2024 22:51:57 +0300 Subject: [PATCH] Introduce catchTok and modify several definitions --- .../src/Happy/Backend/GLR/ProduceCode.lhs | 14 +- packages/backend-lalr/data/HappyTemplate.hs | 216 +++- .../backend-lalr/src/Happy/Backend/LALR.hs | 4 +- .../src/Happy/Backend/LALR/ProduceCode.lhs | 458 +++---- .../src/Happy/CodeGen/Common/Options.lhs | 23 +- packages/frontend/boot-src/Parser.ly | 10 +- packages/frontend/src/Happy/Frontend.hs | 7 + .../frontend/src/Happy/Frontend/AbsSyn.lhs | 35 +- .../src/Happy/Frontend/AttrGrammar/Parser.hs | 792 ++++++------ .../frontend/src/Happy/Frontend/Lexer.lhs | 55 +- .../frontend/src/Happy/Frontend/Mangler.lhs | 57 +- .../frontend/src/Happy/Frontend/Parser.hs | 1076 +++++++++-------- packages/grammar/src/Happy/Grammar.lhs | 21 +- packages/tabular/src/Happy/Tabular.lhs | 2 +- packages/tabular/src/Happy/Tabular/First.lhs | 3 +- packages/tabular/src/Happy/Tabular/LALR.lhs | 20 +- tests/issue265.y | 80 ++ tests/monaderror-explist.y | 8 +- tests/monaderror-resume.y | 116 ++ 19 files changed, 1761 insertions(+), 1236 deletions(-) create mode 100644 tests/issue265.y create mode 100644 tests/monaderror-resume.y diff --git a/packages/backend-glr/src/Happy/Backend/GLR/ProduceCode.lhs b/packages/backend-glr/src/Happy/Backend/GLR/ProduceCode.lhs index 7d56d51b..1d43ebaf 100644 --- a/packages/backend-glr/src/Happy/Backend/GLR/ProduceCode.lhs +++ b/packages/backend-glr/src/Happy/Backend/GLR/ProduceCode.lhs @@ -94,7 +94,7 @@ the driver and data strs (large template). > -> CommonOptions -- Happy.CodeGen.Common.Options > -> (String -- data > ,String) -- parser -> + > produceGLRParser (base, lib) basename tables start header trailer (debug,options) g common_options > = ( content base $ "" > , lib_content lib @@ -103,7 +103,7 @@ the driver and data strs (large template). > (imps, lang_exts) = case ghcExts_opt of > UseGhcExts is os -> (is, os) > _ -> ("", []) -> + > defines = concat > [ [ "HAPPY_DEBUG" | debug ] > , [ "HAPPY_GHC" | UseGhcExts _ _ <- return ghcExts_opt ] @@ -251,7 +251,7 @@ Formats the tables as code. > -> GhcExts -- Use unboxed values? > -> Grammar -- Happy Grammar > -> ShowS -> + > mkTbls (action,goto) sem_info exts g > = let gsMap = mkGSymMap g > semfn_map = mk_semfn_map sem_info @@ -303,10 +303,12 @@ It also shares identical reduction values as CAFs > errorLine = name ++ " _ _ = Error" > mkState (i,arr) > = filter (/="") $ map (mkLine i) (assocs arr) -> + > mkLine state (symInt,action) > | symInt == errorTok -- skip error productions > = "" -- NB see ProduceCode's handling of these +> | symInt == catchTok -- skip error productions +> = "" -- NB see ProduceCode's handling of these > | otherwise > = case action of > LR'Fail -> "" @@ -356,10 +358,10 @@ Do the same with the Happy goto table. 
> name = "goto" > errorLine = "goto _ _ = " ++ show_st exts (negate 1) > mkLines = map mkState (assocs goTbl) -> + > mkState (i,arr) > = unlines $ filter (/="") $ map (mkLine i) (assocs arr) -> + > mkLine state (ntInt,goto) > = case goto of > NoGoto -> "" diff --git a/packages/backend-lalr/data/HappyTemplate.hs b/packages/backend-lalr/data/HappyTemplate.hs index 9e2bdefd..5338aff2 100644 --- a/packages/backend-lalr/data/HappyTemplate.hs +++ b/packages/backend-lalr/data/HappyTemplate.hs @@ -23,6 +23,7 @@ type Happy_Int = Happy_GHC_Exts.Int# data Happy_IntList = HappyCons Happy_Int Happy_IntList #define ERROR_TOK 0# +#define CATCH_TOK 1# #if defined(HAPPY_COERCE) # define GET_ERROR_TOKEN(x) (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# i) -> i }) @@ -71,7 +72,7 @@ happyDoAction i tk st = ",\taction: ") case happyDecodeAction (happyNextAction i st) of HappyFail -> DEBUG_TRACE("failing.\n") - happyFail (happyExpListPerState (Happy_GHC_Exts.I# st)) i tk st + happyFail i tk st HappyAccept -> DEBUG_TRACE("accept.\n") happyAccept i tk st HappyReduce rule -> DEBUG_TRACE("reduce (rule " ++ show (Happy_GHC_Exts.I# rule) ++ ")") @@ -98,6 +99,7 @@ data HappyAction | HappyAccept | HappyReduce Happy_Int -- rule number | HappyShift Happy_Int -- new state + deriving Show {-# INLINE happyDecodeAction #-} happyDecodeAction :: Happy_Int -> HappyAction @@ -132,6 +134,13 @@ happyIndexOffAddr (HappyA# arr) off = (Happy_GHC_Exts.indexInt32OffAddr# arr off) #endif +indexRuleArr arr r = (Happy_GHC_Exts.I# nt, Happy_GHC_Exts.I# len) + where + (Happy_GHC_Exts.I# n_starts) = happy_n_starts + offs = TIMES(MINUS(r,n_starts),2#) + nt = happyIndexOffAddr arr offs + len = happyIndexOffAddr arr PLUS(offs,1#) + {-# INLINE happyLt #-} happyLt x y = LT(x,y) @@ -146,51 +155,44 @@ data HappyAddr = HappyA# Happy_GHC_Exts.Addr# happyShift new_state ERROR_TOK tk st sts stk@(x `HappyStk` _) = let i = GET_ERROR_TOKEN(x) in --- trace "shifting the error token" $ + DEBUG_TRACE("shifting the error token") happyDoAction i tk new_state (HappyCons st sts) stk +-- TODO: When `i` would enter error recovery again, we should instead +-- discard input until the lookahead is acceptable. Perhaps this is +-- simplest to implement in CodeGen for productions using `error`; +-- there we know the context and can implement local shift+discard actions. +-- still need to remember parser-defined error site, though. happyShift new_state i tk st sts stk = happyNewToken new_state (HappyCons st sts) (MK_TOKEN(tk) `HappyStk` stk) -- happyReduce is specialised for the common cases. 
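-- For intuition, an illustrative sketch (not part of this patch; mkRuleList and
-- indexRule are made-up names) of the rule table read by indexRuleArr above:
-- happyRuleArr, emitted further down by the code generator, stores one
-- (nonterminal, rule length) pair per non-start rule, flattened pairwise.

-- Boxed model of the flattened rule table.
mkRuleList :: [(Int, Int)] -> [Int]
mkRuleList rules = concat [ [nt, len] | (nt, len) <- rules ]

-- Boxed model of indexRuleArr: rule r (with r >= nStarts) lives at
-- offsets 2*(r - nStarts) and 2*(r - nStarts) + 1.
indexRule :: Int -> [Int] -> Int -> (Int, Int)
indexRule nStarts arr r = (arr !! offs, arr !! (offs + 1))
  where offs = (r - nStarts) * 2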
-happySpecReduce_0 i fn ERROR_TOK tk st sts stk - = happyFail [] ERROR_TOK tk st sts stk happySpecReduce_0 nt fn j tk st sts stk - = happyGoto nt j tk st (HappyCons st sts) (fn `HappyStk` stk) + = happySeq fn (happyGoto nt j tk st (HappyCons st sts) (fn `HappyStk` stk)) -happySpecReduce_1 i fn ERROR_TOK tk st sts stk - = happyFail [] ERROR_TOK tk st sts stk -happySpecReduce_1 nt fn j tk _ sts@(HappyCons st _) (v1 `HappyStk` stk') +happySpecReduce_1 nt fn j tk old_st sts@(HappyCons st _) (v1 `HappyStk` stk') = let r = fn v1 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happySpecReduce_2 i fn ERROR_TOK tk st sts stk - = happyFail [] ERROR_TOK tk st sts stk -happySpecReduce_2 nt fn j tk _ +happySpecReduce_2 nt fn j tk old_st (HappyCons _ sts@(HappyCons st _)) (v1 `HappyStk` v2 `HappyStk` stk') = let r = fn v1 v2 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happySpecReduce_3 i fn ERROR_TOK tk st sts stk - = happyFail [] ERROR_TOK tk st sts stk -happySpecReduce_3 nt fn j tk _ +happySpecReduce_3 nt fn j tk old_st (HappyCons _ (HappyCons _ sts@(HappyCons st _))) (v1 `HappyStk` v2 `HappyStk` v3 `HappyStk` stk') = let r = fn v1 v2 v3 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happyReduce k i fn ERROR_TOK tk st sts stk - = happyFail [] ERROR_TOK tk st sts stk happyReduce k nt fn j tk st sts stk - = case happyDrop MINUS(k,(1# :: Happy_Int)) sts of + = case happyDrop k (HappyCons st sts) of sts1@(HappyCons st1 _) -> let r = fn stk in -- it doesn't hurt to always seq here... happyDoSeq r (happyGoto nt j tk st1 sts1 r) -happyMonadReduce k nt fn ERROR_TOK tk st sts stk - = happyFail [] ERROR_TOK tk st sts stk happyMonadReduce k nt fn j tk st sts stk = case happyDrop k (HappyCons st sts) of sts1@(HappyCons st1 _) -> @@ -198,15 +200,11 @@ happyMonadReduce k nt fn j tk st sts stk = happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) -happyMonad2Reduce k nt fn ERROR_TOK tk st sts stk - = happyFail [] ERROR_TOK tk st sts stk happyMonad2Reduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons st sts) of + j `happyTcHack` case happyDrop k (HappyCons st sts) of sts1@(HappyCons st1 _) -> let drop_stk = happyDropStk k stk - off = happyAdjustOffset (happyIndexOffAddr happyGotoOffsets st1) - off_i = PLUS(off, nt) - new_state = happyIndexOffAddr happyTable off_i + new_state = happyIndexGotoTable nt st1 in happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) @@ -226,30 +224,144 @@ happyGoto nt j tk st = where new_state = happyIndexGotoTable nt st ----------------------------------------------------------------------------- --- Error recovery (ERROR_TOK is the error token) - --- parse error if we are in recovery and we fail again -happyFail explist ERROR_TOK tk old_st _ stk@(x `HappyStk` _) = - let i = GET_ERROR_TOKEN(x) in --- trace "failing" $ - happyError_ explist i tk - -{- We don't need state discarding for our restricted implementation of - "error". 
In fact, it can cause some bogus parses, so I've disabled it
-   for now --SDM
-
--- discard a state
-happyFail ERROR_TOK tk old_st (HappyCons action sts)
-                              (saved_tok `HappyStk` _ `HappyStk` stk) =
---     trace ("discarding state, depth " ++ show (length stk)) $
-       happyDoAction ERROR_TOK tk action sts (saved_tok`HappyStk`stk)
--}
-
--- Enter error recovery: generate an error token,
---     save the old token and carry on.
-happyFail explist i tk action sts stk =
---     trace "entering error recovery" $
-       happyDoAction ERROR_TOK tk action sts (MK_ERROR_TOKEN(i) `HappyStk` stk)
+-- Error recovery
+--
+-- When there is no applicable action for the current lookahead token `tk`,
+-- happy enters error recovery mode. It works in 2 phases:
+--
+-- 1. Fixup: Try to see if there is an action for the error token (`errorTok`,
+--    which is ERROR_TOK). If there is, do *not* emit an error and pretend
+--    instead that an `errorTok` was inserted.
+--    When there is no `errorTok` action, call the error handler
+--    (e.g., `happyError`) with the resumption continuation `happyResume`.
+-- 2. Error resumption mode: If the error handler wants to resume parsing in
+--    order to report multiple parse errors, it will call the resumption
+--    continuation (of result type `P (Maybe a)`).
+--    In the absence of a resumptive %error declaration (i.e., the two-argument
+--    form of %error), this resumption will always (do a bit of work, and)
+--    `return Nothing`.
+--    In the presence of such a declaration, the grammar author can use the
+--    special `catch` terminal to declare where parsing should resume after
+--    an error.
+--    E.g., if `stmt : expr ';' | catch ';'` then the resumption will
+--
+--      (a) Pop off the state stack until it finds an item
+--          `stmt -> . catch ';'`.
+--          Then, it will push a `catchTok` onto the stack, perform a shift and
+--          end up in item `stmt -> catch . ';'`.
+--      (b) Discard tokens from the lexer until it finds ';'.
+--          (In general, it will discard until the lookahead matches a token
+--           that has a non-default action in the situation
+--           `P -> α catch . β`, where β might be empty.)
+--
+-- The `catch` resumption mechanism (2) is what is usually associated with
+-- `error` in `bison` or `menhir`. Since `error` is used for the Fixup mechanism
+-- (1) above, we call the corresponding token `catch`.
+
+-- Enter error Fixup: generate an error token,
+--       save the old token and carry on.
+-- When a `happyShift` accepts, we will pop off the error
+-- token to resume parsing with the current lookahead `i`.
+happyTryFixup i tk action sts stk =
+  DEBUG_TRACE("entering `error` fixup.\n")
+  happyDoAction ERROR_TOK tk action sts (MK_ERROR_TOKEN(i) `HappyStk` stk)
+  -- NB: `happyShift` will simply pop the error token and carry on with
+  --     `tk`.
Hence we don't change `tk` in the call here + +-- parse error if we are in fixup and fail again +happyFixupFailed tk st sts (x `HappyStk` stk) = + let i = GET_ERROR_TOKEN(x) in + DEBUG_TRACE("`error` fixup failed.\n") + let resume = happyResume i tk st sts stk + expected = map happyTokenToString (happyExpectedTokens st sts) in + if happyAlreadyInResumption st sts + then resume + else happyReport i tk expected resume + +happyAlreadyInResumption st sts + | (Happy_GHC_Exts.I# n_starts) <- happy_n_starts, LT(st, n_starts) + = False -- end of the stack + | (Happy_GHC_Exts.I# st) `elem` happyCatchStates + = True + | HappyCons st1 sts1 <- sts + = happyAlreadyInResumption st1 sts1 + +happyFail ERROR_TOK = happyFixupFailed +happyFail i = happyTryFixup i + +happyResume i tk st sts stk = pop_items st sts stk + where + pop_items st sts stk + | HappyShift new_state <- happyDecodeAction (happyNextAction CATCH_TOK st) + = DEBUG_TRACE("shifting catch token " ++ show (Happy_GHC_Exts.I# st) + ++ " -> " ++ show (Happy_GHC_Exts.I# new_state) ++ "\n") + discard_input_until_exp i tk new_state (HappyCons st sts) (MK_ERROR_TOKEN(i) `HappyStk` stk) + | DEBUG_TRACE("can't shift catch in " ++ show (Happy_GHC_Exts.I# st) ++ ", ") True + , (Happy_GHC_Exts.I# n_starts) <- happy_n_starts, LT(st, n_starts) + = DEBUG_TRACE("because it is a start state. no resumption.\n") + happyAbort + | (HappyCons st1 sts1) <- sts, _ `HappyStk` stk1 <- stk + = DEBUG_TRACE("discarding.\n") + pop_items st1 sts1 stk1 + discard_input_until_exp i tk st sts stk + | ultimately_fails i st sts + = DEBUG_TRACE("discard token in state " ++ show (Happy_GHC_Exts.I# st) + ++ ": " ++ show (Happy_GHC_Exts.I# i) ++ "\n") + happyLex (\_eof_tk -> happyAbort) + (\i tk -> discard_input_until_exp i tk st sts stk) -- not eof + | otherwise + = DEBUG_TRACE("found expected token in state " ++ show (Happy_GHC_Exts.I# st) + ++ ": " ++ show (Happy_GHC_Exts.I# i) ++ "\n") + (happyDoAction i tk st sts stk) + + ultimately_fails i st sts = + DEBUG_TRACE("trying token " ++ show (Happy_GHC_Exts.I# i) + ++ " in state " ++ show (Happy_GHC_Exts.I# st) ++ ": ") + case happyDecodeAction (happyNextAction i st) of + HappyFail -> DEBUG_TRACE("fail.\n") True + HappyAccept -> DEBUG_TRACE("accept.\n") False + HappyShift _ -> DEBUG_TRACE("shift.\n") False + HappyReduce r -> case happySimulateReduce r st sts of + HappyCons st1 sts1 -> ultimately_fails i st1 sts1 + +happySimulateReduce r st sts = + DEBUG_TRACE("simulate reduction of rule " ++ show (Happy_GHC_Exts.I# r) ++ ", ") + let (Happy_GHC_Exts.I# nt, Happy_GHC_Exts.I# len) = indexRuleArr happyRuleArr r in + DEBUG_TRACE("nt " ++ show (Happy_GHC_Exts.I# nt) ++ ", len: " + ++ show (Happy_GHC_Exts.I# len) ++ ", new_st ") + let sts1@(HappyCons st1 _) = happyDrop len (HappyCons st sts) + new_st = happyIndexGotoTable nt st1 in + DEBUG_TRACE(show (Happy_GHC_Exts.I# new_st) ++ ".\n") + HappyCons new_st sts1 + +happyTokenToString i = happyTokenStrings Prelude.!! (i Prelude.- 2) +happyExpectedTokens st sts = + DEBUG_TRACE("constructing expected tokens.\n") + search_shifts st sts [] + where + search_shifts st sts shifts = foldr (add_action st sts) shifts (distinct_actions st) + add_action st sts (Happy_GHC_Exts.I# i, Happy_GHC_Exts.I# act) shifts = + DEBUG_TRACE("found action in state " ++ show (Happy_GHC_Exts.I# st) + ++ ", input " ++ show (Happy_GHC_Exts.I# i) ++ ", " + ++ show (happyDecodeAction act) ++ "\n") + case happyDecodeAction act of + HappyFail -> shifts + HappyAccept -> shifts -- This would always be %eof or error... 
Not helpful + HappyShift _ -> Happy_Data_List.insert (Happy_GHC_Exts.I# i) shifts + HappyReduce r -> case happySimulateReduce r st sts of + HappyCons st1 sts1 -> search_shifts st1 sts1 shifts + distinct_actions st + = ((-1), Happy_GHC_Exts.I# (happyIndexOffAddr happyDefActions st)) + : [ (i, act) | i <- [begin_i..happy_n_terms], act <- get_act row_off i ] + where + row_off = happyIndexOffAddr happyActOffsets st + begin_i = 2 -- +2: errorTok,catchTok + get_act off (Happy_GHC_Exts.I# i) + | let off_i = PLUS(off,i) + , GTE(off_i,0#) + , EQ(happyIndexOffAddr happyCheck off_i,i) + = [Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off_i)] + | otherwise + = [] -- Internal happy errors: diff --git a/packages/backend-lalr/src/Happy/Backend/LALR.hs b/packages/backend-lalr/src/Happy/Backend/LALR.hs index ade8fe78..9bd88a50 100644 --- a/packages/backend-lalr/src/Happy/Backend/LALR.hs +++ b/packages/backend-lalr/src/Happy/Backend/LALR.hs @@ -19,14 +19,16 @@ magicFilter magicName = case magicName of in filter_output importsToInject :: Bool -> String -importsToInject debug = concat ["\n", import_array, import_bits, import_glaexts, debug_imports, applicative_imports] +importsToInject debug = concat ["\n", import_array, import_list, import_bits, import_glaexts, debug_imports, applicative_imports] where debug_imports | debug = import_debug | otherwise = "" applicative_imports = import_applicative import_glaexts = "import qualified GHC.Exts as Happy_GHC_Exts\n" + import_ghcstack = "import qualified GHC.Stack as Happy_GHC_Stack\n" import_array = "import qualified Data.Array as Happy_Data_Array\n" + import_list = "import qualified Data.List as Happy_Data_List\n" import_bits = "import qualified Data.Bits as Bits\n" import_debug = "import qualified System.IO as Happy_System_IO\n" ++ "import qualified System.IO.Unsafe as Happy_System_IO_Unsafe\n" ++ diff --git a/packages/backend-lalr/src/Happy/Backend/LALR/ProduceCode.lhs b/packages/backend-lalr/src/Happy/Backend/LALR/ProduceCode.lhs index b05f26b1..7fc1df1a 100644 --- a/packages/backend-lalr/src/Happy/Backend/LALR/ProduceCode.lhs +++ b/packages/backend-lalr/src/Happy/Backend/LALR/ProduceCode.lhs @@ -14,7 +14,7 @@ The code generator. > import Data.Maybe ( isNothing, fromMaybe ) > import Data.Char ( ord, chr ) -> import Data.List ( sortBy ) +> import Data.List ( nub, sortBy ) > import Control.Monad ( forM_ ) > import Control.Monad.ST ( ST, runST ) @@ -25,8 +25,9 @@ The code generator. > import Data.Array.Unboxed ( UArray ) > import Data.Array.MArray ( MArray(..), freeze, readArray, writeArray ) > import Data.Array.IArray ( Array, IArray(..), (!), array, assocs, elems ) +> import Debug.Trace -%----------------------------------------------------------------------------- +----------------------------------------------------------------------------- Produce the complete output file. > produceParser :: Grammar -- grammar info @@ -60,7 +61,7 @@ Produce the complete output file. > , monad = (use_monad,monad_context,monad_tycon,monad_then,monad_return) > , token_type = token_type' > , error_handler = error_handler' -> , error_sig = error_sig' +> , error_expected = error_expected' > }) > action goto lang_exts module_header module_trailer > coerce strict @@ -70,7 +71,7 @@ Produce the complete output file. > -- comment goes *after* the module header, so that we > -- don't screw up any OPTIONS pragmas in the header. > . produceAbsSynDecl . nl -> . produceExpListPerState +> . produceTokToStringList > . produceActionTable > . produceReductions > . produceTokenConverter . 
nl @@ -84,20 +85,21 @@ Produce the complete output file. > where > n_starts = length starts' > token = brack token_type' -> + > nowarn_opts = str "{-# OPTIONS_GHC -w #-}" . nl > -- XXX Happy-generated code is full of warnings. Some are easy to > -- fix, others not so easy, and others would require GHC version > -- #ifdefs. For now I'm just disabling all of them. -> + > partTySigs_opts = ifGeGhc710 (str "{-# LANGUAGE PartialTypeSignatures #-}" . nl) > intMaybeHash = str "Happy_GHC_Exts.Int#" > -- Parsing monad and its constraints -> pty = str monad_tycon -> pcont = str monad_context -> +> pty = str monad_tycon -- str "P" +> ptyAt a = brack' (pty . str " " . a) -- \(str "a") -> str "(P a)" +> pcont = str monad_context -- str "Read a", some constraint for "P" to be a monad + > -- If GHC is enabled, wrap the content in a CPP ifdef that includes the > -- content and tests whether the GHC version is >= 7.10.3 > ifGeGhc710 :: (String -> String) -> String -> String @@ -108,13 +110,13 @@ Produce the complete output file. > n_missing_types = length (filter isNothing (elems nt_types)) > happyAbsSyn = str "(HappyAbsSyn " . str wild_tyvars . str ")" > where wild_tyvars = unwords (replicate n_missing_types "_") -> + > -- This decides how to include (if at all) a type signature > -- See > filterTypeSig :: (String -> String) -> String -> String > filterTypeSig content | n_missing_types == 0 = content > | otherwise = ifGeGhc710 content -> + > top_opts = > nowarn_opts > . (str $ unlines @@ -152,7 +154,7 @@ If we're using coercions, we need to generate the injections etc. > = let > happy_item = str "HappyAbsSyn " . str_tyvars > bhappy_item = brack' happy_item -> + > inject n ty > = (case ty of > Nothing -> id @@ -163,7 +165,7 @@ If we're using coercions, we need to generate the injections etc. > . mkHappyWrapCon ty n (str "x") > . nl > . str "{-# INLINE " . mkHappyIn n . str " #-}" -> + > extract n ty > = mkHappyOut n . str " :: " . bhappy_item > . str " -> " . typeParamOut n ty . char '\n' @@ -308,18 +310,18 @@ happyMonadReduce to get polymorphic recursion. Sigh. > . str " -> " . intMaybeHash > . str " -> Happy_IntList -> HappyStk " > . happyAbsSyn . str " -> " -> . pty . str " " . happyAbsSyn . str "\n" +> . ptyAt happyAbsSyn . str "\n" > in filterTypeSig tysig . mkReduceFun i . str " = " > . str s . strspace . lt' . strspace . showInt adjusted_nt > . strspace . reductionFun . nl > . reductionFun . strspace -> + > reductionFun = str "happyReduction_" . shows i -> + > tokPatterns > | coerce = reverse (map mkDummyVar [1 .. length toks]) > | otherwise = reverse (zipWith tokPattern [1..] toks) -> + > tokPattern n _ | n `notElem` vars_used = char '_' > tokPattern n t | t >= firstStartTok && t < fst_term > = if coerce @@ -333,21 +335,21 @@ happyMonadReduce to get polymorphic recursion. Sigh. > else str "(HappyTerminal " > . mkHappyTerminalVar n t > . char ')' -> + > tokLets code'' > | coerce && not (null cases) > = interleave "\n\t" cases > . code'' . str (replicate (length cases) '}') > | otherwise = code'' -> + > cases = [ str "case " . extract t . strspace . mkDummyVar n > . str " of { " . tokPattern n t . str " -> " > | (n,t) <- zip [1..] toks, > n `elem` vars_used ] -> + > extract t | t >= firstStartTok && t < fst_term = mkHappyOut t > | otherwise = str "happyOutTok" -> + > lt = length toks > this_absSynCon | coerce = mkHappyIn nt @@ -358,54 +360,60 @@ The token conversion function. > produceTokenConverter > = case lexer' of { -> + > Nothing -> -> str "happyNewToken action sts stk [] =\n\t" -> . 
eofAction "notHappyAtAll" -> . str " []\n\n" -> . str "happyNewToken action sts stk (tk:tks) =\n\t" -> . str "let cont i = " . doAction . str " sts stk tks in\n\t" -> . str "case tk of {\n\t" +> str "happyTerminalToTok term = case term of {\n\t" > . interleave ";\n\t" (map doToken token_rep) -> . str "_ -> happyError' ((tk:tks), [])\n\t" -> . str "}\n\n" -> . str "happyError_ explist " . eofTok . str " tk tks = happyError' (tks, explist)\n" -> . str "happyError_ explist _ tk tks = happyError' ((tk:tks), explist)\n"; +> . str "_ -> error \"Encountered a token that was not declared to happy\"\n\t}\n" +> . str "{-# NOINLINE happyTerminalToTok #-}\n" +> . str "\n" +> . str "happyEofTok = " . shows (tokIndex eof) . str "\n" +> . str "\n" +> . str "happyLex kend _kmore [] = kend notHappyAtAll []\n" +> . str "happyLex _kend kmore (tk:tks) = kmore (happyTerminalToTok tk) tk tks\n" +> . str "{-# INLINE happyLex #-}\n" +> . str "\n" +> . str "happyNewToken action sts stk = happyLex (\\tk -> " . eofAction . str ") (" +> . str "\\i tk -> " . doAction . str " sts stk)\n" +> . str "\n" +> . str "happyReport " . eofTok . str " tk explist resume tks = happyReport' tks explist resume\n" +> . str "happyReport _ tk explist resume tks = happyReport' (tk:tks) explist resume\n\n"; > -- when the token is EOF, tk == _|_ (notHappyAtAll) > -- so we must not pass it to happyError' > Just (lexer'',eof') -> -> str "happyNewToken action sts stk\n\t= " -> . str lexer'' -> . str "(\\tk -> " -> . str "\n\tlet cont i = " -> . doAction -> . str " sts stk in\n\t" -> . str "case tk of {\n\t" -> . str (eof' ++ " -> ") -> . eofAction "tk" . str ";\n\t" +> str "happyTerminalToTok term = case term of {\n\t" +> . str eof' . str " -> " . eofTok . str ";\n\t" > . interleave ";\n\t" (map doToken token_rep) -> . str "_ -> happyError' (tk, [])\n\t" -> . str "})\n\n" -> . str "happyError_ explist " . eofTok . str " tk = happyError' (tk, explist)\n" -> . str "happyError_ explist _ tk = happyError' (tk, explist)\n"; -> -- superfluous pattern match needed to force happyError_ to +> . str "_ -> error \"Encountered a token that was not declared to happy\"\n\t}\n" +> . str "{-# NOINLINE happyTerminalToTok #-}\n" +> . str "\n" +> . str "happyEofTok = " . shows (tokIndex eof) . str "\n" +> . str "\n" +> . str "happyLex kend kmore = " . str lexer'' . str " (\\tk -> case tk of {\n\t" +> . str eof' . str " -> kend tk;\n\t" +> . str "_ -> kmore (happyTerminalToTok tk) tk })\n" +> . str "{-# INLINE happyLex #-}\n" +> . str "\n" +> . str "happyNewToken action sts stk = happyLex (\\tk -> " . eofAction . str ") (" +> . str "\\i tk -> " . doAction . str " sts stk)\n" +> . str "\n" +> . str "happyReport " . eofTok . str " = happyReport'\n" +> . str "happyReport _ = happyReport'\n\n" +> -- superfluous pattern match needed to force happyReport to > -- have the correct type. > } > where - -> eofAction tk = str "happyDoAction " -> . eofTok . strspace . str tk -> . str " action" . str " sts stk" +> eofAction = str "happyDoAction " +> . eofTok . strspace . str "tk action" . str " sts stk" > eofTok = showInt (tokIndex eof) > doAction = str "happyDoAction i tk action" -> doToken (i,tok) -> = str (removeDollarDollar tok) -> . str " -> cont " -> . showInt (tokIndex i) +> doToken (i,tok) = str (removeDollarDollar tok) +> . str " -> " +> . showInt (tokIndex i) Use a variable rather than '_' to replace '$$', so we can use it on the left hand side of '@'. @@ -425,7 +433,7 @@ the left hand side of '@'. 
> Just str' -> mapDollarDollar str' > pat = mkHappyVar i -> tokIndex i = i - n_nonterminals - n_starts - 2 +> tokIndex i = i - fst_term + 2 > -- tokens adjusted to start at zero, see ARRAY_NOTES %----------------------------------------------------------------------------- @@ -434,10 +442,17 @@ Action Tables. Here we do a bit of trickery and replace the normal default action (failure) for each state with at least one reduction action. For each such state, we pick one reduction action to be the default action. -This should make the code smaller without affecting the speed. It -changes the sematics for errors, however; errors could be detected in -a different state now (but they'll still be detected at the same point -in the token stream). +This should make the code smaller without affecting the speed. +It changes the sematics for errors, however; errors could be detected in a +different state now (but they'll still be detected at the same point in the +token stream). + +SG: For a data point, in issue93 the happyTable triples in size when we always +pick failure as the default reduction. +Presumably that is because there are quite a few reduction states, in which the +only non-default transition is a reduction. +Our scheme above ensures that these states don't occupy space in the main +happyTable at all; they just get an entry in the happyDefActions. Further notes on default cases: @@ -479,27 +494,17 @@ machinery to discard states in the parser... > produceActionTable > = produceActionArray > . produceReduceArray +> . produceRuleArray +> . produceCatchStates > . str "happy_n_terms = " . shows n_terminals . str " :: Prelude.Int\n" > . str "happy_n_nonterms = " . shows n_nonterminals . str " :: Prelude.Int\n\n" +> . str "happy_n_starts = " . shows n_starts . str " :: Prelude.Int\n\n" + +> produceTokToStringList +> = str "{-# NOINLINE happyTokenStrings #-}\n" +> . str "happyTokenStrings = " . shows (drop (fst_term - 1) (elems token_names')) . str "\n" +> -- fst_term - 1: fst_term includes eofToken, but that is last in the list. -> produceExpListPerState -> = produceExpListArray -> . str "{-# NOINLINE happyExpListPerState #-}\n" -> . str "happyExpListPerState st =\n" -> . str " token_strs_expected\n" -> . str " where token_strs = " . str (show $ elems token_names') . str "\n" -> . str " bit_start = st Prelude.* " . str (show nr_tokens) . str "\n" -> . str " bit_end = (st Prelude.+ 1) Prelude.* " . str (show nr_tokens) . str "\n" -> . str " read_bit = readArrayBit happyExpList\n" -> . str " bits = Prelude.map read_bit [bit_start..bit_end Prelude.- 1]\n" -> . str " bits_indexed = Prelude.zip bits [0.." -> . str (show (nr_tokens - 1)) . str "]\n" -> . str " token_strs_expected = Prelude.concatMap f bits_indexed\n" -> . str " f (Prelude.False, _) = []\n" -> . str " f (Prelude.True, nr) = [token_strs Prelude.!! nr]\n" -> . str "\n" -> where (first_token, last_token) = bounds token_names' -> nr_tokens = last_token - first_token + 1 action array indexed by (terminal * last_state) + state @@ -508,61 +513,56 @@ action array indexed by (terminal * last_state) + state > . str "happyActOffsets = HappyA# \"" --" > . hexChars act_offs > . str "\"#\n\n" --" -> > . str "happyGotoOffsets :: HappyAddr\n" > . str "happyGotoOffsets = HappyA# \"" --" > . hexChars goto_offs > . str "\"#\n\n" --" -> -> . str "happyAdjustOffset :: Happy_GHC_Exts.Int# -> Happy_GHC_Exts.Int#\n" -> . str "happyAdjustOffset off = " -> . (if length table < 32768 -> then str "off" -> else str "if happyLt off (" . shows min_off . 
str "# :: Happy_GHC_Exts.Int#)" -> . str " then off Happy_GHC_Exts.+# 65536#" -> . str " else off") -> . str "\n\n" --" -> + > . str "happyDefActions :: HappyAddr\n" > . str "happyDefActions = HappyA# \"" --" > . hexChars defaults > . str "\"#\n\n" --" -> + > . str "happyCheck :: HappyAddr\n" > . str "happyCheck = HappyA# \"" --" > . hexChars check > . str "\"#\n\n" --" -> + > . str "happyTable :: HappyAddr\n" > . str "happyTable = HappyA# \"" --" > . hexChars table > . str "\"#\n\n" --" - -> produceExpListArray -> = str "happyExpList :: HappyAddr\n" -> . str "happyExpList = HappyA# \"" --" -> . hexCharsForBits explist -> . str "\"#\n\n" --" - > n_terminals = length terms > n_nonterminals = length nonterms - n_starts -- lose %starts -> -> (act_offs,goto_offs,table,defaults,check,explist,min_off) + +> (act_offs,goto_offs,table,defaults,check,catch_states) > = mkTables action goto first_nonterm' fst_term > n_terminals n_nonterminals n_starts (bounds token_names') -> +> table_size = length table - 1 + > produceReduceArray -> = {- str "happyReduceArr :: Array Int a\n" -} -> str "happyReduceArr = Happy_Data_Array.array (" -> . shows (n_starts :: Int) -- omit the %start reductions -> . str ", " -> . shows n_rules -> . str ") [\n" +> = str "happyReduceArr = Happy_Data_Array.array (" +> . shows (n_starts :: Int) -- omit the %start reductions +> . str ", " +> . shows n_rules +> . str ") [\n" > . interleave' ",\n" (map reduceArrElem [n_starts..n_rules]) > . str "\n\t]\n\n" +> produceRuleArray -- rule number to (non-terminal number, rule length) +> = str "happyRuleArr :: HappyAddr\n" +> . str "happyRuleArr = HappyA# \"" -- " +> . hexChars (concatMap (\(nt,len) -> [nt,len]) ruleArrElems) +> . str "\"#\n\n" --" +> +> ruleArrElems = map (\(Production nt toks _code _prio) -> (nt-first_nonterm',length toks)) (drop n_starts prods) +> > n_rules = length prods - 1 :: Int +> +> produceCatchStates +> = str "happyCatchStates :: [Int]\n" +> . str "happyCatchStates = " . shows catch_states . str "\n\n" > showInt i = shows i . showChar '#' @@ -623,6 +623,7 @@ MonadStuff: happyThen :: () => HappyIdentity a -> (a -> HappyIdentity b) -> HappyIdentity b happyReturn :: () => a -> HappyIdentity a happyThen1 m k tks = happyThen m (\a -> k a tks) + happyFmap1 f m tks = happyThen (m tks) (\a -> happyReturn (f a)) happyReturn1 = \a tks -> happyReturn a - with %monad: @@ -630,6 +631,7 @@ MonadStuff: happyThen :: CONTEXT => P a -> (a -> P b) -> P b happyReturn :: CONTEXT => a -> P a happyThen1 m k tks = happyThen m (\a -> k a tks) + happyFmap1 f m tks = happyThen (m tks) (\a -> happyReturn (f a)) happyReturn1 = \a tks -> happyReturn a - with %monad & %lexer: @@ -637,31 +639,39 @@ MonadStuff: happyThen :: CONTEXT => P a -> (a -> P b) -> P b happyReturn :: CONTEXT => a -> P a happyThen1 = happyThen + happyFmap1 f m = happyThen m (\a -> happyReturn (f a)) happyReturn1 = happyReturn > produceMonadStuff = -> str "happyThen :: " . pcont . str " => " . pty -> . str " a -> (a -> " . pty -> . str " b) -> " . pty . str " b\n" +> str "happyThen :: " . pcont . str " => " . ptyAt (str "a") +> . str " -> (a -> " . ptyAt (str "b") +> . str ") -> " . ptyAt (str "b") . str "\n" > . str "happyThen = " . brack monad_then . nl -> . str "happyReturn :: " . pcont . str " => a -> " . pty . str " a\n" +> . str "happyReturn :: " . pcont . str " => a -> " . ptyAt (str "a") . str "\n" > . str "happyReturn = " . brack monad_return . nl > . case lexer' of > Nothing -> > str "happyThen1 m k tks = (" . str monad_then > . str ") m (\\a -> k a tks)\n" -> . 
str "happyReturn1 :: " . pcont . str " => a -> b -> " . pty . str " a\n" +> . str "happyFmap1 f m tks = happyThen (m tks) (\\a -> happyReturn (f a))\n" +> . str "happyReturn1 :: " . pcont . str " => a -> b -> " . ptyAt (str "a") . str "\n" > . str "happyReturn1 = \\a tks -> " . brack monad_return > . str " a\n" -> . str "happyError' :: " . str monad_context . str " => ([" -> . token -> . str "], [Prelude.String]) -> " -> . str monad_tycon -> . str " a\n" -> . str "happyError' = " -> . str (if use_monad then "" else "HappyIdentity Prelude.. ") -> . errorHandler . str "\n" +> . str "happyReport' :: " . pcont . str " => " +> . str "[" . token . str "] -> " +> . str "[Prelude.String] -> (" +> . str "[" . token . str "] -> " +> . ptyAt (str "a") . str ") -> " +> . ptyAt (str "a") +> . str "\n" +> . str "happyReport' = " . callReportError . str "\n" +> . str "\n" +> . str "happyAbort :: " . pcont . str " => " +> . str "[" . token . str "] -> " +> . ptyAt (str "a") +> . str "\n" +> . str "happyAbort = " . str abort_handler . str "\n" > _ -> > let > happyParseSig = @@ -671,53 +681,80 @@ MonadStuff: > newTokenSig = > str "happyNewToken :: " . pcont . str " => " . intMaybeHash > . str " -> Happy_IntList -> HappyStk " . happyAbsSyn -> . str " -> " . pty . str " " . happyAbsSyn . str"\n" +> . str " -> " . ptyAt happyAbsSyn . str"\n" > . str "\n" > doActionSig = > str "happyDoAction :: " . pcont . str " => " . intMaybeHash > . str " -> " . str token_type' . str " -> " . intMaybeHash > . str " -> Happy_IntList -> HappyStk " . happyAbsSyn -> . str " -> " . pty . str " " . happyAbsSyn . str "\n" +> . str " -> " . ptyAt happyAbsSyn . str "\n" > . str "\n" > reduceArrSig = > str "happyReduceArr :: " . pcont > . str " => Happy_Data_Array.Array Prelude.Int (" . intMaybeHash > . str " -> " . str token_type' . str " -> " . intMaybeHash > . str " -> Happy_IntList -> HappyStk " . happyAbsSyn -> . str " -> " . pty . str " " . happyAbsSyn . str ")\n" +> . str " -> " . ptyAt happyAbsSyn . str ")\n" > . str "\n" > in filterTypeSig (happyParseSig . newTokenSig . doActionSig . reduceArrSig) > . str "happyThen1 :: " . pcont . str " => " . pty > . str " a -> (a -> " . pty > . str " b) -> " . pty . str " b\n" > . str "happyThen1 = happyThen\n" -> . str "happyReturn1 :: " . pcont . str " => a -> " . pty . str " a\n" +> . str "happyFmap1 f m = happyThen m (\\a -> happyReturn (f a))\n" +> . str "happyReturn1 :: " . pcont . str " => a -> " . ptyAt (str "a") . str "\n" > . str "happyReturn1 = happyReturn\n" -> . str "happyError' :: " . str monad_context . str " => (" -> . token . str ", [Prelude.String]) -> " -> . str monad_tycon -> . str " a\n" -> . str "happyError' tk = " -> . str (if use_monad then "" else "HappyIdentity ") -> . errorHandler . str " tk\n" - +> . str "happyReport' :: " . pcont . str " => " +> . token . str " -> " +> . str "[Prelude.String] -> " +> . ptyAt (str "a") . str " -> " +> . ptyAt (str "a") +> . str "\n" +> . str "happyReport' = " . callReportError . str "\n" +> . str "\n" +> . str "happyAbort :: " . pcont . str " => " +> . ptyAt (str "a") +> . str "\n" +> . str "happyAbort = " . str abort_handler . str "\n" +> . str "\n" + +The error handler takes up to three arguments. An error handler specified with %error is passed the current token -when used with %lexer, but happyError (the old way but kept for -compatibility) is not passed the current token. Also, the %errorhandlertype -directive determines the API of the provided function. 
- -> errorHandler = -> case error_handler' of -> Just h -> case error_sig' of -> ErrorHandlerTypeExpList -> str h -> ErrorHandlerTypeDefault -> str "(\\(tokens, _) -> " . str h . str " tokens)" -> Nothing -> case lexer' of -> Nothing -> str "(\\(tokens, _) -> happyError tokens)" -> Just _ -> str "(\\(tokens, explist) -> happyError)" +when used with %lexer as the first argument, but happyError (the old way but kept for +compatibility) is not passed the current token. +Furthermore, the second argument is the list of expected tokens +in the presence of the %error.expected directive. +The last argument is the "resumption", a continuation that tries to find +an item on the stack taking a @catch@ terminal where parsing may resume, +in the presence of the two-argument form of the %error directive. + +> callReportError = -- this one wraps around report_error_handler to expose a unified interface +> str "(\\tokens expected resume -> " . +> (if use_monad then str "" +> else str "HappyIdentity Prelude.$ ") . +> report_error_handler . +> (case (error_handler', lexer') of (DefaultErrorHandler, Just _) -> id +> _ -> str " tokens") . +> (if error_expected' then str " expected" +> else id) . +> (case error_handler' of ResumptiveErrorHandler{} -> str " resume" +> _ -> id) . +> str ")" +> report_error_handler = case error_handler' of +> DefaultErrorHandler -> str "happyError" +> CustomErrorHandler h -> brack h +> ResumptiveErrorHandler _abort report -> brack report +> abort_handler = case error_handler' of +> ResumptiveErrorHandler abort _report -> abort +> _ -> "error \"Called abort handler in non-resumptive parser\"" > reduceArrElem n > = str "\t(" . shows n . str " , " > . str "happyReduce_" . shows n . char ')' +> +> showRuleArrElem r (nt, len) +> = str "\t(" . shows r . str " , (" +> . shows nt . str "," . shows len . str ") )" ----------------------------------------------------------------------------- -- Produce the parser entry and exit points @@ -758,9 +795,9 @@ directive determines the API of the provided function. > (True,Nothing) -> \(name,_,_,_) -> monadAE name > (False,Just _) -> error "attribute grammars not supported for non-monadic parsers with %lexer" > (False,Nothing)-> \(name,_,_,_) -> regularAE name -> + > defaultAttr = fst (head attributes') -> + > monadAndLexerAE name > = str name . str " = " > . str "do { " @@ -828,20 +865,20 @@ See notes under "Action Tables" above for some subtleties in this function. > getDefault actions = > -- pick out the action for the error token, if any > case [ act | (e, act) <- actions, e == errorTok ] of -> + > -- use error reduction as the default action, if there is one. > act@(LR'Reduce _ _) : _ -> act > act@(LR'Multiple _ (LR'Reduce _ _)) : _ -> act -> + > -- if the error token is shifted or otherwise, don't generate > -- a default action. This is *important*! > (act : _) | act /= LR'Fail -> LR'Fail -> + > -- no error actions, pick a reduce to be the default. > _ -> case reduces of > [] -> LR'Fail > (act:_) -> act -- pick the first one we see for now -> + > where reduces > = [ act | (_, act@(LR'Reduce _ _)) <- actions ] > ++ [ act | (_, LR'Multiple _ act@(LR'Reduce _ _)) <- actions ] @@ -891,6 +928,10 @@ See notes under "Action Tables" above for some subtleties in this function. -- try to fit the actions into the check table, using the ordering -- from above. 
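Concretely, the packed tables are consulted roughly like this (a boxed sketch
for intuition only, with invented names; the generated code does the same thing
on unboxed arrays, and `token` is the adjusted token number, with error = 0 and
catch = 1):

import Data.Array (Array, (!))

-- offsets/table/check/defaults play the roles of happyActOffsets, happyTable,
-- happyCheck and happyDefActions.
lookupAction :: Array Int Int -> Array Int Int -> Array Int Int -> Array Int Int
             -> Int -> Int -> Int
lookupAction offsets table check defaults state token
  | ix >= 0 && check ! ix == token = table ! ix        -- genuine entry for (state, token)
  | otherwise                      = defaults ! state  -- otherwise fall back to the default
  where ix = offsets ! state + token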
+SG: If you want to know more about similar compression schemes, consult + Storing a Sparse Table (https://dl.acm.org/doi/10.1145/359168.359175) +One can think of the mapping @\(state,token) -> (offs ! state)+token@ as a hash +and @check@ as the way to detect "collisions" (i.e., default entries). > mkTables > :: ActionTable -> GotoTable -> Name -> Int -> Int -> Int -> Int -> (Int, Int) -> @@ -899,34 +940,31 @@ See notes under "Action Tables" above for some subtleties in this function. > , [Int] -- happyTable > , [Int] -- happyDefAction > , [Int] -- happyCheck -> , [Int] -- happyExpList -> , Int -- happyMinOffset +> , [Int] -- happyCatchStates > ) -> + > mkTables action goto first_nonterm' fst_term > n_terminals n_nonterminals n_starts > token_names_bound -> + > = ( elems act_offs > , elems goto_offs > , take max_off (elems table) > , def_actions > , take max_off (elems check) -> , elems explist -> , min_off +> , shifted_catch_states > ) > where -> -> (table,check,act_offs,goto_offs,explist,min_off,max_off) +> (table,check,act_offs,goto_offs,max_off) > = runST (genTables (length actions) > max_token token_names_bound -> sorted_actions explist_actions) +> sorted_actions) > > -- the maximum token number used in the parser > max_token = max n_terminals (n_starts+n_nonterminals) - 1 -> + > def_actions = map (\(_,_,def,_,_,_) -> def) actions -> + > actions :: [TableEntry] > actions = > [ (ActionEntry, @@ -937,30 +975,31 @@ See notes under "Action Tables" above for some subtleties in this function. > length acts'', > acts'') > | (state, acts) <- assocs action, -> let (err:_dummy:vec) = assocs acts +> let (err:catch:_dummy:vec) = assocs acts > vec' = drop (n_starts+n_nonterminals) vec -> acts' = filter notFail (err:vec') +> acts' = filter notFail (err:catch:vec') > default_act = getDefault acts' > acts'' = mkActVals acts' default_act > ] > -> explist_actions :: [(Int, [Int])] -> explist_actions = [ (state, concatMap f $ assocs acts) -> | (state, acts) <- assocs action ] -> where -> f (t, LR'Shift _ _ ) = [t - fst token_names_bound] -> f (_, _) = [] +> shifted_catch_states :: [Int] +> shifted_catch_states = -- collect the states in which we have just shifted a catchTok +> nub [ to_state | (_from_state, acts) <- assocs action +> , let (_err:catch:_) = assocs acts +> , (_tok, LR'Shift to_state _) <- return catch ] > -> -- adjust terminals by -(fst_term+1), so they start at 1 (error is 0). + +> -- adjust terminals by -(fst_term+2), so they start at 2 (error is 0, catch is 1). > -- (see ARRAY_NOTES) > adjust token | token == errorTok = 0 -> | otherwise = token - fst_term + 1 -> +> | token == catchTok = 1 +> | otherwise = token - fst_term + 2 + > mkActVals assocs' default_act = > [ (adjust token, actionVal act) > | (token, act) <- assocs' > , act /= default_act ] -> + > gotos :: [TableEntry] > gotos = [ (GotoEntry, > state, 0, @@ -972,12 +1011,12 @@ See notes under "Action Tables" above for some subtleties in this function. > | (state, goto_arr) <- assocs goto, > let goto_vals = mkGotoVals (assocs goto_arr) > ] -> + > -- adjust nonterminals by -first_nonterm', so they start at zero > -- (see ARRAY_NOTES) > mkGotoVals assocs' = > [ (token - first_nonterm', i) | (token, Goto i) <- assocs' ] -> + > sorted_actions = sortBy (flip cmp_state) (actions ++ gotos) > cmp_state (_,_,_,width1,tally1,_) (_,_,_,width2,tally2,_) > | width1 < width2 = LT @@ -998,34 +1037,29 @@ See notes under "Action Tables" above for some subtleties in this function. > -> Int -- maximum token no. 
> -> (Int, Int) -- token names bounds > -> [TableEntry] -- entries for the table -> -> [(Int, [Int])] -- expected tokens lists > -> ST s ( UArray Int Int -- table > , UArray Int Int -- check > , UArray Int Int -- action offsets > , UArray Int Int -- goto offsets -> , UArray Int Int -- expected tokens list -> , Int -- lowest offset in table > , Int -- highest offset in table > ) -> -> genTables n_actions max_token token_names_bound entries explist = do -> + +> genTables n_actions max_token token_names_bound entries = do + > table <- newArray (0, mAX_TABLE_SIZE) 0 > check <- newArray (0, mAX_TABLE_SIZE) (-1) > act_offs <- newArray (0, n_actions) 0 > goto_offs <- newArray (0, n_actions) 0 > off_arr <- newArray (-max_token, mAX_TABLE_SIZE) 0 -> exp_array <- newArray (0, (n_actions * n_token_names + 31) `div` 32) 0 -- 32 bits per entry > -> (min_off,max_off) <- genTables' table check act_offs goto_offs off_arr exp_array entries -> explist max_token n_token_names +> max_off <- genTables' table check act_offs goto_offs off_arr entries +> max_token n_token_names > > table' <- freeze table > check' <- freeze check > act_offs' <- freeze act_offs > goto_offs' <- freeze goto_offs -> exp_array' <- freeze exp_array -> return (table',check',act_offs',goto_offs',exp_array',min_off,max_off+1) +> return (table',check',act_offs',goto_offs',max_off+1) > where > n_states = n_actions - 1 @@ -1040,34 +1074,23 @@ See notes under "Action Tables" above for some subtleties in this function. > -> STUArray s Int Int -- action offsets > -> STUArray s Int Int -- goto offsets > -> STUArray s Int Int -- offset array -> -> STUArray s Int Int -- expected token list > -> [TableEntry] -- entries for the table -> -> [(Int, [Int])] -- expected tokens lists > -> Int -- maximum token no. > -> Int -- number of token names -> -> ST s (Int,Int) -- lowest and highest offsets in table +> -> ST s Int -- highest offsets in table > -> genTables' table check act_offs goto_offs off_arr exp_array entries -> explist max_token n_token_names -> = fill_exp_array >> fit_all entries 0 0 1 +> genTables' table check act_offs goto_offs off_arr entries +> max_token n_token_names +> = fit_all entries 0 1 > where > -> fit_all [] min_off max_off _ = return (min_off, max_off) -> fit_all (s:ss) min_off max_off fst_zero = do -> (off, new_min_off, new_max_off, new_fst_zero) <- fit s min_off max_off fst_zero +> fit_all [] max_off _ = return max_off +> fit_all (s:ss) max_off fst_zero = do +> (off, new_max_off, new_fst_zero) <- fit s max_off fst_zero > ss' <- same_states s ss off > writeArray off_arr off 1 -> fit_all ss' new_min_off new_max_off new_fst_zero -> -> fill_exp_array = -> forM_ explist $ \(state, tokens) -> -> forM_ tokens $ \token -> do -> let bit_nr = state * n_token_names + token -> let word_nr = bit_nr `div` 32 -> let word_offset = bit_nr `mod` 32 -> x <- readArray exp_array word_nr -> writeArray exp_array word_nr (setBit x word_offset) -> +> fit_all ss' new_max_off new_fst_zero + > -- try to merge identical states. We only try the next state(s) > -- in the list, but the list is kind-of sorted so we shouldn't > -- miss too many. @@ -1076,34 +1099,31 @@ See notes under "Action Tables" above for some subtleties in this function. > | acts == acts' = do writeArray (which_off e) no off > same_states s ss' off > | otherwise = return ss -> + > which_off ActionEntry = act_offs > which_off GotoEntry = goto_offs -> + > -- fit a vector into the table. 
Return the offset of the vector, > -- the maximum offset used in the table, and the offset of the first > -- entry in the table (used to speed up the lookups a bit). -> fit (_,_,_,_,_,[]) min_off max_off fst_zero = return (0,min_off,max_off,fst_zero) -> +> fit (_,_,_,_,_,[]) max_off fst_zero = return (0,max_off,fst_zero) + > fit (act_or_goto, state_no, _deflt, _, _, state@((t,_):_)) -> min_off max_off fst_zero = do +> max_off fst_zero = do > -- start at offset 1 in the table: all the empty states > -- (states with just a default reduction) are mapped to > -- offset zero. > off <- findFreeOffset (-t+fst_zero) check off_arr state -> let new_min_off | furthest_left < min_off = furthest_left -> | otherwise = min_off -> new_max_off | furthest_right > max_off = furthest_right +> let new_max_off | furthest_right > max_off = furthest_right > | otherwise = max_off -> furthest_left = off > furthest_right = off + max_token -> + > -- trace ("fit: state " ++ show state_no ++ ", off " ++ show off ++ ", elems " ++ show state) $ do -> + > writeArray (which_off act_or_goto) state_no off > addState off table check state > new_fst_zero <- findFstFreeSlot check fst_zero -> return (off, new_min_off, new_max_off, new_fst_zero) +> return (off, new_max_off, new_fst_zero) When looking for a free offset in the table, we use the 'check' table rather than the main table. The check table starts off with (-1) in @@ -1119,11 +1139,11 @@ slot is free or not. > findFreeOffset off table off_arr state = do > -- offset 0 isn't allowed > if off == 0 then try_next else do -> + > -- don't use an offset we've used before > b <- readArray off_arr off > if b /= 0 then try_next else do -> + > -- check whether the actions for this state fit in the table > ok <- fits off state table > if not ok then try_next else return off diff --git a/packages/codegen-common/src/Happy/CodeGen/Common/Options.lhs b/packages/codegen-common/src/Happy/CodeGen/Common/Options.lhs index 66239991..c30c3f7f 100644 --- a/packages/codegen-common/src/Happy/CodeGen/Common/Options.lhs +++ b/packages/codegen-common/src/Happy/CodeGen/Common/Options.lhs @@ -5,13 +5,20 @@ The CommonOptions data type. ----------------------------------------------------------------------------- > module Happy.CodeGen.Common.Options ( -> ErrorHandlerType(..), -> CommonOptions(..) +> ErrorHandlerInfo(..), CommonOptions(..) > ) where -> data ErrorHandlerType -> = ErrorHandlerTypeDefault -> | ErrorHandlerTypeExpList +> data ErrorHandlerInfo +> = DefaultErrorHandler +> -- ^ Default handler `happyError`. +> | CustomErrorHandler String +> -- ^ Call this handler on error. +> | ResumptiveErrorHandler String {- abort -} String {- addMessage -} +> -- ^ `ResumptiveErrorHandler abort reportError`: +> -- Calls non-fatal `reportError ... resume` with resumption `resume` to +> -- get more errors, ultimately failing with `abort` when parse can't be +> -- resumed. +> > data CommonOptions > = CommonOptions { @@ -20,6 +27,8 @@ The CommonOptions data type. > monad :: (Bool,String,String,String,String), > expect :: Maybe Int, > lexer :: Maybe (String,String), -> error_handler :: Maybe String, -> error_sig :: ErrorHandlerType +> error_handler :: ErrorHandlerInfo, +> error_expected :: Bool +> -- ^ Error handler expects a `[String]` as arg after current +> -- token carrying the pretty-printed expected tokens. 
> } diff --git a/packages/frontend/boot-src/Parser.ly b/packages/frontend/boot-src/Parser.ly index a18fabdf..e573a861 100644 --- a/packages/frontend/boot-src/Parser.ly +++ b/packages/frontend/boot-src/Parser.ly @@ -33,7 +33,7 @@ The parser. > spec_shift { TokenKW TokSpecId_Shift } > spec_expect { TokenKW TokSpecId_Expect } > spec_error { TokenKW TokSpecId_Error } -> spec_errorhandlertype { TokenKW TokSpecId_ErrorHandlerType } +> spec_errorexpected { TokenKW TokSpecId_ErrorExpected } > spec_attribute { TokenKW TokSpecId_Attribute } > spec_attributetype { TokenKW TokSpecId_Attributetype } > code { TokenInfo $$ TokCodeQuote } @@ -104,11 +104,11 @@ The parser. > | spec_shift { PrecShift } > | { PrecNone } -> tokInfos :: { [Directive String] } +> tokInfos :: { [Directive String] } > : tokInfos tokInfo { $2 : $1 } > | tokInfo { [$1] } -> tokInfo :: { Directive String } +> tokInfo :: { Directive String } > : spec_tokentype code { TokenType $2 } > | spec_token tokenSpecs { TokenSpec $2 } > | spec_name id optStart { TokenName $2 $3 False } @@ -123,8 +123,8 @@ The parser. > | spec_right ids { TokenRight $2 } > | spec_left ids { TokenLeft $2 } > | spec_expect int { TokenExpect $2 } -> | spec_error code { TokenError $2 } -> | spec_errorhandlertype id { TokenErrorHandlerType $2 } +> | spec_error code optCode { TokenError $2 $3 } +> | spec_errorexpected { TokenErrorExpected } > | spec_attributetype code { TokenAttributetype $2 } > | spec_attribute id code { TokenAttribute $2 $3 } diff --git a/packages/frontend/src/Happy/Frontend.hs b/packages/frontend/src/Happy/Frontend.hs index e0fd5dbc..a99fc146 100644 --- a/packages/frontend/src/Happy/Frontend.hs +++ b/packages/frontend/src/Happy/Frontend.hs @@ -30,3 +30,10 @@ deLitify = deLit where deLit2 ('\n':r) = '\n' : deLit r deLit2 (_:r) = deLit2 r deLit2 [] = [] +-- Iff happy is built with bootstrapping, attribute grammars are supported +supportsParsingAttributeGrammars :: Bool +#ifdef HAPPY_BOOTSTRAP +supportsParsingAttributeGrammars = True +#else +supportsParsingAttributeGrammars = False +#endif diff --git a/packages/frontend/src/Happy/Frontend/AbsSyn.lhs b/packages/frontend/src/Happy/Frontend/AbsSyn.lhs index 3b195a7f..af8651ec 100644 --- a/packages/frontend/src/Happy/Frontend/AbsSyn.lhs +++ b/packages/frontend/src/Happy/Frontend/AbsSyn.lhs @@ -10,13 +10,13 @@ Here is the abstract syntax of the language we parse. > BookendedAbsSyn(..), > AbsSyn(..), Directive(..), > getTokenType, getTokenSpec, getParserNames, getLexer, -> getImportedIdentity, getMonad, getError, -> getPrios, getPrioNames, getExpect, getErrorHandlerType, +> getImportedIdentity, getMonad, ErrorHandlerInfo(..), getError, +> getPrios, getPrioNames, getExpect, getErrorHandlerExpectedList, > getAttributes, getAttributetype, > Rule(..), Prod(..), Term(..), Prec(..) > ) where -> import Happy.CodeGen.Common.Options (ErrorHandlerType(..)) +> import Happy.CodeGen.Common.Options (ErrorHandlerInfo(..)) > data BookendedAbsSyn > = BookendedAbsSyn @@ -60,23 +60,23 @@ Parser Generator Directives. ToDo: find a consistent way to analyse all the directives together and generate some error messages. 
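For orientation before the data type below: a grammar using the new
error-handling directives might look roughly like this sketch. It is
illustrative only, based on the `stmt : expr ';' | catch ';'` example in the
template comments and on the `%error`/`%error.expected` rules added to
Parser.ly above; it is not the contents of the new test files, and names such
as Token, ParseM, reportErr and abortParse are invented.

%name parseStmts Stmts
%tokentype { Token }
%monad { ParseM }
%error { abortParse } { reportErr }  -- two-argument form: abort and report handlers
%error.expected                      -- reportErr also receives the expected-token strings

%token
  ';' { TSemi }
  'x' { TX }

%%

Stmts : {- empty -}    { [] }
      | Stmts Stmt     { $2 : $1 }

Stmt  : 'x' ';'        { GoodStmt }
      | catch ';'      { BadStmt }   -- after an error: pop back to this item, then
                                     -- discard input until ';' can be shifted

{
data Token = TSemi | TX
data Stmt  = GoodStmt | BadStmt
type ParseM = Either String  -- any monad will do for the sketch

-- Handlers of roughly this shape are expected here (the exact argument list
-- depends on %lexer and %error.expected; see callReportError above):
reportErr :: [Token] -> [String] -> ([Token] -> ParseM a) -> ParseM a
reportErr toks _expected resume = resume toks

abortParse :: [Token] -> ParseM a
abortParse _ = Left "parse failed"
}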
-> + > data Directive a > = TokenType String -- %tokentype > | TokenSpec [(a,String)] -- %token > | TokenName String (Maybe String) Bool -- %name/%partial (True <=> %partial) > | TokenLexer String String -- %lexer -> | TokenErrorHandlerType String -- %errorhandlertype > | TokenImportedIdentity -- %importedidentity > | TokenMonad String String String String -- %monad > | TokenNonassoc [String] -- %nonassoc > | TokenRight [String] -- %right > | TokenLeft [String] -- %left > | TokenExpect Int -- %expect -> | TokenError String -- %error +> | TokenError String (Maybe String) -- %error +> | TokenErrorExpected -- %error.expected > | TokenAttributetype String -- %attributetype > | TokenAttribute String String -- %attribute -> deriving Show +> deriving (Eq, Show) > getTokenType :: [Directive t] -> String > getTokenType ds @@ -134,22 +134,17 @@ generate some error messages. > [] -> Nothing > _ -> error "multiple expect directives" -> getError :: [Directive t] -> Maybe String +> getError :: [Directive t] -> ErrorHandlerInfo > getError ds -> = case [ a | (TokenError a) <- ds ] of -> [t] -> Just t -> [] -> Nothing +> = case [ (a, mb_b) | (TokenError a mb_b) <- ds ] of +> [] -> DefaultErrorHandler +> [(a,Nothing)] -> CustomErrorHandler a +> [(abort,Just addMessage)] -> ResumptiveErrorHandler abort addMessage > _ -> error "multiple error directives" -> getErrorHandlerType :: [Directive t] -> ErrorHandlerType -> getErrorHandlerType ds -> = case [ a | (TokenErrorHandlerType a) <- ds ] of -> [t] -> case t of -> "explist" -> ErrorHandlerTypeExpList -> "default" -> ErrorHandlerTypeDefault -> _ -> error "unsupported %errorhandlertype value" -> [] -> ErrorHandlerTypeDefault -> _ -> error "multiple errorhandlertype directives" +> getErrorHandlerExpectedList :: Eq t => [Directive t] -> Bool +> getErrorHandlerExpectedList ds +> = TokenErrorExpected `elem` ds > getAttributes :: [Directive t] -> [(String, String)] > getAttributes ds diff --git a/packages/frontend/src/Happy/Frontend/AttrGrammar/Parser.hs b/packages/frontend/src/Happy/Frontend/AttrGrammar/Parser.hs index 2a930f5c..d7076fb1 100644 --- a/packages/frontend/src/Happy/Frontend/AttrGrammar/Parser.hs +++ b/packages/frontend/src/Happy/Frontend/AttrGrammar/Parser.hs @@ -1,7 +1,11 @@ {-# OPTIONS_GHC -w #-} -{-# OPTIONS -XMagicHash -XBangPatterns -XTypeSynonymInstances -XFlexibleInstances -cpp #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE MagicHash #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE FlexibleInstances #-} #if __GLASGOW_HASKELL__ >= 710 -{-# OPTIONS_GHC -XPartialTypeSignatures #-} +{-# LANGUAGE PartialTypeSignatures #-} #endif {-# OPTIONS_GHC -w #-} module Happy.Frontend.AttrGrammar.Parser (agParser) where @@ -9,12 +13,13 @@ import Happy.Frontend.ParseMonad.Class import Happy.Frontend.ParseMonad import Happy.Frontend.AttrGrammar import qualified Data.Array as Happy_Data_Array +import qualified Data.List as Happy_Data_List import qualified Data.Bits as Bits import qualified GHC.Exts as Happy_GHC_Exts import Control.Applicative(Applicative(..)) import Control.Monad (ap) --- parser produced by Happy Version 1.20.1.1 +-- parser produced by Happy Version 2.0 newtype HappyAbsSyn = HappyAbsSyn HappyAny #if __GLASGOW_HASKELL__ >= 607 @@ -22,13 +27,6 @@ type HappyAny = Happy_GHC_Exts.Any #else type HappyAny = forall a . 
a #endif -newtype HappyWrap4 = HappyWrap4 ([AgRule]) -happyIn4 :: ([AgRule]) -> (HappyAbsSyn ) -happyIn4 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap4 x) -{-# INLINE happyIn4 #-} -happyOut4 :: (HappyAbsSyn ) -> HappyWrap4 -happyOut4 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut4 #-} newtype HappyWrap5 = HappyWrap5 ([AgRule]) happyIn5 :: ([AgRule]) -> (HappyAbsSyn ) happyIn5 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap5 x) @@ -36,15 +34,15 @@ happyIn5 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap5 x) happyOut5 :: (HappyAbsSyn ) -> HappyWrap5 happyOut5 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut5 #-} -newtype HappyWrap6 = HappyWrap6 (AgRule) -happyIn6 :: (AgRule) -> (HappyAbsSyn ) +newtype HappyWrap6 = HappyWrap6 ([AgRule]) +happyIn6 :: ([AgRule]) -> (HappyAbsSyn ) happyIn6 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap6 x) {-# INLINE happyIn6 #-} happyOut6 :: (HappyAbsSyn ) -> HappyWrap6 happyOut6 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut6 #-} -newtype HappyWrap7 = HappyWrap7 ([AgToken]) -happyIn7 :: ([AgToken]) -> (HappyAbsSyn ) +newtype HappyWrap7 = HappyWrap7 (AgRule) +happyIn7 :: (AgRule) -> (HappyAbsSyn ) happyIn7 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap7 x) {-# INLINE happyIn7 #-} happyOut7 :: (HappyAbsSyn ) -> HappyWrap7 @@ -57,6 +55,13 @@ happyIn8 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap8 x) happyOut8 :: (HappyAbsSyn ) -> HappyWrap8 happyOut8 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut8 #-} +newtype HappyWrap9 = HappyWrap9 ([AgToken]) +happyIn9 :: ([AgToken]) -> (HappyAbsSyn ) +happyIn9 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap9 x) +{-# INLINE happyIn9 #-} +happyOut9 :: (HappyAbsSyn ) -> HappyWrap9 +happyOut9 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut9 #-} happyInTok :: (AgToken) -> (HappyAbsSyn ) happyInTok x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyInTok #-} @@ -65,39 +70,22 @@ happyOutTok x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOutTok #-} -happyExpList :: HappyAddr -happyExpList = HappyA# "\x00\xf0\x00\xc0\x03\x00\x00\x00\x01\x00\xe9\x01\x20\x00\x80\x00\x00\x02\x00\x00\x00\xa4\x07\x90\x1e\x40\x7a\x00\x00\x00\xb4\x07\x90\x1e\x40\x7a\x00\xe9\x01\xa4\x07\x90\x1e\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x20\x00\x40\x7b\x00\xed\x01\xb4\x07\xd0\x1e\x40\x7b\x00\xe9\x01\xb4\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\xe9\x01\x00\x00\xd0\x1e\x00\x00\x00\x00"# - -{-# NOINLINE happyExpListPerState #-} -happyExpListPerState st = - token_strs_expected - where token_strs = ["error","%dummy","%start_agParser","agParser","rules","rule","code","code0","\"{\"","\"}\"","\";\"","\"=\"","where","selfRef","subRef","rightRef","unknown","%eof"] - bit_start = st Prelude.* 18 - bit_end = (st Prelude.+ 1) Prelude.* 18 - read_bit = readArrayBit happyExpList - bits = Prelude.map read_bit [bit_start..bit_end Prelude.- 1] - bits_indexed = Prelude.zip bits [0..17] - token_strs_expected = Prelude.concatMap f bits_indexed - f (Prelude.False, _) = [] - f (Prelude.True, nr) = [token_strs Prelude.!! 
nr] - +{-# NOINLINE happyTokenStrings #-} +happyTokenStrings = ["\"{\"","\"}\"","\";\"","\"=\"","where","selfRef","subRef","rightRef","unknown","%eof"] happyActOffsets :: HappyAddr -happyActOffsets = HappyA# "\x0f\x00\x0f\x00\x00\x00\xfe\xff\x0a\x00\xff\xff\x02\x00\x19\x00\x05\x00\x0a\x00\x0a\x00\x0a\x00\x00\x00\x01\x00\x0a\x00\x0a\x00\x0a\x00\x0a\x00\x0a\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x0a\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1d\x00\x0a\x00\x00\x00\x01\x00\x00\x00\x00\x00"# +happyActOffsets = HappyA# "\x0d\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x08\x00\x00\x00\x09\x00\x00\x00\x18\x00\x00\x00\x1a\x00\x00\x00\xfa\xff\xff\xff\x08\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x08\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x08\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1d\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00"# happyGotoOffsets :: HappyAddr -happyGotoOffsets = HappyA# "\x18\x00\x0b\x00\x00\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x00\x20\x00\x21\x00\x00\x00\x22\x00\x24\x00\x25\x00\x26\x00\x27\x00\x28\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x29\x00\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2f\x00\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x32\x00\x00\x00\x33\x00\x00\x00\x00\x00"# - -happyAdjustOffset :: Happy_GHC_Exts.Int# -> Happy_GHC_Exts.Int# -happyAdjustOffset off = off +happyGotoOffsets = HappyA# "\x17\x00\x00\x00\x0a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x00\x00\x00\x20\x00\x00\x00\x21\x00\x00\x00\x00\x00\x00\x00\x22\x00\x00\x00\x24\x00\x00\x00\x25\x00\x00\x00\x26\x00\x00\x00\x27\x00\x00\x00\x28\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x29\x00\x00\x00\x2a\x00\x00\x00\x2b\x00\x00\x00\x2c\x00\x00\x00\x2d\x00\x00\x00\x2f\x00\x00\x00\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x32\x00\x00\x00\x00\x00\x00\x00\x33\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# happyDefActions :: HappyAddr -happyDefActions = HappyA# "\xfb\xff\x00\x00\xfe\xff\xfc\xff\xf0\xff\x00\x00\x00\x00\x00\x00\x00\x00\xf0\xff\xf0\xff\xf0\xff\xf7\xff\xe8\xff\xf0\xff\xf0\xff\xf0\xff\xf0\xff\xf0\xff\xfb\xff\xfd\xff\xf1\xff\xf2\xff\xf3\xff\xf4\xff\xf5\xff\x00\x00\xe8\xff\xe8\xff\xe8\xff\xe8\xff\xe8\xff\xf0\xff\xe8\xff\xfa\xff\xf9\xff\xf8\xff\xe9\xff\xea\xff\xeb\xff\xec\xff\xee\xff\xed\xff\x00\x00\xf0\xff\xf6\xff\xe8\xff\xef\xff"# +happyDefActions = HappyA# 
"\xfb\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xfc\xff\xff\xff\xf0\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\xff\xff\xff\xf0\xff\xff\xff\xf0\xff\xff\xff\xf7\xff\xff\xff\xe8\xff\xff\xff\xf0\xff\xff\xff\xf0\xff\xff\xff\xf0\xff\xff\xff\xf0\xff\xff\xff\xf0\xff\xff\xff\xfb\xff\xff\xff\xfd\xff\xff\xff\xf1\xff\xff\xff\xf2\xff\xff\xff\xf3\xff\xff\xff\xf4\xff\xff\xff\xf5\xff\xff\xff\x00\x00\x00\x00\xe8\xff\xff\xff\xe8\xff\xff\xff\xe8\xff\xff\xff\xe8\xff\xff\xff\xe8\xff\xff\xff\xf0\xff\xff\xff\xe8\xff\xff\xff\xfa\xff\xff\xff\xf9\xff\xff\xff\xf8\xff\xff\xff\xe9\xff\xff\xff\xea\xff\xff\xff\xeb\xff\xff\xff\xec\xff\xff\xff\xee\xff\xff\xff\xed\xff\xff\xff\x00\x00\x00\x00\xf0\xff\xff\xff\xf6\xff\xff\xff\xe8\xff\xff\xff\xef\xff\xff\xff"# happyCheck :: HappyAddr -happyCheck = HappyA# "\xff\xff\x03\x00\x01\x00\x04\x00\x03\x00\x04\x00\x04\x00\x06\x00\x07\x00\x08\x00\x09\x00\x01\x00\x01\x00\x02\x00\x04\x00\x0a\x00\x06\x00\x07\x00\x08\x00\x09\x00\x05\x00\x06\x00\x07\x00\x08\x00\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x04\x00\x02\x00\x02\x00\xff\xff\x03\x00\x03\x00\x03\x00\x03\x00\xff\xff\x04\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00\xff\xff\x04\x00\x04\x00\x04\x00\x04\x00\x04\x00\x03\x00\xff\xff\x04\x00\x03\x00\xff\xff\x04\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# +happyCheck = HappyA# "\xff\xff\xff\xff\x02\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\x0b\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x05\x00\x00\x00\x05\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\xff\xff\xff\xff\x05\x00\x00\x00\x03\x00\x00\x00\x05\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\xff\xff\xff\xff\x04\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\xff\xff\xff\xff\x04\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\xff\xff\xff\xff\x04\x00\x00\x00\x03\x00\x00\x00\xff\xff\xff\xff\x04\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# happyTable :: HappyAddr -happyTable = HappyA# "\x00\x00\x14\x00\x1c\x00\x0c\x00\x1d\x00\x1e\x00\x0b\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x0e\x00\x02\x00\x03\x00\x0f\x00\xff\xff\x10\x00\x11\x00\x12\x00\x13\x00\x05\x00\x06\x00\x07\x00\x08\x00\x08\x00\x02\x00\x03\x00\x14\x00\x03\x00\x0a\x00\x2d\x00\x2f\x00\x00\x00\x0c\x00\x24\x00\x23\x00\x22\x00\x00\x00\x1a\x00\x19\x00\x18\x00\x17\x00\x16\x00\x15\x00\x00\x00\x2b\x00\x2a\x00\x29\x00\x28\x00\x27\x00\x26\x00\x00\x00\x25\x00\x2d\x00\x00\x00\x2f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# +happyTable = HappyA# 
"\x00\x00\x00\x00\x1c\x00\x00\x00\x14\x00\x00\x00\x1d\x00\x00\x00\x1e\x00\x00\x00\xff\xff\xff\xff\x1f\x00\x00\x00\x20\x00\x00\x00\x21\x00\x00\x00\x22\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x0f\x00\x00\x00\x0c\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x12\x00\x00\x00\x13\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x14\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x2d\x00\x00\x00\x0a\x00\x00\x00\x2f\x00\x00\x00\x0c\x00\x00\x00\x24\x00\x00\x00\x23\x00\x00\x00\x22\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x19\x00\x00\x00\x18\x00\x00\x00\x17\x00\x00\x00\x16\x00\x00\x00\x15\x00\x00\x00\x00\x00\x00\x00\x2b\x00\x00\x00\x2a\x00\x00\x00\x29\x00\x00\x00\x28\x00\x00\x00\x27\x00\x00\x00\x26\x00\x00\x00\x00\x00\x00\x00\x25\x00\x00\x00\x2d\x00\x00\x00\x00\x00\x00\x00\x2f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# happyReduceArr = Happy_Data_Array.array (1, 23) [ (1 , happyReduce_1), @@ -125,85 +113,93 @@ happyReduceArr = Happy_Data_Array.array (1, 23) [ (23 , happyReduce_23) ] -happy_n_terms = 11 :: Prelude.Int +happyRuleArr :: HappyAddr +happyRuleArr = HappyA# "\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x03\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00"# + +happyCatchStates :: [Int] +happyCatchStates = [] + +happy_n_terms = 12 :: Prelude.Int happy_n_nonterms = 5 :: Prelude.Int -happyReduce_1 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happy_n_starts = 1 :: Prelude.Int + +happyReduce_1 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_1 = happySpecReduce_1 0# happyReduction_1 happyReduction_1 happy_x_1 - = case happyOut5 happy_x_1 of { (HappyWrap5 happy_var_1) -> - happyIn4 + = case happyOut6 happy_x_1 of { (HappyWrap6 happy_var_1) -> + happyIn5 (happy_var_1 )} -happyReduce_2 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_2 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_2 = happySpecReduce_3 1# happyReduction_2 happyReduction_2 happy_x_3 happy_x_2 happy_x_1 - = case happyOut6 happy_x_1 of { (HappyWrap6 happy_var_1) -> - case happyOut5 happy_x_3 of { (HappyWrap5 happy_var_3) -> - happyIn5 + = case happyOut7 happy_x_1 of { (HappyWrap7 happy_var_1) -> + case happyOut6 happy_x_3 of { (HappyWrap6 happy_var_3) -> + happyIn6 (happy_var_1 : happy_var_3 )}} -happyReduce_3 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_3 :: () => 
Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_3 = happySpecReduce_1 1# happyReduction_3 happyReduction_3 happy_x_1 - = case happyOut6 happy_x_1 of { (HappyWrap6 happy_var_1) -> - happyIn5 + = case happyOut7 happy_x_1 of { (HappyWrap7 happy_var_1) -> + happyIn6 (happy_var_1 : [] )} -happyReduce_4 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_4 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_4 = happySpecReduce_0 1# happyReduction_4 -happyReduction_4 = happyIn5 +happyReduction_4 = happyIn6 ([] ) -happyReduce_5 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_5 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_5 = happySpecReduce_3 2# happyReduction_5 happyReduction_5 happy_x_3 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_3 of { (HappyWrap7 happy_var_3) -> - happyIn6 + case happyOut8 happy_x_3 of { (HappyWrap8 happy_var_3) -> + happyIn7 (SelfAssign (selfRefVal happy_var_1) happy_var_3 )}} -happyReduce_6 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_6 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_6 = happySpecReduce_3 2# happyReduction_6 happyReduction_6 happy_x_3 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_3 of { (HappyWrap7 happy_var_3) -> - happyIn6 + case happyOut8 happy_x_3 of { (HappyWrap8 happy_var_3) -> + happyIn7 (SubAssign (subRefVal happy_var_1) happy_var_3 )}} -happyReduce_7 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_7 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_7 = happySpecReduce_3 2# happyReduction_7 happyReduction_7 happy_x_3 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_3 of { (HappyWrap7 happy_var_3) -> - happyIn6 + case happyOut8 happy_x_3 of { (HappyWrap8 happy_var_3) -> + happyIn7 (RightmostAssign (rightRefVal happy_var_1) happy_var_3 )}} -happyReduce_8 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_8 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_8 = happySpecReduce_2 2# happyReduction_8 happyReduction_8 happy_x_2 happy_x_1 - = case happyOut7 happy_x_2 of { (HappyWrap7 happy_var_2) -> - happyIn6 + = case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + happyIn7 (Conditional happy_var_2 )} -happyReduce_9 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_9 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_9 = happyReduce 4# 3# 
happyReduction_9 happyReduction_9 (happy_x_4 `HappyStk` happy_x_3 `HappyStk` @@ -211,70 +207,70 @@ happyReduction_9 (happy_x_4 `HappyStk` happy_x_1 `HappyStk` happyRest) = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> case happyOutTok happy_x_3 of { happy_var_3 -> - case happyOut7 happy_x_4 of { (HappyWrap7 happy_var_4) -> - happyIn7 + case happyOut8 happy_x_4 of { (HappyWrap8 happy_var_4) -> + happyIn8 ([happy_var_1] ++ happy_var_2 ++ [happy_var_3] ++ happy_var_4 ) `HappyStk` happyRest}}}} -happyReduce_10 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_10 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_10 = happySpecReduce_2 3# happyReduction_10 happyReduction_10 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { (HappyWrap7 happy_var_2) -> - happyIn7 + case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + happyIn8 (happy_var_1 : happy_var_2 )}} -happyReduce_11 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_11 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_11 = happySpecReduce_2 3# happyReduction_11 happyReduction_11 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { (HappyWrap7 happy_var_2) -> - happyIn7 + case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + happyIn8 (happy_var_1 : happy_var_2 )}} -happyReduce_12 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_12 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_12 = happySpecReduce_2 3# happyReduction_12 happyReduction_12 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { (HappyWrap7 happy_var_2) -> - happyIn7 + case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + happyIn8 (happy_var_1 : happy_var_2 )}} -happyReduce_13 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_13 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_13 = happySpecReduce_2 3# happyReduction_13 happyReduction_13 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { (HappyWrap7 happy_var_2) -> - happyIn7 + case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + happyIn8 (happy_var_1 : happy_var_2 )}} -happyReduce_14 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_14 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_14 = happySpecReduce_2 3# happyReduction_14 happyReduction_14 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { (HappyWrap7 happy_var_2) -> - happyIn7 + case happyOut8 
happy_x_2 of { (HappyWrap8 happy_var_2) -> + happyIn8 (happy_var_1 : happy_var_2 )}} -happyReduce_15 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_15 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_15 = happySpecReduce_0 3# happyReduction_15 -happyReduction_15 = happyIn7 +happyReduction_15 = happyIn8 ([] ) -happyReduce_16 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_16 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_16 = happyReduce 4# 4# happyReduction_16 happyReduction_16 (happy_x_4 `HappyStk` happy_x_3 `HappyStk` @@ -282,209 +278,184 @@ happyReduction_16 (happy_x_4 `HappyStk` happy_x_1 `HappyStk` happyRest) = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> case happyOutTok happy_x_3 of { happy_var_3 -> - case happyOut8 happy_x_4 of { (HappyWrap8 happy_var_4) -> - happyIn8 + case happyOut9 happy_x_4 of { (HappyWrap9 happy_var_4) -> + happyIn9 ([happy_var_1] ++ happy_var_2 ++ [happy_var_3] ++ happy_var_4 ) `HappyStk` happyRest}}}} -happyReduce_17 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_17 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_17 = happySpecReduce_2 4# happyReduction_17 happyReduction_17 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> - happyIn8 + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> + happyIn9 (happy_var_1 : happy_var_2 )}} -happyReduce_18 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_18 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_18 = happySpecReduce_2 4# happyReduction_18 happyReduction_18 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> - happyIn8 + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> + happyIn9 (happy_var_1 : happy_var_2 )}} -happyReduce_19 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_19 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_19 = happySpecReduce_2 4# happyReduction_19 happyReduction_19 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> - happyIn8 + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> + happyIn9 (happy_var_1 : happy_var_2 )}} -happyReduce_20 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_20 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_20 
= happySpecReduce_2 4# happyReduction_20 happyReduction_20 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> - happyIn8 + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> + happyIn9 (happy_var_1 : happy_var_2 )}} -happyReduce_21 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_21 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_21 = happySpecReduce_2 4# happyReduction_21 happyReduction_21 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { (HappyWrap7 happy_var_2) -> - happyIn8 + case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + happyIn9 (happy_var_1 : happy_var_2 )}} -happyReduce_22 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_22 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_22 = happySpecReduce_2 4# happyReduction_22 happyReduction_22 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> - happyIn8 + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> + happyIn9 (happy_var_1 : happy_var_2 )}} -happyReduce_23 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_23 :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_23 = happySpecReduce_0 4# happyReduction_23 -happyReduction_23 = happyIn8 +happyReduction_23 = happyIn9 ([] ) -happyNewToken action sts stk - = lexTokenP(\tk -> - let cont i = happyDoAction i tk action sts stk in - case tk of { - AgTok_EOF -> happyDoAction 10# tk action sts stk; - AgTok_LBrace -> cont 1#; - AgTok_RBrace -> cont 2#; - AgTok_Semicolon -> cont 3#; - AgTok_Eq -> cont 4#; - AgTok_Where -> cont 5#; - AgTok_SelfRef _ -> cont 6#; - AgTok_SubRef _ -> cont 7#; - AgTok_RightmostRef _ -> cont 8#; - AgTok_Unknown _ -> cont 9#; - _ -> happyError' (tk, []) - }) - -happyError_ explist 10# tk = happyError' (tk, explist) -happyError_ explist _ tk = happyError' (tk, explist) - -happyThen :: () => P a -> (a -> P b) -> P b +happyTerminalToTok term = case term of { + AgTok_EOF -> 11#; + AgTok_LBrace -> 2#; + AgTok_RBrace -> 3#; + AgTok_Semicolon -> 4#; + AgTok_Eq -> 5#; + AgTok_Where -> 6#; + AgTok_SelfRef _ -> 7#; + AgTok_SubRef _ -> 8#; + AgTok_RightmostRef _ -> 9#; + AgTok_Unknown _ -> 10#; + _ -> error "Encountered a token that was not declared to happy" + } +{-# NOINLINE happyTerminalToTok #-} + +happyEofTok = 11 + +happyLex kend kmore = lexTokenP (\tk -> case tk of { + AgTok_EOF -> kend tk; + _ -> kmore (happyTerminalToTok tk) tk }) +{-# INLINE happyLex #-} + +happyNewToken action sts stk = happyLex (\tk -> happyDoAction 11# tk action sts stk) (\i tk -> happyDoAction i tk action sts stk) + +happyReport 11# = happyReport' +happyReport _ = happyReport' + + +happyThen :: () => (P a) -> (a -> (P b)) -> (P b) happyThen = (Prelude.>>=) -happyReturn :: () => a -> P a +happyReturn :: () => a -> (P a) happyReturn = (Prelude.return) happyParse :: () => Happy_GHC_Exts.Int# -> P (HappyAbsSyn ) -happyNewToken :: () => 
Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyNewToken :: () => Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) -happyDoAction :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyDoAction :: () => Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) -happyReduceArr :: () => Happy_Data_Array.Array Prelude.Int (Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn )) +happyReduceArr :: () => Happy_Data_Array.Array Prelude.Int (Happy_GHC_Exts.Int# -> AgToken -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn ))) happyThen1 :: () => P a -> (a -> P b) -> P b happyThen1 = happyThen -happyReturn1 :: () => a -> P a +happyFmap1 f m = happyThen m (\a -> happyReturn (f a)) +happyReturn1 :: () => a -> (P a) happyReturn1 = happyReturn -happyError' :: () => ((AgToken), [Prelude.String]) -> P a -happyError' tk = (\(tokens, explist) -> happyError) tk +happyReport' :: () => (AgToken) -> [Prelude.String] -> (P a) -> (P a) +happyReport' = (\tokens expected resume -> happyError) + +happyAbort :: () => (P a) +happyAbort = error "Called abort handler in non-resumptive parser" + agParser = happySomeParser where - happySomeParser = happyThen (happyParse 0#) (\x -> happyReturn (let {(HappyWrap4 x') = happyOut4 x} in x')) + happySomeParser = happyThen (happyParse 0#) (\x -> happyReturn (let {(HappyWrap5 x') = happyOut5 x} in x')) happySeq = happyDontSeq happyError :: P a happyError = failP (\l -> show l ++ ": Parse error\n") -{-# LINE 1 "templates/GenericTemplate.hs" #-} +#define HAPPY_COERCE 1 -- $Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp $ - - - - - - - - - - - +#if !defined(__GLASGOW_HASKELL__) +# error This code isn't being built with GHC. +#endif -- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. 
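-- [Illustrative sketch, not part of the generated parser] `happyReport'` above
-- shows the shape of the new error-reporting hook: it receives the offending
-- token, the pretty-printed expected tokens, and a resumption action, and it
-- returns a parse action. A user-supplied handler for a resumptive grammar
-- could plausibly look like the following (the names `MyToken`, `ParseM` and
-- `recordError` are hypothetical, not taken from this patch):
--
--   reportError :: MyToken -> [String] -> ParseM a -> ParseM a
--   reportError tok expected resume = do
--     recordError ("unexpected " ++ show tok
--                  ++ "; expected one of " ++ unwords expected)
--     resume  -- keep parsing from the nearest `catch` site to collect more errors
--
-- A non-resumptive handler simply ignores `resume`, which is what the default
-- `happyReport'` above does by falling back to `happyError`.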
#if __GLASGOW_HASKELL__ > 706 -#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Prelude.Bool) -#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Prelude.Bool) -#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Prelude.Bool) +# define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Prelude.Bool) +# define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Prelude.Bool) +# define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Prelude.Bool) #else -#define LT(n,m) (n Happy_GHC_Exts.<# m) -#define GTE(n,m) (n Happy_GHC_Exts.>=# m) -#define EQ(n,m) (n Happy_GHC_Exts.==# m) +# define LT(n,m) (n Happy_GHC_Exts.<# m) +# define GTE(n,m) (n Happy_GHC_Exts.>=# m) +# define EQ(n,m) (n Happy_GHC_Exts.==# m) #endif +#define PLUS(n,m) (n Happy_GHC_Exts.+# m) +#define MINUS(n,m) (n Happy_GHC_Exts.-# m) +#define TIMES(n,m) (n Happy_GHC_Exts.*# m) +#define NEGATE(n) (Happy_GHC_Exts.negateInt# (n)) +type Happy_Int = Happy_GHC_Exts.Int# +data Happy_IntList = HappyCons Happy_Int Happy_IntList +#define ERROR_TOK 0# +#define CATCH_TOK 1# +#if defined(HAPPY_COERCE) +# define GET_ERROR_TOKEN(x) (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# i) -> i }) +# define MK_ERROR_TOKEN(i) (Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# i)) +# define MK_TOKEN(x) (happyInTok (x)) +#else +# define GET_ERROR_TOKEN(x) (case x of { HappyErrorToken (Happy_GHC_Exts.I# i) -> i }) +# define MK_ERROR_TOKEN(i) (HappyErrorToken (Happy_GHC_Exts.I# i)) +# define MK_TOKEN(x) (HappyTerminal (x)) +#endif - - - - - - - - - - - - - - - -data Happy_IntList = HappyCons Happy_GHC_Exts.Int# Happy_IntList - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +#if defined(HAPPY_DEBUG) +# define DEBUG_TRACE(s) (happyTrace (s)) $ +happyTrace string expr = Happy_System_IO_Unsafe.unsafePerformIO $ do + Happy_System_IO.hPutStr Happy_System_IO.stderr string + return expr +#else +# define DEBUG_TRACE(s) {- nothing -} +#endif infixr 9 `HappyStk` data HappyStk a = HappyStk a (HappyStk a) @@ -500,197 +471,309 @@ happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll -- If the current token is ERROR_TOK, it means we've just accepted a partial -- parse (a %partial parser). We must ignore the saved token on the top of -- the stack in this case. -happyAccept 0# tk st sts (_ `HappyStk` ans `HappyStk` _) = +happyAccept ERROR_TOK tk st sts (_ `HappyStk` ans `HappyStk` _) = happyReturn1 ans -happyAccept j tk st sts (HappyStk ans _) = +happyAccept j tk st sts (HappyStk ans _) = (happyTcHack j (happyTcHack st)) (happyReturn1 ans) ----------------------------------------------------------------------------- -- Arrays only: do the next action - - -happyDoAction i tk st - = {- nothing -} - case action of - 0# -> {- nothing -} - happyFail (happyExpListPerState ((Happy_GHC_Exts.I# (st)) :: Prelude.Int)) i tk st - -1# -> {- nothing -} - happyAccept i tk st - n | LT(n,(0# :: Happy_GHC_Exts.Int#)) -> {- nothing -} - (happyReduceArr Happy_Data_Array.! 
rule) i tk st - where rule = (Happy_GHC_Exts.I# ((Happy_GHC_Exts.negateInt# ((n Happy_GHC_Exts.+# (1# :: Happy_GHC_Exts.Int#)))))) - n -> {- nothing -} - happyShift new_state i tk st - where new_state = (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) - where off = happyAdjustOffset (indexShortOffAddr happyActOffsets st) - off_i = (off Happy_GHC_Exts.+# i) - check = if GTE(off_i,(0# :: Happy_GHC_Exts.Int#)) - then EQ(indexShortOffAddr happyCheck off_i, i) - else Prelude.False - action - | check = indexShortOffAddr happyTable off_i - | Prelude.otherwise = indexShortOffAddr happyDefActions st - - - - -indexShortOffAddr (HappyA# arr) off = - Happy_GHC_Exts.narrow16Int# i +happyDoAction i tk st = + DEBUG_TRACE("state: " ++ show (Happy_GHC_Exts.I# st) ++ + ",\ttoken: " ++ show (Happy_GHC_Exts.I# i) ++ + ",\taction: ") + case happyDecodeAction (happyNextAction i st) of + HappyFail -> DEBUG_TRACE("failing.\n") + happyFail i tk st + HappyAccept -> DEBUG_TRACE("accept.\n") + happyAccept i tk st + HappyReduce rule -> DEBUG_TRACE("reduce (rule " ++ show (Happy_GHC_Exts.I# rule) ++ ")") + (happyReduceArr Happy_Data_Array.! (Happy_GHC_Exts.I# rule)) i tk st + HappyShift new_state -> DEBUG_TRACE("shift, enter state " ++ show (Happy_GHC_Exts.I# new_state) ++ "\n") + happyShift new_state i tk st + +{-# INLINE happyNextAction #-} +happyNextAction i st = case happyIndexActionTable i st of + Just (Happy_GHC_Exts.I# act) -> act + Nothing -> happyIndexOffAddr happyDefActions st + +{-# INLINE happyIndexActionTable #-} +happyIndexActionTable i st + | GTE(off, 0#), EQ(happyIndexOffAddr happyCheck off, i) + = Prelude.Just (Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off)) + | otherwise + = Prelude.Nothing where - i = Happy_GHC_Exts.word2Int# (Happy_GHC_Exts.or# (Happy_GHC_Exts.uncheckedShiftL# high 8#) low) - high = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr (off' Happy_GHC_Exts.+# 1#))) - low = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr off')) - off' = off Happy_GHC_Exts.*# 2# - + off = PLUS(happyIndexOffAddr happyActOffsets st, i) + +data HappyAction + = HappyFail + | HappyAccept + | HappyReduce Happy_Int -- rule number + | HappyShift Happy_Int -- new state + +{-# INLINE happyDecodeAction #-} +happyDecodeAction :: Happy_Int -> HappyAction +happyDecodeAction 0# = HappyFail +happyDecodeAction -1# = HappyAccept +happyDecodeAction action | LT(action, 0#) = HappyReduce NEGATE(PLUS(action, 1#)) + | otherwise = HappyShift MINUS(action, 1#) + +{-# INLINE happyIndexGotoTable #-} +happyIndexGotoTable nt st = happyIndexOffAddr happyTable off + where + off = PLUS(happyIndexOffAddr happyGotoOffsets st, nt) +{-# INLINE happyIndexOffAddr #-} +happyIndexOffAddr :: HappyAddr -> Happy_Int -> Happy_Int +happyIndexOffAddr (HappyA# arr) off = +#ifdef WORDS_BIGENDIAN + Happy_GHC_Exts.narrow32Int# i + where + i = Happy_GHC_Exts.word2Int# ((b3 `Happy_GHC_Exts.uncheckedShiftL#` 24#) `Happy_GHC_Exts.or#` + (b2 `Happy_GHC_Exts.uncheckedShiftL#` 16#) `Happy_GHC_Exts.or#` + (b1 `Happy_GHC_Exts.uncheckedShiftL#` 8#) `Happy_GHC_Exts.or#` b0) + b3 = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr PLUS(off', 3#))) + b2 = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr PLUS(off', 2#))) + b1 = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr PLUS(off', 1#))) + b0 = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr off')) + 
off' = TIMES(off, 4#) +#else +#if __GLASGOW_HASKELL__ >= 901 + Happy_GHC_Exts.int32ToInt# +#endif + (Happy_GHC_Exts.indexInt32OffAddr# arr off) +#endif +indexRuleArr arr r = (Happy_GHC_Exts.I# nt, Happy_GHC_Exts.I# len) + where + (Happy_GHC_Exts.I# n_starts) = happy_n_starts + offs = TIMES(MINUS(r,n_starts),2#) + nt = happyIndexOffAddr arr offs + len = happyIndexOffAddr arr PLUS(offs,1#) {-# INLINE happyLt #-} happyLt x y = LT(x,y) - readArrayBit arr bit = - Bits.testBit (Happy_GHC_Exts.I# (indexShortOffAddr arr ((unbox_int bit) `Happy_GHC_Exts.iShiftRA#` 4#))) (bit `Prelude.mod` 16) + Bits.testBit (Happy_GHC_Exts.I# (happyIndexOffAddr arr ((unbox_int bit) `Happy_GHC_Exts.iShiftRA#` 5#))) (bit `Prelude.mod` 32) where unbox_int (Happy_GHC_Exts.I# x) = x - - - - - data HappyAddr = HappyA# Happy_GHC_Exts.Addr# - ------------------------------------------------------------------------------ --- HappyState data type (not arrays) - - - - - - - - - - - - - ----------------------------------------------------------------------------- -- Shifting a token -happyShift new_state 0# tk st sts stk@(x `HappyStk` _) = - let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in --- trace "shifting the error token" $ - happyDoAction i tk new_state (HappyCons (st) (sts)) (stk) +happyShift new_state ERROR_TOK tk st sts stk@(x `HappyStk` _) = + let i = GET_ERROR_TOKEN(x) in + DEBUG_TRACE("shifting the error token") + happyDoAction i tk new_state (HappyCons st sts) stk +-- TODO: When `i` would enter error recovery again, we should instead +-- discard input until the lookahead is acceptable. Perhaps this is +-- simplest to implement in CodeGen for productions using `error`; +-- there we know the context and can implement local shift+discard actions. +-- still need to remember parser-defined error site, though. happyShift new_state i tk st sts stk = - happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk) + happyNewToken new_state (HappyCons st sts) (MK_TOKEN(tk) `HappyStk` stk) -- happyReduce is specialised for the common cases. 
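-- [Illustrative note, not part of the template] A worked example of the action
-- encoding decoded by `happyDecodeAction` above:
--
--    encoded value   decoded action
--    0#              HappyFail
--    -1#             HappyAccept
--    -5#             HappyReduce 4#   -- negate (action + 1)
--    7#              HappyShift 6#    -- action - 1
--
-- `happyNextAction` first probes the (happyCheck, happyTable) pair at the row
-- offset for the current state and falls back to `happyDefActions` when the
-- check entry does not match the lookahead.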
-happySpecReduce_0 i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk -happySpecReduce_0 nt fn j tk st@((action)) sts stk - = happyGoto nt j tk st (HappyCons (st) (sts)) (fn `HappyStk` stk) +happySpecReduce_0 nt fn j tk st sts stk + = happySeq fn (happyGoto nt j tk st (HappyCons st sts) (fn `HappyStk` stk)) -happySpecReduce_1 i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk -happySpecReduce_1 nt fn j tk _ sts@((HappyCons (st@(action)) (_))) (v1`HappyStk`stk') +happySpecReduce_1 nt fn j tk old_st sts@(HappyCons st _) (v1 `HappyStk` stk') = let r = fn v1 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happySpecReduce_2 i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk -happySpecReduce_2 nt fn j tk _ (HappyCons (_) (sts@((HappyCons (st@(action)) (_))))) (v1`HappyStk`v2`HappyStk`stk') +happySpecReduce_2 nt fn j tk old_st + (HappyCons _ sts@(HappyCons st _)) + (v1 `HappyStk` v2 `HappyStk` stk') = let r = fn v1 v2 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happySpecReduce_3 i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk -happySpecReduce_3 nt fn j tk _ (HappyCons (_) ((HappyCons (_) (sts@((HappyCons (st@(action)) (_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk') +happySpecReduce_3 nt fn j tk old_st + (HappyCons _ (HappyCons _ sts@(HappyCons st _))) + (v1 `HappyStk` v2 `HappyStk` v3 `HappyStk` stk') = let r = fn v1 v2 v3 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happyReduce k i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk happyReduce k nt fn j tk st sts stk - = case happyDrop (k Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) sts of - sts1@((HappyCons (st1@(action)) (_))) -> - let r = fn stk in -- it doesn't hurt to always seq here... + = case happyDrop k (HappyCons st sts) of + sts1@(HappyCons st1 _) -> + let r = fn stk in -- it doesn't hurt to always seq here... 
happyDoSeq r (happyGoto nt j tk st1 sts1 r) -happyMonadReduce k nt fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk happyMonadReduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons (st) (sts)) of - sts1@((HappyCons (st1@(action)) (_))) -> + case happyDrop k (HappyCons st sts) of + sts1@(HappyCons st1 _) -> let drop_stk = happyDropStk k stk in - happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) + happyThen1 (fn stk tk) + (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) -happyMonad2Reduce k nt fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk happyMonad2Reduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons (st) (sts)) of - sts1@((HappyCons (st1@(action)) (_))) -> - let drop_stk = happyDropStk k stk - - off = happyAdjustOffset (indexShortOffAddr happyGotoOffsets st1) - off_i = (off Happy_GHC_Exts.+# nt) - new_state = indexShortOffAddr happyTable off_i - - - - + j `happyTcHack` case happyDrop k (HappyCons st sts) of + sts1@(HappyCons st1 _) -> + let drop_stk = happyDropStk k stk + new_state = happyIndexGotoTable nt st1 in - happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) + happyThen1 (fn stk tk) + (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) -happyDrop 0# l = l -happyDrop n (HappyCons (_) (t)) = happyDrop (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) t +happyDrop 0# l = l +happyDrop n (HappyCons _ t) = happyDrop MINUS(n,(1# :: Happy_Int)) t -happyDropStk 0# l = l -happyDropStk n (x `HappyStk` xs) = happyDropStk (n Happy_GHC_Exts.-# (1#::Happy_GHC_Exts.Int#)) xs +happyDropStk 0# l = l +happyDropStk n (x `HappyStk` xs) = happyDropStk MINUS(n,(1#::Happy_Int)) xs ----------------------------------------------------------------------------- -- Moving to a new state after a reduction - -happyGoto nt j tk st = - {- nothing -} +happyGoto nt j tk st = + DEBUG_TRACE(", goto state " ++ show (Happy_GHC_Exts.I# new_state) ++ "\n") happyDoAction j tk new_state - where off = happyAdjustOffset (indexShortOffAddr happyGotoOffsets st) - off_i = (off Happy_GHC_Exts.+# nt) - new_state = indexShortOffAddr happyTable off_i - - - + where new_state = happyIndexGotoTable nt st ----------------------------------------------------------------------------- --- Error recovery (ERROR_TOK is the error token) - --- parse error if we are in recovery and we fail again -happyFail explist 0# tk old_st _ stk@(x `HappyStk` _) = - let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in --- trace "failing" $ - happyError_ explist i tk - -{- We don't need state discarding for our restricted implementation of - "error". In fact, it can cause some bogus parses, so I've disabled it - for now --SDM - --- discard a state -happyFail ERROR_TOK tk old_st CONS(HAPPYSTATE(action),sts) - (saved_tok `HappyStk` _ `HappyStk` stk) = --- trace ("discarding state, depth " ++ show (length stk)) $ - DO_ACTION(action,ERROR_TOK,tk,sts,(saved_tok`HappyStk`stk)) --} - --- Enter error recovery: generate an error token, --- save the old token and carry on. -happyFail explist i tk (action) sts stk = --- trace "entering error recovery" $ - happyDoAction 0# tk action sts ((Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# (i))) `HappyStk` stk) +-- Error recovery +-- +-- When there is no applicable action for the current lookahead token `tk`, +-- happy enters error recovery mode. It works in 2 phases: +-- +-- 1. Fixup: Try to see if there is an action for the error token (`errorTok`, +-- which is ERROR_TOK). 
If there is, do *not* emit an error and pretend
+--    instead that an `errorTok` was inserted.
+--    When there is no `errorTok` action, call the error handler
+--    (e.g., `happyError`) with the resumption continuation `happyResume`.
+-- 2. Error resumption mode: If the error handler wants to resume parsing in
+--    order to report multiple parse errors, it will call the resumption
+--    continuation (of result type `P (Maybe a)`).
+--    In the absence of the %resumptive declaration, this resumption will
+--    always (do a bit of work, and) `return Nothing`.
+--    In the presence of the %resumptive declaration, the grammar author
+--    can use the special `catch` terminal to declare where parsing should
+--    resume after an error.
+--    E.g., if `stmt : expr ';' | catch ';'` then the resumption will
+--
+--      (a) Pop off the state stack until it finds an item
+--            `stmt -> . catch ';'`.
+--          Then, it will push a `catchTok` onto the stack, perform a shift and
+--          end up in item `stmt -> catch . ';'`.
+--      (b) Discard tokens from the lexer until it finds ';'.
+--          (In general, it will discard until the lookahead has a non-default
+--           action, i.e. matches a token that applies
+--           in the situation `P -> α catch . β`, where β might be empty.)
+--
+-- The `catch` resumption mechanism (2) is what is usually associated with
+-- `error` in `bison` or `menhir`. Since `error` is used for the Fixup mechanism
+-- (1) above, we call the corresponding token `catch`.
+
+-- Enter error Fixup: generate an error token,
+--                    save the old token and carry on.
+-- When a `happyShift` accepts, we will pop off the error
+-- token to resume parsing with the current lookahead `i`.
+happyTryFixup i tk action sts stk =
+  DEBUG_TRACE("entering `error` fixup.\n")
+  happyDoAction ERROR_TOK tk action sts (MK_ERROR_TOKEN(i) `HappyStk` stk)
+  -- NB: `happyShift` will simply pop the error token and carry on with
+  --     `tk`. Hence we don't change `tk` in the call here
+
+-- parse error if we are in fixup and fail again
+happyFixupFailed tk st sts (x `HappyStk` stk) =
+  let i = GET_ERROR_TOKEN(x) in
+  DEBUG_TRACE("`error` fixup failed.\n")
+  let resume   = happyResume i tk st sts stk
+      expected = map happyTokenToString (happyExpectedTokens st sts) in
+  if happyAlreadyInResumption st sts
+    then resume
+    else happyReport i tk expected resume
+
+happyAlreadyInResumption st sts
+  | (Happy_GHC_Exts.I# n_starts) <- happy_n_starts, LT(st, n_starts)
+  = False -- end of the stack
+  | (Happy_GHC_Exts.I# st) `elem` happyCatchStates
+  = True
+  | HappyCons st1 sts1 <- sts
+  = happyAlreadyInResumption st1 sts1
+
+happyFail ERROR_TOK = happyFixupFailed
+happyFail i         = happyTryFixup i
+
+happyResume i tk st sts stk = pop_items st sts stk
+  where
+    pop_items st sts stk
+      | HappyShift new_state <- happyDecodeAction (happyNextAction CATCH_TOK st)
+      = DEBUG_TRACE("shifting catch token " ++ show (Happy_GHC_Exts.I# st)
+                    ++ " -> " ++ show (Happy_GHC_Exts.I# new_state) ++ "\n")
+        discard_input_until_exp i tk new_state (HappyCons st sts) (MK_ERROR_TOKEN(i) `HappyStk` stk)
+      | DEBUG_TRACE("can't shift catch in " ++ show (Happy_GHC_Exts.I# st) ++ ", ") True
+      , (Happy_GHC_Exts.I# n_starts) <- happy_n_starts, LT(st, n_starts)
+      = DEBUG_TRACE("because it is a start state.

no resumption.\n") + happyAbort + | (HappyCons st1 sts1) <- sts, _ `HappyStk` stk1 <- stk + = DEBUG_TRACE("discarding.\n") + pop_items st1 sts1 stk1 + discard_input_until_exp i tk st sts stk + | ultimately_fails i st sts + = DEBUG_TRACE("discard token in state " ++ show (Happy_GHC_Exts.I# st) + ++ ": " ++ show (Happy_GHC_Exts.I# i) ++ "\n") + happyLex (\_eof_tk -> happyAbort) + (\i tk -> discard_input_until_exp i tk st sts stk) -- not eof + | otherwise + = DEBUG_TRACE("found expected token in state " ++ show (Happy_GHC_Exts.I# st) + ++ ": " ++ show (Happy_GHC_Exts.I# i) ++ "\n") + (happyDoAction i tk st sts stk) + + ultimately_fails i st sts = + DEBUG_TRACE("trying token " ++ show (Happy_GHC_Exts.I# i) + ++ " in state " ++ show (Happy_GHC_Exts.I# st) ++ ": ") + case happyDecodeAction (happyNextAction i st) of + HappyFail -> DEBUG_TRACE("fail.\n") True + HappyAccept -> DEBUG_TRACE("accept.\n") False + HappyShift _ -> DEBUG_TRACE("shift.\n") False + HappyReduce r -> case happySimulateReduce r st sts of + HappyCons st1 sts1 -> ultimately_fails i st1 sts1 + +happySimulateReduce r st sts = + DEBUG_TRACE("simulate reduction of rule " ++ show r ++ ", ") + let (Happy_GHC_Exts.I# nt, Happy_GHC_Exts.I# len) = indexRuleArr happyRuleArr r in + DEBUG_TRACE("nt " ++ show (Happy_GHC_Exts.I# nt) ++ ", len: " + ++ show (Happy_GHC_Exts.I# len) ++ ", new_st ") + let sts1@(HappyCons st1 _) = happyDrop len (HappyCons st sts) + new_st = happyIndexGotoTable nt st1 in + DEBUG_TRACE(show (Happy_GHC_Exts.I# new_st) ++ ".\n") + HappyCons new_st sts1 + +happyTokenToString i = happyTokenStrings Prelude.!! (i Prelude.- 2) +happyExpectedTokens st sts = + DEBUG_TRACE("constructing expected tokens.\n") + search_shifts st sts [] + where + search_shifts st sts shifts = foldr (add_action st sts) shifts (distinct_actions st) + add_action st sts (Happy_GHC_Exts.I# i, Happy_GHC_Exts.I# act) shifts = + DEBUG_TRACE("found action in state " ++ show (Happy_GHC_Exts.I# st) + ++ ", input " ++ show (Happy_GHC_Exts.I# i) ++ ", " + ++ show (happyDecodeAction act) ++ "\n") + case happyDecodeAction act of + HappyFail -> shifts + HappyAccept -> shifts -- This would always be %eof or error... Not helpful + HappyShift _ -> Happy_Data_List.insert (Happy_GHC_Exts.I# i) shifts + HappyReduce r -> case happySimulateReduce r st sts of + HappyCons st1 sts1 -> search_shifts st1 sts1 shifts + distinct_actions st + = ((-1), Happy_GHC_Exts.I# (happyIndexOffAddr happyDefActions st)) + : [ (i, act) | i <- [begin_i..happy_n_terms], act <- get_act row_off i ] + where + row_off = happyIndexOffAddr happyActOffsets st + begin_i = 2 -- +2: errorTok,catchTok + get_act off (Happy_GHC_Exts.I# i) + | let off_i = PLUS(off,i) + , GTE(off_i,0#) + , EQ(happyIndexOffAddr happyCheck off_i,i) + = [Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off_i)] + | otherwise + = [] -- Internal happy errors: @@ -700,14 +783,12 @@ notHappyAtAll = Prelude.error "Internal Happy error\n" ----------------------------------------------------------------------------- -- Hack to get the typechecker to accept our action functions - -happyTcHack :: Happy_GHC_Exts.Int# -> a -> a +happyTcHack :: Happy_Int -> a -> a happyTcHack x y = y {-# INLINE happyTcHack #-} - ----------------------------------------------------------------------------- --- Seq-ing. If the --strict flag is given, then Happy emits +-- Seq-ing. 
If the --strict flag is given, then Happy emits -- happySeq = happyDoSeq -- otherwise it emits -- happySeq = happyDontSeq @@ -721,7 +802,6 @@ happyDontSeq a b = b -- of deciding to inline happyGoto everywhere, which increases the size of -- the generated parser quite a bit. - {-# NOINLINE happyDoAction #-} {-# NOINLINE happyTable #-} {-# NOINLINE happyCheck #-} diff --git a/packages/frontend/src/Happy/Frontend/Lexer.lhs b/packages/frontend/src/Happy/Frontend/Lexer.lhs index 8bfe65bd..365ce50c 100644 --- a/packages/frontend/src/Happy/Frontend/Lexer.lhs +++ b/packages/frontend/src/Happy/Frontend/Lexer.lhs @@ -37,7 +37,6 @@ The lexer. > | TokSpecId_Token -- %token > | TokSpecId_Name -- %name > | TokSpecId_Partial -- %partial -> | TokSpecId_ErrorHandlerType -- %errorhandlertype > | TokSpecId_Lexer -- %lexer > | TokSpecId_ImportedIdentity -- %importedidentity > | TokSpecId_Monad -- %monad @@ -48,6 +47,7 @@ The lexer. > | TokSpecId_Shift -- %shift > | TokSpecId_Expect -- %expect > | TokSpecId_Error -- %error +> | TokSpecId_ErrorExpected -- %error.expected > | TokSpecId_Attributetype -- %attributetype > | TokSpecId_Attribute -- %attribute > | TokCodeQuote -- stuff inside { .. } @@ -103,42 +103,45 @@ followed by a special identifier. > lexPercent :: (Token -> Pfunc a) -> [Char] -> Int -> ParseResult a > lexPercent cont s = case s of > '%':rest -> cont (TokenKW TokDoublePercent) rest -> 't':'o':'k':'e':'n':'t':'y':'p':'e':rest -> +> 't':'o':'k':'e':'n':'t':'y':'p':'e':rest | end_of_id rest -> > cont (TokenKW TokSpecId_TokenType) rest -> 't':'o':'k':'e':'n':rest -> +> 't':'o':'k':'e':'n':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Token) rest -> 'n':'a':'m':'e':rest -> +> 'n':'a':'m':'e':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Name) rest -> 'p':'a':'r':'t':'i':'a':'l':rest -> +> 'p':'a':'r':'t':'i':'a':'l':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Partial) rest -> 'i':'m':'p':'o':'r':'t':'e':'d':'i':'d':'e':'n':'t':'i':'t':'y':rest -> +> 'i':'m':'p':'o':'r':'t':'e':'d':'i':'d':'e':'n':'t':'i':'t':'y':rest | end_of_id rest -> > cont (TokenKW TokSpecId_ImportedIdentity) rest -> 'm':'o':'n':'a':'d':rest -> +> 'm':'o':'n':'a':'d':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Monad) rest -> 'l':'e':'x':'e':'r':rest -> +> 'l':'e':'x':'e':'r':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Lexer) rest -> 'n':'o':'n':'a':'s':'s':'o':'c':rest -> +> 'n':'o':'n':'a':'s':'s':'o':'c':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Nonassoc) rest -> 'l':'e':'f':'t':rest -> +> 'l':'e':'f':'t':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Left) rest -> 'r':'i':'g':'h':'t':rest -> +> 'r':'i':'g':'h':'t':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Right) rest -> 'p':'r':'e':'c':rest -> +> 'p':'r':'e':'c':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Prec) rest -> 's':'h':'i':'f':'t':rest -> +> 's':'h':'i':'f':'t':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Shift) rest -> 'e':'x':'p':'e':'c':'t':rest -> +> 'e':'x':'p':'e':'c':'t':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Expect) rest -> 'e':'r':'r':'o':'r':'h':'a':'n':'d':'l':'e':'r':'t':'y':'p':'e':rest -> -> cont (TokenKW TokSpecId_ErrorHandlerType) rest -> 'e':'r':'r':'o':'r':rest -> +> 'e':'r':'r':'o':'r':'.':'e':'x':'p':'e':'c':'t':'e':'d':rest | end_of_id rest -> +> cont (TokenKW TokSpecId_ErrorExpected) rest +> 'e':'r':'r':'o':'r':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Error) rest -> 'a':'t':'t':'r':'i':'b':'u':'t':'e':'t':'y':'p':'e':rest -> +> 
'a':'t':'t':'r':'i':'b':'u':'t':'e':'t':'y':'p':'e':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Attributetype) rest -> 'a':'t':'t':'r':'i':'b':'u':'t':'e':rest -> +> 'a':'t':'t':'r':'i':'b':'u':'t':'e':rest | end_of_id rest -> > cont (TokenKW TokSpecId_Attribute) rest > _ -> lexError ("unrecognised directive: %" ++ > takeWhile (not.isSpace) s) s +> where +> end_of_id (c:_) = not (isAlphaNum c) +> end_of_id [] = True > lexColon :: (Token -> Pfunc a) -> [Char] -> Int -> ParseResult a > lexColon cont (':':rest) = cont (TokenKW TokDoubleColon) rest @@ -177,24 +180,24 @@ here is a bit tricky, but should work in most cases. > -> ParseResult b > lexReadCode s n c = case s of > '\n':r -> \cont l -> lexReadCode r n ('\n':c) cont (l+1) -> + > '{' :r -> lexReadCode r (n+1) ('{':c) -> + > '}' :r > | n == 0 -> \cont -> cont (TokenInfo ( > cleanupCode (reverse c)) TokCodeQuote) r > | otherwise -> lexReadCode r (n-1) ('}':c) -> + > '"'{-"-}:r -> lexReadString r (\ str r' -> > lexReadCode r' n ('"' : (reverse str) ++ '"' : c)) -> + > a: '\'':r | isAlphaNum a -> lexReadCode r n ('\'':a:c) -> + > '\'' :r -> lexReadSingleChar r (\ str r' -> > lexReadCode r' n ((reverse str) ++ '\'' : c)) -> + > ch:r -> lexReadCode r n (ch:c) -> + > [] -> \_cont -> lexError "No closing '}' in code segment" [] ---------------------------------------------------------------------------- diff --git a/packages/frontend/src/Happy/Frontend/Mangler.lhs b/packages/frontend/src/Happy/Frontend/Mangler.lhs index c1108d2a..239f8674 100644 --- a/packages/frontend/src/Happy/Frontend/Mangler.lhs +++ b/packages/frontend/src/Happy/Frontend/Mangler.lhs @@ -12,7 +12,11 @@ Mangler converts AbsSyn to Grammar > import Happy.Grammar > import Happy.Frontend.AbsSyn > import Happy.Frontend.Mangler.Monad + +This is only supported in the bootstrapped version +#ifdef HAPPY_BOOTSTRAP > import Happy.Frontend.AttrGrammar.Mangler +#endif > import Happy.Frontend.ParamRules @@ -65,25 +69,32 @@ This bit is a real mess, mainly because of the error message support. > starts' = case getParserNames dirs of > [] -> [TokenName "happyParse" Nothing False] > ns -> ns +> error_resumptive | ResumptiveErrorHandler{} <- getError dirs = True +> | otherwise = False > + > start_strs = [ startName++'_':p | (TokenName p _ _) <- starts' ] Build up a mapping from name values to strings. > name_env = (errorTok, errorName) : +> (catchTok, catchName) : > (dummyTok, dummyName) : > zip start_names start_strs ++ > zip nonterm_names nonterm_strs ++ > zip terminal_names terminal_strs > lookupName :: String -> [Name] -> lookupName n = [ t | (t,r) <- name_env, r == n ] +> lookupName n = [ t | (t,r) <- name_env, r == n +> , t /= catchTok || error_resumptive ] +> -- hide catchName unless %errorresumptive is active +> -- issue93.y uses catch as a nonterminal, we should not steal it > mapToName str' = > case lookupName str' of > [a] -> return a > [] -> do addErr ("unknown identifier '" ++ str' ++ "'") -> return errorTok +> return errorTok -- SG: What a confusing use of errorTok.. Use dummyTok? > (a:_) -> do addErr ("multiple use of '" ++ str' ++ "'") > return a @@ -106,7 +117,7 @@ Start symbols... Deal with priorities... > priodir = zip [1..] (getPrios dirs) -> + > mkPrio :: Int -> Directive a -> Priority > mkPrio i (TokenNonassoc _) = Prio None i > mkPrio i (TokenRight _) = Prio RightAssoc i @@ -129,13 +140,13 @@ Translate the rules from string to name-based. 
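As an aside on the `error_resumptive` and `catchTok` handling above: a sketch of
a grammar that would exercise it (hypothetical; the handler names and rules are
made up for illustration and are not taken from this patch) looks like

    %error { abortParse } { reportError }    -- two handlers => resumptive

    Stmts : {- empty -}        { [] }
          | Stmt ';' Stmts     { $1 : $3 }

    Stmt  : Exp                { Just $1 }
          | catch              { Nothing }   -- resume here after a parse error

Only when the two-argument form of %error is given (mapped by getError to
ResumptiveErrorHandler) does lookupName expose the `catch` terminal, so grammars
that already use `catch` as an ordinary identifier keep working.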
> convNT (Rule1 nt prods ty) > = do nt' <- mapToName nt > return (nt', prods, ty) -> + > attrs = getAttributes dirs > attrType = fromMaybe "HappyAttrs" (getAttributetype dirs) -> + > transRule (nt, prods, _ty) > = mapM (finishRule nt) prods -> + > finishRule :: Name -> Prod1 -> Writer [ErrMsg] Production > finishRule nt (Prod1 lhs code line prec) > = mapWriter (\(a,e) -> (a, map (addLine line) e)) $ do @@ -145,7 +156,7 @@ Translate the rules from string to name-based. > Left s -> do addErr ("Undeclared precedence token: " ++ s) > return (Production nt lhs' code' No) > Right p -> return (Production nt lhs' code' p) -> + > mkPrec :: [Name] -> Prec -> Either String Priority > mkPrec lhs PrecNone = > case filter (flip elem terminal_names) lhs of @@ -157,9 +168,9 @@ Translate the rules from string to name-based. > case lookup s prioByString of > Nothing -> Left s > Just p -> Right p -> + > mkPrec _ PrecShift = Right PrioLowest -> + > -- in > rules1 <- mapM convNT rules @@ -168,7 +179,7 @@ Translate the rules from string to name-based. > let > type_env = [(nt, t) | Rule1 nt _ (Just (t,[])) <- rules] ++ > [(nt, getTokenType dirs) | nt <- terminal_strs] -- XXX: Doesn't handle $$ type! -> + > fixType (ty,s) = go "" ty > where go acc [] = return (reverse acc) > go acc (c:r) | isLower c = -- look for a run of alphanumerics starting with a lower case letter @@ -182,14 +193,14 @@ Translate the rules from string to name-based. > go1 (c:cs) > Just t -> go1 $ "(" ++ t ++ ")" > | otherwise = go (c:acc) r -> + > convType (nm, t) > = do t' <- fixType t > return (nm, t') -> + > -- in > tys <- mapM convType [ (nm, t) | (nm, _, Just t) <- rules1 ] -> + > let > type_array :: Array Int (Maybe String) @@ -215,7 +226,7 @@ Get the token specs in terms of Names. > lookup_prods :: Name -> [Int] > lookup_prods x | x >= firstStartTok && x < first_t = arr ! x > lookup_prods _ = error "lookup_prods" -> + > productions' = start_prods ++ concat rules2 > prod_array = listArray (0,length productions' - 1) productions' > -- in @@ -225,7 +236,7 @@ Get the token specs in terms of Names. > lookupProdNo = (prod_array !), > lookupProdsOfName = lookup_prods, > token_specs = tokspec, -> terminals = errorTok : terminal_names, +> terminals = errorTok : catchTok : terminal_names, > non_terminals = start_names ++ nonterm_names, > -- INCLUDES the %start tokens > starts = zip4 parser_names start_names start_toks @@ -244,7 +255,7 @@ Get the token specs in terms of Names. > monad = getMonad dirs, > lexer = getLexer dirs, > error_handler = getError dirs, -> error_sig = getErrorHandlerType dirs, +> error_expected = getErrorHandlerExpectedList dirs, > token_type = getTokenType dirs, > expect = getExpect dirs > }) @@ -258,7 +269,7 @@ Gofer-like stuff: > combine [] = [] > combine ((a,b):(c,d):r) | a == c = combine ((a,b++d) : r) > combine (a:r) = a : combine r -> + For combining actions with possible error messages. @@ -287,7 +298,13 @@ So is this. > checkCode :: Int -> [Name] -> [Name] -> String -> [(String,String)] -> M (String,[Int]) > checkCode arity _ _ code [] = doCheckCode arity code +#ifdef HAPPY_BOOTSTRAP > checkCode arity lhs nonterm_names code attrs = rewriteAttributeGrammar arity lhs nonterm_names code attrs +#else +> checkCode arity _ _ code (_:_) = do +> addErr "Attribute grammars are not supported in non-bootstrapped build" +> doCheckCode arity code +#endif ----------------------------------------------------------------------------- -- Check for every $i that i is <= the arity of the rule. @@ -300,7 +317,7 @@ So is this. 
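(Illustration only, not part of this patch: for a hypothetical rule of arity 3,

    Exp : Exp '+' Term          { Plus $1 $> }

the check below accepts `$1` since 1 <= 3 and rewrites `$>` to the rightmost
argument, i.e. the same variable as `$3`, whereas a reference such as `$4`
would be flagged as exceeding the rule's arity.)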
> where go code acc used = > case code of > [] -> return (reverse acc, used) -> + > '"' :r -> case reads code :: [(String,String)] of > [] -> go r ('"':acc) used > (s,r'):_ -> go r' (reverse (show s) ++ acc) used @@ -309,13 +326,13 @@ So is this. > [] -> go r ('\'':acc) used > (c,r'):_ -> go r' (reverse (show c) ++ acc) used > '\\':'$':r -> go r ('$':acc) used -> + > '$':'>':r -- the "rightmost token" > | arity == 0 -> do addErr "$> in empty rule" > go r acc used > | otherwise -> go r (reverse (mkHappyVar arity) ++ acc) > (arity : used) -> + > '$':r@(i:_) | isDigit i -> > case reads r :: [(Int,String)] of > (j,r'):_ -> diff --git a/packages/frontend/src/Happy/Frontend/Parser.hs b/packages/frontend/src/Happy/Frontend/Parser.hs index a1062944..1be81891 100644 --- a/packages/frontend/src/Happy/Frontend/Parser.hs +++ b/packages/frontend/src/Happy/Frontend/Parser.hs @@ -1,7 +1,11 @@ {-# OPTIONS_GHC -w #-} -{-# OPTIONS -XMagicHash -XBangPatterns -XTypeSynonymInstances -XFlexibleInstances -cpp #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE MagicHash #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE FlexibleInstances #-} #if __GLASGOW_HASKELL__ >= 710 -{-# OPTIONS_GHC -XPartialTypeSignatures #-} +{-# LANGUAGE PartialTypeSignatures #-} #endif {-# OPTIONS_GHC -w #-} module Happy.Frontend.Parser (ourParser) where @@ -10,12 +14,13 @@ import Happy.Frontend.ParseMonad import Happy.Frontend.AbsSyn import Happy.Frontend.Lexer import qualified Data.Array as Happy_Data_Array +import qualified Data.List as Happy_Data_List import qualified Data.Bits as Bits import qualified GHC.Exts as Happy_GHC_Exts import Control.Applicative(Applicative(..)) import Control.Monad (ap) --- parser produced by Happy Version 1.20.1.1 +-- parser produced by Happy Version 2.0 newtype HappyAbsSyn = HappyAbsSyn HappyAny #if __GLASGOW_HASKELL__ >= 607 @@ -23,36 +28,29 @@ type HappyAny = Happy_GHC_Exts.Any #else type HappyAny = forall a . 
a #endif -newtype HappyWrap4 = HappyWrap4 (BookendedAbsSyn) -happyIn4 :: (BookendedAbsSyn) -> (HappyAbsSyn ) -happyIn4 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap4 x) -{-# INLINE happyIn4 #-} -happyOut4 :: (HappyAbsSyn ) -> HappyWrap4 -happyOut4 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut4 #-} -newtype HappyWrap5 = HappyWrap5 (AbsSyn) -happyIn5 :: (AbsSyn) -> (HappyAbsSyn ) +newtype HappyWrap5 = HappyWrap5 (BookendedAbsSyn) +happyIn5 :: (BookendedAbsSyn) -> (HappyAbsSyn ) happyIn5 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap5 x) {-# INLINE happyIn5 #-} happyOut5 :: (HappyAbsSyn ) -> HappyWrap5 happyOut5 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut5 #-} -newtype HappyWrap6 = HappyWrap6 ([Rule]) -happyIn6 :: ([Rule]) -> (HappyAbsSyn ) +newtype HappyWrap6 = HappyWrap6 (AbsSyn) +happyIn6 :: (AbsSyn) -> (HappyAbsSyn ) happyIn6 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap6 x) {-# INLINE happyIn6 #-} happyOut6 :: (HappyAbsSyn ) -> HappyWrap6 happyOut6 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut6 #-} -newtype HappyWrap7 = HappyWrap7 (Rule) -happyIn7 :: (Rule) -> (HappyAbsSyn ) +newtype HappyWrap7 = HappyWrap7 ([Rule]) +happyIn7 :: ([Rule]) -> (HappyAbsSyn ) happyIn7 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap7 x) {-# INLINE happyIn7 #-} happyOut7 :: (HappyAbsSyn ) -> HappyWrap7 happyOut7 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut7 #-} -newtype HappyWrap8 = HappyWrap8 ([String]) -happyIn8 :: ([String]) -> (HappyAbsSyn ) +newtype HappyWrap8 = HappyWrap8 (Rule) +happyIn8 :: (Rule) -> (HappyAbsSyn ) happyIn8 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap8 x) {-# INLINE happyIn8 #-} happyOut8 :: (HappyAbsSyn ) -> HappyWrap8 @@ -65,29 +63,29 @@ happyIn9 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap9 x) happyOut9 :: (HappyAbsSyn ) -> HappyWrap9 happyOut9 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut9 #-} -newtype HappyWrap10 = HappyWrap10 ([Prod]) -happyIn10 :: ([Prod]) -> (HappyAbsSyn ) +newtype HappyWrap10 = HappyWrap10 ([String]) +happyIn10 :: ([String]) -> (HappyAbsSyn ) happyIn10 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap10 x) {-# INLINE happyIn10 #-} happyOut10 :: (HappyAbsSyn ) -> HappyWrap10 happyOut10 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut10 #-} -newtype HappyWrap11 = HappyWrap11 (Prod) -happyIn11 :: (Prod) -> (HappyAbsSyn ) +newtype HappyWrap11 = HappyWrap11 ([Prod]) +happyIn11 :: ([Prod]) -> (HappyAbsSyn ) happyIn11 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap11 x) {-# INLINE happyIn11 #-} happyOut11 :: (HappyAbsSyn ) -> HappyWrap11 happyOut11 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut11 #-} -newtype HappyWrap12 = HappyWrap12 (Term) -happyIn12 :: (Term) -> (HappyAbsSyn ) +newtype HappyWrap12 = HappyWrap12 (Prod) +happyIn12 :: (Prod) -> (HappyAbsSyn ) happyIn12 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap12 x) {-# INLINE happyIn12 #-} happyOut12 :: (HappyAbsSyn ) -> HappyWrap12 happyOut12 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut12 #-} -newtype HappyWrap13 = HappyWrap13 ([Term]) -happyIn13 :: ([Term]) -> (HappyAbsSyn ) +newtype HappyWrap13 = HappyWrap13 (Term) +happyIn13 :: (Term) -> (HappyAbsSyn ) happyIn13 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap13 x) {-# INLINE happyIn13 #-} happyOut13 :: (HappyAbsSyn ) -> HappyWrap13 @@ -107,62 +105,69 @@ happyIn15 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap15 x) happyOut15 :: (HappyAbsSyn ) -> HappyWrap15 happyOut15 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut15 #-} -newtype HappyWrap16 = HappyWrap16 (Prec) -happyIn16 :: (Prec) -> (HappyAbsSyn ) +newtype 
HappyWrap16 = HappyWrap16 ([Term]) +happyIn16 :: ([Term]) -> (HappyAbsSyn ) happyIn16 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap16 x) {-# INLINE happyIn16 #-} happyOut16 :: (HappyAbsSyn ) -> HappyWrap16 happyOut16 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut16 #-} -newtype HappyWrap17 = HappyWrap17 ([Directive String]) -happyIn17 :: ([Directive String]) -> (HappyAbsSyn ) +newtype HappyWrap17 = HappyWrap17 (Prec) +happyIn17 :: (Prec) -> (HappyAbsSyn ) happyIn17 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap17 x) {-# INLINE happyIn17 #-} happyOut17 :: (HappyAbsSyn ) -> HappyWrap17 happyOut17 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut17 #-} -newtype HappyWrap18 = HappyWrap18 (Directive String) -happyIn18 :: (Directive String) -> (HappyAbsSyn ) +newtype HappyWrap18 = HappyWrap18 ([Directive String]) +happyIn18 :: ([Directive String]) -> (HappyAbsSyn ) happyIn18 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap18 x) {-# INLINE happyIn18 #-} happyOut18 :: (HappyAbsSyn ) -> HappyWrap18 happyOut18 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut18 #-} -newtype HappyWrap19 = HappyWrap19 (Maybe String) -happyIn19 :: (Maybe String) -> (HappyAbsSyn ) +newtype HappyWrap19 = HappyWrap19 (Directive String) +happyIn19 :: (Directive String) -> (HappyAbsSyn ) happyIn19 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap19 x) {-# INLINE happyIn19 #-} happyOut19 :: (HappyAbsSyn ) -> HappyWrap19 happyOut19 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut19 #-} -newtype HappyWrap20 = HappyWrap20 ([(String,String)]) -happyIn20 :: ([(String,String)]) -> (HappyAbsSyn ) +newtype HappyWrap20 = HappyWrap20 (Maybe String) +happyIn20 :: (Maybe String) -> (HappyAbsSyn ) happyIn20 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap20 x) {-# INLINE happyIn20 #-} happyOut20 :: (HappyAbsSyn ) -> HappyWrap20 happyOut20 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut20 #-} -newtype HappyWrap21 = HappyWrap21 ((String,String)) -happyIn21 :: ((String,String)) -> (HappyAbsSyn ) +newtype HappyWrap21 = HappyWrap21 ([(String,String)]) +happyIn21 :: ([(String,String)]) -> (HappyAbsSyn ) happyIn21 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap21 x) {-# INLINE happyIn21 #-} happyOut21 :: (HappyAbsSyn ) -> HappyWrap21 happyOut21 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut21 #-} -newtype HappyWrap22 = HappyWrap22 ([String]) -happyIn22 :: ([String]) -> (HappyAbsSyn ) +newtype HappyWrap22 = HappyWrap22 ((String,String)) +happyIn22 :: ((String,String)) -> (HappyAbsSyn ) happyIn22 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap22 x) {-# INLINE happyIn22 #-} happyOut22 :: (HappyAbsSyn ) -> HappyWrap22 happyOut22 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut22 #-} -newtype HappyWrap23 = HappyWrap23 (Maybe String) -happyIn23 :: (Maybe String) -> (HappyAbsSyn ) +newtype HappyWrap23 = HappyWrap23 ([String]) +happyIn23 :: ([String]) -> (HappyAbsSyn ) happyIn23 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap23 x) {-# INLINE happyIn23 #-} happyOut23 :: (HappyAbsSyn ) -> HappyWrap23 happyOut23 x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyOut23 #-} +newtype HappyWrap24 = HappyWrap24 (Maybe String) +happyIn24 :: (Maybe String) -> (HappyAbsSyn ) +happyIn24 x = Happy_GHC_Exts.unsafeCoerce# (HappyWrap24 x) +{-# INLINE happyIn24 #-} +happyOut24 :: (HappyAbsSyn ) -> HappyWrap24 +happyOut24 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut24 #-} happyInTok :: (Token) -> (HappyAbsSyn ) happyInTok x = Happy_GHC_Exts.unsafeCoerce# x {-# INLINE happyInTok #-} @@ -171,39 +176,22 @@ happyOutTok x = Happy_GHC_Exts.unsafeCoerce# 
x {-# INLINE happyOutTok #-} -happyExpList :: HappyAddr -happyExpList = HappyA# "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x20\x00\x00\x00\x00\xff\xf3\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x00\xff\xf3\x41\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x08\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x08\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x20\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x20\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x28\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# - -{-# NOINLINE happyExpListPerState #-} -happyExpListPerState st = - token_strs_expected - where token_strs = ["error","%dummy","%start_ourParser","parser","core_parser","rules","rule","params","comma_ids","prods","prod","term","terms","terms_rev","comma_terms","prec","tokInfos","tokInfo","optStart","tokenSpecs","tokenSpec","ids","optCode","id","spec_tokentype","spec_token","spec_name","spec_partial","spec_lexer","spec_imported_identity","spec_monad","spec_nonassoc","spec_left","spec_right","spec_prec","spec_shift","spec_expect","spec_error","spec_errorhandlertype","spec_attribute","spec_attributetype","code","int","\":\"","\";\"","\"::\"","\"%%\"","\"|\"","\"(\"","\")\"","\",\"","%eof"] - bit_start = st Prelude.* 52 - bit_end = (st Prelude.+ 1) Prelude.* 52 - read_bit = readArrayBit happyExpList - bits = Prelude.map read_bit [bit_start..bit_end Prelude.- 1] - bits_indexed = Prelude.zip bits [0..51] - token_strs_expected = Prelude.concatMap f bits_indexed - f (Prelude.False, _) = [] - f 
(Prelude.True, nr) = [token_strs Prelude.!! nr] - +{-# NOINLINE happyTokenStrings #-} +happyTokenStrings = ["id","spec_tokentype","spec_token","spec_name","spec_partial","spec_lexer","spec_imported_identity","spec_monad","spec_nonassoc","spec_left","spec_right","spec_prec","spec_shift","spec_expect","spec_error","spec_errorexpected","spec_attribute","spec_attributetype","code","int","\":\"","\";\"","\"::\"","\"%%\"","\"|\"","\"(\"","\")\"","\",\"","%eof"] happyActOffsets :: HappyAddr -happyActOffsets = HappyA# "\x01\x00\x01\x00\x24\x00\x00\x00\xf9\xff\x32\x00\xff\xff\x00\x00\x37\x00\x4e\x00\x4f\x00\x50\x00\x3f\x00\x00\x00\x40\x00\x53\x00\x53\x00\x53\x00\x41\x00\x43\x00\x56\x00\x57\x00\x46\x00\x00\x00\x47\x00\x00\x00\x00\x00\x00\x00\x00\x00\x5a\x00\x00\x00\x00\x00\x49\x00\x4a\x00\x5d\x00\x5d\x00\x00\x00\x5e\x00\x4d\x00\x00\x00\x00\x00\x60\x00\x00\x00\x60\x00\x00\x00\x48\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x51\x00\x00\x00\x00\x00\x52\x00\xfe\xff\x62\x00\x00\x00\x15\x00\x00\x00\x65\x00\x54\x00\x00\x00\x0a\x00\x00\x00\x55\x00\x00\x00\x3a\x00\x67\x00\x58\x00\x00\x00\x68\x00\x00\x00\x69\x00\x00\x00\x59\x00\x6a\x00\x00\x00\x6c\x00\x5b\x00\x6e\x00\x00\x00\x6e\x00\x00\x00\x00\x00\x5f\x00\x00\x00\x27\x00\x00\x00\x70\x00\x00\x00\x00\x00\x00\x00\x00\x00"# +happyActOffsets = HappyA# "\x02\x00\x00\x00\x02\x00\x00\x00\x23\x00\x00\x00\x00\x00\x00\x00\xf6\xff\xff\xff\x35\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x3b\x00\x00\x00\x4c\x00\x00\x00\x4e\x00\x00\x00\x4f\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x00\x00\x3f\x00\x00\x00\x52\x00\x00\x00\x52\x00\x00\x00\x52\x00\x00\x00\x40\x00\x00\x00\x42\x00\x00\x00\x00\x00\x00\x00\x55\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x45\x00\x00\x00\x46\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x59\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x48\x00\x00\x00\x49\x00\x00\x00\x5c\x00\x00\x00\x5c\x00\x00\x00\x00\x00\x00\x00\x5d\x00\x00\x00\x4d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x5e\x00\x00\x00\x00\x00\x00\x00\x5e\x00\x00\x00\x00\x00\x00\x00\x47\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x50\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x51\x00\x00\x00\xfd\xff\xff\xff\x61\x00\x00\x00\x00\x00\x00\x00\x26\x00\x00\x00\x00\x00\x00\x00\x64\x00\x00\x00\x53\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x54\x00\x00\x00\x00\x00\x00\x00\x38\x00\x00\x00\x66\x00\x00\x00\x56\x00\x00\x00\x00\x00\x00\x00\x67\x00\x00\x00\x00\x00\x00\x00\x68\x00\x00\x00\x00\x00\x00\x00\x57\x00\x00\x00\x6a\x00\x00\x00\x00\x00\x00\x00\x6b\x00\x00\x00\x5a\x00\x00\x00\x6d\x00\x00\x00\x00\x00\x00\x00\x6d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x5b\x00\x00\x00\x00\x00\x00\x00\x2b\x00\x00\x00\x00\x00\x00\x00\x71\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# happyGotoOffsets :: HappyAddr -happyGotoOffsets = HappyA# 
"\x12\x00\x61\x00\x0b\x00\x00\x00\x00\x00\x63\x00\x6b\x00\x00\x00\x00\x00\x38\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x66\x00\x6d\x00\x6f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x71\x00\x00\x00\x00\x00\x00\x00\x00\x00\x64\x00\x73\x00\x00\x00\x3b\x00\x00\x00\x00\x00\x00\x00\x4b\x00\x00\x00\x74\x00\x00\x00\x76\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x77\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x72\x00\x75\x00\x00\x00\x00\x00\x00\x00\x00\x00\x39\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x31\x00\x00\x00\x36\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x78\x00\x00\x00\x00\x00\x00\x00\x00\x00"# - -happyAdjustOffset :: Happy_GHC_Exts.Int# -> Happy_GHC_Exts.Int# -happyAdjustOffset off = off +happyGotoOffsets = HappyA# "\x12\x00\x00\x00\x62\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x63\x00\x00\x00\x69\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6c\x00\x00\x00\x6e\x00\x00\x00\x6f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x70\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x74\x00\x00\x00\x75\x00\x00\x00\x00\x00\x00\x00\x3a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x4a\x00\x00\x00\x00\x00\x00\x00\x76\x00\x00\x00\x00\x00\x00\x00\x77\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x78\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x73\x00\x00\x00\x72\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x39\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x31\x00\x00\x00\x00\x00\x00\x00\x36\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# happyDefActions :: HappyAddr -happyDefActions = HappyA# "\xc8\xff\x00\x00\x00\x00\xc9\xff\x00\x00\xc8\xff\x00\x00\xe3\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xff\x00\x00\xca\xff\xca\xff\xca\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd2\xff\x00\x00\xd3\xff\xd4\xff\xd5\xff\xd7\xff\xca\xff\xd6\xff\xd8\xff\xdc\xff\x00\x00\xcf\xff\xcf\xff\xe1\xff\xcd\xff\x00\x00\xe2\xff\xe4\xff\x00\x00\xfe\xff\xfd\xff\xfb\xff\xf6\xff\xcc\xff\xce\xff\xe0\xff\xd0\xff\xdf\xff\xdd\xff\xdb\xff\xcb\xff\xd1\xff\xda\xff\x00\x00\x00\x00\xfc\xff\x00\x00\xf5\xff\xec\xff\x00\x00\xd9\xff\x00\x00\xf8\xff\xf2\xff\xeb\xff\xe5\xff\xed\xff\xef\xff\xf7\xff\x00\x00\xf4\xff\x00\x00\xea\xff\x00\x00\x00\x00\xe6\xff\xec\xff\x00\x00\xec\xff\xfa\xff\xec\xff\xf3\xff\xe7\xff\xf0\xff\xe9\xff\x00\x00\xee\xff\x00\x00\xf1\xff\xf9\xff\xe8\xff"# +happyDefActions = HappyA# 
"\xc8\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xc9\xff\xff\xff\x00\x00\x00\x00\xc8\xff\xff\xff\x00\x00\x00\x00\xe3\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xff\xff\xff\x00\x00\x00\x00\xca\xff\xff\xff\xca\xff\xff\xff\xca\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xd3\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xd2\xff\xff\xff\x00\x00\x00\x00\xc8\xff\xff\xff\xd5\xff\xff\xff\xd7\xff\xff\xff\xca\xff\xff\xff\xd6\xff\xff\xff\xd8\xff\xff\xff\xdc\xff\xff\xff\x00\x00\x00\x00\xcf\xff\xff\xff\xcf\xff\xff\xff\xe1\xff\xff\xff\xcd\xff\xff\xff\x00\x00\x00\x00\xe2\xff\xff\xff\xe4\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xfd\xff\xff\xff\xfb\xff\xff\xff\xf6\xff\xff\xff\xcc\xff\xff\xff\xce\xff\xff\xff\xe0\xff\xff\xff\xd0\xff\xff\xff\xdf\xff\xff\xff\xdd\xff\xff\xff\xdb\xff\xff\xff\xcb\xff\xff\xff\xd4\xff\xff\xff\xd1\xff\xff\xff\xda\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\xff\xff\x00\x00\x00\x00\xf5\xff\xff\xff\xec\xff\xff\xff\x00\x00\x00\x00\xd9\xff\xff\xff\x00\x00\x00\x00\xf8\xff\xff\xff\xf2\xff\xff\xff\xeb\xff\xff\xff\xe5\xff\xff\xff\xed\xff\xff\xff\xef\xff\xff\xff\xf7\xff\xff\xff\x00\x00\x00\x00\xf4\xff\xff\xff\x00\x00\x00\x00\xea\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xe6\xff\xff\xff\xec\xff\xff\xff\x00\x00\x00\x00\xec\xff\xff\xff\xfa\xff\xff\xff\xec\xff\xff\xff\xf3\xff\xff\xff\xe7\xff\xff\xff\xf0\xff\xff\xff\xe9\xff\xff\xff\x00\x00\x00\x00\xee\xff\xff\xff\x00\x00\x00\x00\xf1\xff\xff\xff\xf9\xff\xff\xff\xe8\xff\xff\xff"# happyCheck :: HappyAddr -happyCheck = HappyA# "\xff\xff\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x01\x00\x01\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x00\x00\x15\x00\x13\x00\x17\x00\x1d\x00\x18\x00\x0d\x00\x0e\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x15\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x13\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x1b\x00\x1c\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x08\x00\x1b\x00\x1c\x00\x0b\x00\x13\x00\x0c\x00\x0d\x00\x10\x00\x11\x00\x13\x00\x10\x00\x11\x00\x02\x00\x03\x00\x01\x00\x01\x00\x01\x00\x13\x00\x13\x00\x01\x00\x14\x00\x13\x00\x01\x00\x01\x00\x13\x00\x13\x00\x01\x00\x13\x00\x13\x00\x01\x00\x01\x00\x13\x00\x01\x00\x1a\x00\x01\x00\x13\x00\x13\x00\x01\x00\x13\x00\x01\x00\x01\x00\x01\x00\x01\x00\x13\x00\x01\x00\x19\x00\x01\x00\x15\x00\x01\x00\x1a\x00\x0f\x00\x13\x00\x16\x00\x13\x00\x03\x00\x12\x00\x0e\x00\x04\x00\xff\xff\x05\x00\x08\x00\x0c\x00\x12\x00\x08\x00\x12\x00\x0f\x00\x12\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# +happyCheck = HappyA# 
"\xff\xff\xff\xff\x03\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x0f\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x12\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00\x16\x00\x00\x00\x1e\x00\x00\x00\x18\x00\x00\x00\x14\x00\x00\x00\x19\x00\x00\x00\x0d\x00\x00\x00\x0e\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x16\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x13\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x0f\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x12\x00\x00\x00\x13\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x08\x00\x00\x00\x1c\x00\x00\x00\x1d\x00\x00\x00\x0b\x00\x00\x00\x0d\x00\x00\x00\x0e\x00\x00\x00\x1c\x00\x00\x00\x1d\x00\x00\x00\x14\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x14\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x15\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x14\x00\x00\x00\x14\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x14\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x14\x00\x00\x00\x1b\x00\x00\x00\x02\x00\x00\x00\x14\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x1a\x00\x00\x00\x02\x00\x00\x00\x16\x00\x00\x00\x1b\x00\x00\x00\x17\x00\x00\x00\x02\x00\x00\x00\xff\xff\xff\xff\x13\x00\x00\x00\x13\x00\x00\x00\x0e\x00\x00\x00\x13\x00\x00\x00\x03\x00\x00\x00\x08\x00\x00\x00\x04\x00\x00\x00\xff\xff\xff\xff\x05\x00\x00\x00\x12\x00\x00\x00\x0c\x00\x00\x00\x12\x00\x00\x00\x12\x00\x00\x00\x12\x00\x00\x00\x0f\x00\x00\x00\x0f\x00\x00\x00\x08\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# happyTable :: HappyAddr -happyTable = HappyA# 
"\x00\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x51\x00\x05\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x04\x00\x3e\x00\x04\x00\x3f\x00\xff\xff\x2a\x00\x06\x00\x07\x00\x41\x00\x42\x00\x43\x00\x44\x00\x45\x00\x52\x00\x54\x00\x42\x00\x43\x00\x44\x00\x45\x00\x02\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x48\x00\x49\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x52\x00\x42\x00\x43\x00\x44\x00\x45\x00\x5c\x00\x42\x00\x43\x00\x44\x00\x45\x00\x57\x00\x5a\x00\x5b\x00\x58\x00\x04\x00\x4e\x00\x4f\x00\x24\x00\x25\x00\x28\x00\x2f\x00\x25\x00\x2b\x00\x2c\x00\x27\x00\x24\x00\x23\x00\x22\x00\x21\x00\x1e\x00\x1c\x00\x1b\x00\x1a\x00\x19\x00\x18\x00\x37\x00\x1e\x00\x35\x00\x34\x00\x32\x00\x27\x00\x2f\x00\x2e\x00\x3a\x00\x3d\x00\x38\x00\x40\x00\x47\x00\x41\x00\x47\x00\x4a\x00\x47\x00\x56\x00\x57\x00\x47\x00\x50\x00\x47\x00\x54\x00\x47\x00\x4b\x00\x32\x00\x02\x00\x5c\x00\x2a\x00\x3a\x00\x1f\x00\x28\x00\x38\x00\x00\x00\x3b\x00\x4b\x00\x4c\x00\x1e\x00\x5d\x00\x1c\x00\x30\x00\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# +happyTable = HappyA# "\x00\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x0d\x00\x00\x00\x0e\x00\x00\x00\x0f\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x12\x00\x00\x00\x51\x00\x00\x00\x05\x00\x00\x00\x13\x00\x00\x00\x14\x00\x00\x00\x15\x00\x00\x00\x16\x00\x00\x00\x17\x00\x00\x00\x04\x00\x00\x00\x3e\x00\x00\x00\xff\xff\xff\xff\x3f\x00\x00\x00\x04\x00\x00\x00\x29\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x41\x00\x00\x00\x42\x00\x00\x00\x43\x00\x00\x00\x44\x00\x00\x00\x45\x00\x00\x00\x52\x00\x00\x00\x54\x00\x00\x00\x42\x00\x00\x00\x43\x00\x00\x00\x44\x00\x00\x00\x45\x00\x00\x00\x02\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x0d\x00\x00\x00\x0e\x00\x00\x00\x0f\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x12\x00\x00\x00\x23\x00\x00\x00\x24\x00\x00\x00\x13\x00\x00\x00\x14\x00\x00\x00\x15\x00\x00\x00\x16\x00\x00\x00\x17\x00\x00\x00\x52\x00\x00\x00\x42\x00\x00\x00\x43\x00\x00\x00\x44\x00\x00\x00\x45\x00\x00\x00\x5c\x00\x00\x00\x42\x00\x00\x00\x43\x00\x00\x00\x44\x00\x00\x00\x45\x00\x00\x00\x57\x00\x00\x00\x48\x00\x00\x00\x49\x00\x00\x00\x58\x00\x00\x00\x4e\x00\x00\x00\x4f\x00\x00\x00\x5a\x00\x00\x00\x5b\x00\x00\x00\x04\x00\x00\x00\x2e\x00\x00\x00\x24\x00\x00\x00\x2a\x00\x00\x00\x2b\x00\x00\x00\x26\x00\x00\x00\x27\x00\x00\x00\x23\x00\x00\x00\x22\x00\x00\x00\x21\x00\x00\x00\x20\x00\x00\x00\x1d\x00\x00\x00\x1b\x00\x00\x00\x1a\x00\x00\x00\x19\x00\x00\x00\x18\x00\x00\x00\x37\x00\x00\x00\x04\x00\x00\x00\x1d\x00\x00\x00\x34\x00\x00\x00\x33\x00\x00\x00\x31\x00\x00\x00\x26\x00\x00\x00\x2d\x00\x00\x00\x2e\x00\x00\x00\x3a\x00\x00\x00\x3d\x00\x00\x00\x38\x00\x00\x00\x40\x00\x00\x00\x47\x00\x00\x00\x41\x00\x00\x00\x47\x00\x00\x00\x4a\x00\x00\x00\x47\x00\x00\x00\x57\x00\x00\x00\x56\x00\x00\x00\x47\x00\x00\x00\x50\x00\x00\x00\x47\x00\x00\x00\x54\x00\x00\x00\x4b\x00\x00\x00\x5c\x00\x00\x00\x47\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x29\x00\x00\x00\x27\x00\x00\x00\x35\x00\x00\x00\x3a\x00\x00\x00\x4b\x00\x00\x00\x38\x00\x00\x00\x00\x00\x00\x00\x3b\x00\x00\x00\x1e\x00\x00\x00\x4c\x00\x00\x00\x1d\x00\x00\x00\x1b\x00\x00\x00\x34\x00\x00\x00\x31\x00\x00\x00\x2f\x00\x00\x00\x5d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# happyReduceArr = Happy_Data_Array.array (1, 55) [ (1 , happyReduce_1), @@ -263,51 +251,59 @@ happyReduceArr = Happy_Data_Array.array (1, 55) [ (55 , happyReduce_55) ] -happy_n_terms = 30 :: Prelude.Int +happyRuleArr :: HappyAddr +happyRuleArr = HappyA# "\x00\x00\x00\x00\x03\x00\x00\x00\x01\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\x00\x00\x00\x06\x00\x00\x00\x03\x00\x00\x00\x07\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x03\x00\x00\x00\x06\x00\x00\x00\x03\x00\x00\x00\x06\x00\x00\x00\x01\x00\x00\x00\x07\x00\x00\x00\x04\x00\x00\x00\x07\x00\x00\x00\x03\x00\x00\x00\x08\x00\x00\x00\x01\x00\x00\x00\x08\x00\x00\x00\x04\x00\x00\x00\x09\x00\x00\x00\x01\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x01\x00\x00\x00\x0a\x00\x00\x00\x02\x00\x00\x00\x0b\x00\x00\x00\x01\x00\x00\x00\x0b\x00\x00\x00\x03\x00\x00\x00\x0c\x00\x00\x00\x02\x00\x00\x00\x0c\x00\x00\x00\x01\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x02\x00\x00\x00\x0d\x00\x00\x00\x01\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x03\x00\x00\x00\x0e\x00\x00\x00\x03\x00\x00\x00\x0e\x00\x00\x00\x01\x00\x00\x00\x0e\x00\x00\x00\x03\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x03\x00\x00\x00\x0e\x00\x00\x00\x04\x00\x00\x00\x0e\x00\x00\x00\x05\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x03\x00\x00\x00\x0e\x00\x00\x00\x01\x00\x00\x00\x0e\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x03\x00\x00\x00\x0f\x00\x00\x00\x01\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x02\x00\x00\x00\x10\x00\x00\x00\x01\x00\x00\x00\x11\x00\x00\x00\x02\x00\x00\x00\x12\x00\x00\x00\x02\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x01\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00"# + +happyCatchStates :: [Int] +happyCatchStates = [] + +happy_n_terms = 31 :: Prelude.Int happy_n_nonterms = 20 :: Prelude.Int -happyReduce_1 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happy_n_starts = 1 :: Prelude.Int + +happyReduce_1 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_1 = happySpecReduce_3 0# happyReduction_1 happyReduction_1 happy_x_3 happy_x_2 happy_x_1 - = case happyOut23 happy_x_1 of { (HappyWrap23 happy_var_1) -> - case happyOut5 happy_x_2 of { (HappyWrap5 happy_var_2) -> - case happyOut23 happy_x_3 of { (HappyWrap23 happy_var_3) -> - happyIn4 + = case happyOut24 happy_x_1 of { (HappyWrap24 happy_var_1) -> + case happyOut6 happy_x_2 of { (HappyWrap6 happy_var_2) -> + case happyOut24 happy_x_3 of { (HappyWrap24 happy_var_3) -> + happyIn5 (BookendedAbsSyn happy_var_1 happy_var_2 happy_var_3 )}}} -happyReduce_2 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_2 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_2 = happySpecReduce_3 1# happyReduction_2 happyReduction_2 happy_x_3 
happy_x_2 happy_x_1 - = case happyOut17 happy_x_1 of { (HappyWrap17 happy_var_1) -> - case happyOut6 happy_x_3 of { (HappyWrap6 happy_var_3) -> - happyIn5 + = case happyOut18 happy_x_1 of { (HappyWrap18 happy_var_1) -> + case happyOut7 happy_x_3 of { (HappyWrap7 happy_var_3) -> + happyIn6 (AbsSyn (reverse happy_var_1) (reverse happy_var_3) )}} -happyReduce_3 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_3 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_3 = happySpecReduce_2 2# happyReduction_3 happyReduction_3 happy_x_2 happy_x_1 - = case happyOut6 happy_x_1 of { (HappyWrap6 happy_var_1) -> - case happyOut7 happy_x_2 of { (HappyWrap7 happy_var_2) -> - happyIn6 + = case happyOut7 happy_x_1 of { (HappyWrap7 happy_var_1) -> + case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + happyIn7 (happy_var_2 : happy_var_1 )}} -happyReduce_4 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_4 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_4 = happySpecReduce_1 2# happyReduction_4 happyReduction_4 happy_x_1 - = case happyOut7 happy_x_1 of { (HappyWrap7 happy_var_1) -> - happyIn6 + = case happyOut8 happy_x_1 of { (HappyWrap8 happy_var_1) -> + happyIn7 ([happy_var_1] )} -happyReduce_5 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_5 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_5 = happyReduce 6# 3# happyReduction_5 happyReduction_5 (happy_x_6 `HappyStk` happy_x_5 `HappyStk` @@ -317,14 +313,14 @@ happyReduction_5 (happy_x_6 `HappyStk` happy_x_1 `HappyStk` happyRest) = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> - case happyOut10 happy_x_6 of { (HappyWrap10 happy_var_6) -> - happyIn7 + case happyOut11 happy_x_6 of { (HappyWrap11 happy_var_6) -> + happyIn8 (Rule happy_var_1 happy_var_2 happy_var_6 (Just happy_var_4) ) `HappyStk` happyRest}}}} -happyReduce_6 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_6 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_6 = happyReduce 7# 3# happyReduction_6 happyReduction_6 (happy_x_7 `HappyStk` happy_x_6 `HappyStk` @@ -335,14 +331,14 @@ happyReduction_6 (happy_x_7 `HappyStk` happy_x_1 `HappyStk` happyRest) = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> - case happyOut10 happy_x_7 of { (HappyWrap10 happy_var_7) -> - happyIn7 + case happyOut11 happy_x_7 of { (HappyWrap11 happy_var_7) -> + happyIn8 (Rule happy_var_1 happy_var_2 happy_var_7 (Just happy_var_4) ) `HappyStk` happyRest}}}} -happyReduce_7 :: () => Happy_GHC_Exts.Int# -> Token 
-> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_7 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_7 = happyReduce 4# 3# happyReduction_7 happyReduction_7 (happy_x_4 `HappyStk` happy_x_3 `HappyStk` @@ -350,100 +346,100 @@ happyReduction_7 (happy_x_4 `HappyStk` happy_x_1 `HappyStk` happyRest) = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut8 happy_x_2 of { (HappyWrap8 happy_var_2) -> - case happyOut10 happy_x_4 of { (HappyWrap10 happy_var_4) -> - happyIn7 + case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> + case happyOut11 happy_x_4 of { (HappyWrap11 happy_var_4) -> + happyIn8 (Rule happy_var_1 happy_var_2 happy_var_4 Nothing ) `HappyStk` happyRest}}} -happyReduce_8 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_8 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_8 = happySpecReduce_3 4# happyReduction_8 happyReduction_8 happy_x_3 happy_x_2 happy_x_1 - = case happyOut9 happy_x_2 of { (HappyWrap9 happy_var_2) -> - happyIn8 + = case happyOut10 happy_x_2 of { (HappyWrap10 happy_var_2) -> + happyIn9 (reverse happy_var_2 )} -happyReduce_9 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_9 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_9 = happySpecReduce_0 4# happyReduction_9 -happyReduction_9 = happyIn8 +happyReduction_9 = happyIn9 ([] ) -happyReduce_10 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_10 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_10 = happySpecReduce_1 5# happyReduction_10 happyReduction_10 happy_x_1 = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - happyIn9 + happyIn10 ([happy_var_1] )} -happyReduce_11 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_11 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_11 = happySpecReduce_3 5# happyReduction_11 happyReduction_11 happy_x_3 happy_x_2 happy_x_1 - = case happyOut9 happy_x_1 of { (HappyWrap9 happy_var_1) -> + = case happyOut10 happy_x_1 of { (HappyWrap10 happy_var_1) -> case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokId) -> - happyIn9 + happyIn10 (happy_var_3 : happy_var_1 )}} -happyReduce_12 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_12 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_12 = happySpecReduce_3 6# happyReduction_12 happyReduction_12 happy_x_3 happy_x_2 happy_x_1 - = case happyOut11 happy_x_1 of { (HappyWrap11 happy_var_1) -> - case happyOut10 happy_x_3 of { (HappyWrap10 happy_var_3) -> - happyIn10 + = case happyOut12 happy_x_1 of { (HappyWrap12 happy_var_1) -> + case happyOut11 happy_x_3 of { (HappyWrap11 
happy_var_3) -> + happyIn11 (happy_var_1 : happy_var_3 )}} -happyReduce_13 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_13 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_13 = happySpecReduce_1 6# happyReduction_13 happyReduction_13 happy_x_1 - = case happyOut11 happy_x_1 of { (HappyWrap11 happy_var_1) -> - happyIn10 + = case happyOut12 happy_x_1 of { (HappyWrap12 happy_var_1) -> + happyIn11 ([happy_var_1] )} -happyReduce_14 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_14 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_14 = happyMonadReduce 4# 7# happyReduction_14 happyReduction_14 (happy_x_4 `HappyStk` happy_x_3 `HappyStk` happy_x_2 `HappyStk` happy_x_1 `HappyStk` happyRest) tk - = happyThen ((case happyOut13 happy_x_1 of { (HappyWrap13 happy_var_1) -> - case happyOut16 happy_x_2 of { (HappyWrap16 happy_var_2) -> + = happyThen ((case happyOut14 happy_x_1 of { (HappyWrap14 happy_var_1) -> + case happyOut17 happy_x_2 of { (HappyWrap17 happy_var_2) -> case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> ( lineP >>= \l -> return (Prod happy_var_1 happy_var_3 l happy_var_2))}}}) - ) (\r -> happyReturn (happyIn11 r)) + ) (\r -> happyReturn (happyIn12 r)) -happyReduce_15 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_15 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_15 = happyMonadReduce 3# 7# happyReduction_15 happyReduction_15 (happy_x_3 `HappyStk` happy_x_2 `HappyStk` happy_x_1 `HappyStk` happyRest) tk - = happyThen ((case happyOut13 happy_x_1 of { (HappyWrap13 happy_var_1) -> - case happyOut16 happy_x_2 of { (HappyWrap16 happy_var_2) -> + = happyThen ((case happyOut14 happy_x_1 of { (HappyWrap14 happy_var_1) -> + case happyOut17 happy_x_2 of { (HappyWrap17 happy_var_2) -> case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> ( lineP >>= \l -> return (Prod happy_var_1 happy_var_3 l happy_var_2))}}}) - ) (\r -> happyReturn (happyIn11 r)) + ) (\r -> happyReturn (happyIn12 r)) -happyReduce_16 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_16 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_16 = happySpecReduce_1 8# happyReduction_16 happyReduction_16 happy_x_1 = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - happyIn12 + happyIn13 (App happy_var_1 [] )} -happyReduce_17 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_17 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_17 = happyReduce 4# 8# happyReduction_17 happyReduction_17 (happy_x_4 `HappyStk` happy_x_3 `HappyStk` @@ -451,181 +447,181 @@ happyReduction_17 (happy_x_4 `HappyStk` happy_x_1 `HappyStk` happyRest) = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut15 
happy_x_3 of { (HappyWrap15 happy_var_3) -> - happyIn12 + case happyOut16 happy_x_3 of { (HappyWrap16 happy_var_3) -> + happyIn13 (App happy_var_1 (reverse happy_var_3) ) `HappyStk` happyRest}} -happyReduce_18 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_18 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_18 = happySpecReduce_1 9# happyReduction_18 happyReduction_18 happy_x_1 - = case happyOut14 happy_x_1 of { (HappyWrap14 happy_var_1) -> - happyIn13 + = case happyOut15 happy_x_1 of { (HappyWrap15 happy_var_1) -> + happyIn14 (reverse happy_var_1 )} -happyReduce_19 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_19 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_19 = happySpecReduce_0 9# happyReduction_19 -happyReduction_19 = happyIn13 +happyReduction_19 = happyIn14 ([] ) -happyReduce_20 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_20 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_20 = happySpecReduce_1 10# happyReduction_20 happyReduction_20 happy_x_1 - = case happyOut12 happy_x_1 of { (HappyWrap12 happy_var_1) -> - happyIn14 + = case happyOut13 happy_x_1 of { (HappyWrap13 happy_var_1) -> + happyIn15 ([happy_var_1] )} -happyReduce_21 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_21 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_21 = happySpecReduce_2 10# happyReduction_21 happyReduction_21 happy_x_2 happy_x_1 - = case happyOut14 happy_x_1 of { (HappyWrap14 happy_var_1) -> - case happyOut12 happy_x_2 of { (HappyWrap12 happy_var_2) -> - happyIn14 + = case happyOut15 happy_x_1 of { (HappyWrap15 happy_var_1) -> + case happyOut13 happy_x_2 of { (HappyWrap13 happy_var_2) -> + happyIn15 (happy_var_2 : happy_var_1 )}} -happyReduce_22 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_22 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_22 = happySpecReduce_1 11# happyReduction_22 happyReduction_22 happy_x_1 - = case happyOut12 happy_x_1 of { (HappyWrap12 happy_var_1) -> - happyIn15 + = case happyOut13 happy_x_1 of { (HappyWrap13 happy_var_1) -> + happyIn16 ([happy_var_1] )} -happyReduce_23 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_23 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_23 = happySpecReduce_3 11# happyReduction_23 happyReduction_23 happy_x_3 happy_x_2 happy_x_1 - = case happyOut15 happy_x_1 of { (HappyWrap15 happy_var_1) -> - case happyOut12 happy_x_3 of { (HappyWrap12 happy_var_3) -> - happyIn15 + = case happyOut16 happy_x_1 of { (HappyWrap16 happy_var_1) -> + case happyOut13 happy_x_3 of { (HappyWrap13 
happy_var_3) -> + happyIn16 (happy_var_3 : happy_var_1 )}} -happyReduce_24 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_24 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_24 = happySpecReduce_2 12# happyReduction_24 happyReduction_24 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> - happyIn16 + happyIn17 (PrecId happy_var_2 )} -happyReduce_25 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_25 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_25 = happySpecReduce_1 12# happyReduction_25 happyReduction_25 happy_x_1 - = happyIn16 + = happyIn17 (PrecShift ) -happyReduce_26 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_26 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_26 = happySpecReduce_0 12# happyReduction_26 -happyReduction_26 = happyIn16 +happyReduction_26 = happyIn17 (PrecNone ) -happyReduce_27 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_27 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_27 = happySpecReduce_2 13# happyReduction_27 happyReduction_27 happy_x_2 happy_x_1 - = case happyOut17 happy_x_1 of { (HappyWrap17 happy_var_1) -> - case happyOut18 happy_x_2 of { (HappyWrap18 happy_var_2) -> - happyIn17 + = case happyOut18 happy_x_1 of { (HappyWrap18 happy_var_1) -> + case happyOut19 happy_x_2 of { (HappyWrap19 happy_var_2) -> + happyIn18 (happy_var_2 : happy_var_1 )}} -happyReduce_28 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_28 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_28 = happySpecReduce_1 13# happyReduction_28 happyReduction_28 happy_x_1 - = case happyOut18 happy_x_1 of { (HappyWrap18 happy_var_1) -> - happyIn17 + = case happyOut19 happy_x_1 of { (HappyWrap19 happy_var_1) -> + happyIn18 ([happy_var_1] )} -happyReduce_29 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_29 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_29 = happySpecReduce_2 14# happyReduction_29 happyReduction_29 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn18 + happyIn19 (TokenType happy_var_2 )} -happyReduce_30 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_30 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_30 = happySpecReduce_2 14# happyReduction_30 happyReduction_30 happy_x_2 happy_x_1 - = case happyOut20 happy_x_2 of { (HappyWrap20 happy_var_2) -> - 
happyIn18 + = case happyOut21 happy_x_2 of { (HappyWrap21 happy_var_2) -> + happyIn19 (TokenSpec happy_var_2 )} -happyReduce_31 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_31 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_31 = happySpecReduce_3 14# happyReduction_31 happyReduction_31 happy_x_3 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> - case happyOut19 happy_x_3 of { (HappyWrap19 happy_var_3) -> - happyIn18 + case happyOut20 happy_x_3 of { (HappyWrap20 happy_var_3) -> + happyIn19 (TokenName happy_var_2 happy_var_3 False )}} -happyReduce_32 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_32 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_32 = happySpecReduce_3 14# happyReduction_32 happyReduction_32 happy_x_3 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> - case happyOut19 happy_x_3 of { (HappyWrap19 happy_var_3) -> - happyIn18 + case happyOut20 happy_x_3 of { (HappyWrap20 happy_var_3) -> + happyIn19 (TokenName happy_var_2 happy_var_3 True )}} -happyReduce_33 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_33 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_33 = happySpecReduce_1 14# happyReduction_33 happyReduction_33 happy_x_1 - = happyIn18 + = happyIn19 (TokenImportedIdentity ) -happyReduce_34 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_34 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_34 = happySpecReduce_3 14# happyReduction_34 happyReduction_34 happy_x_3 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - happyIn18 + happyIn19 (TokenLexer happy_var_2 happy_var_3 )}} -happyReduce_35 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_35 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_35 = happySpecReduce_2 14# happyReduction_35 happyReduction_35 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn18 + happyIn19 (TokenMonad "()" happy_var_2 "Prelude.>>=" "Prelude.return" )} -happyReduce_36 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_36 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_36 = happySpecReduce_3 14# happyReduction_36 happyReduction_36 happy_x_3 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - happyIn18 + happyIn19 (TokenMonad happy_var_2 
happy_var_3 "Prelude.>>=" "Prelude.return" )}} -happyReduce_37 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_37 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_37 = happyReduce 4# 14# happyReduction_37 happyReduction_37 (happy_x_4 `HappyStk` happy_x_3 `HappyStk` @@ -635,11 +631,11 @@ happyReduction_37 (happy_x_4 `HappyStk` = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> - happyIn18 + happyIn19 (TokenMonad "()" happy_var_2 happy_var_3 happy_var_4 ) `HappyStk` happyRest}}} -happyReduce_38 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_38 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_38 = happyReduce 5# 14# happyReduction_38 happyReduction_38 (happy_x_5 `HappyStk` happy_x_4 `HappyStk` @@ -651,305 +647,280 @@ happyReduction_38 (happy_x_5 `HappyStk` case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> case happyOutTok happy_x_5 of { (TokenInfo happy_var_5 TokCodeQuote) -> - happyIn18 + happyIn19 (TokenMonad happy_var_2 happy_var_3 happy_var_4 happy_var_5 ) `HappyStk` happyRest}}}} -happyReduce_39 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_39 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_39 = happySpecReduce_2 14# happyReduction_39 happyReduction_39 happy_x_2 happy_x_1 - = case happyOut22 happy_x_2 of { (HappyWrap22 happy_var_2) -> - happyIn18 + = case happyOut23 happy_x_2 of { (HappyWrap23 happy_var_2) -> + happyIn19 (TokenNonassoc happy_var_2 )} -happyReduce_40 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_40 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_40 = happySpecReduce_2 14# happyReduction_40 happyReduction_40 happy_x_2 happy_x_1 - = case happyOut22 happy_x_2 of { (HappyWrap22 happy_var_2) -> - happyIn18 + = case happyOut23 happy_x_2 of { (HappyWrap23 happy_var_2) -> + happyIn19 (TokenRight happy_var_2 )} -happyReduce_41 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_41 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_41 = happySpecReduce_2 14# happyReduction_41 happyReduction_41 happy_x_2 happy_x_1 - = case happyOut22 happy_x_2 of { (HappyWrap22 happy_var_2) -> - happyIn18 + = case happyOut23 happy_x_2 of { (HappyWrap23 happy_var_2) -> + happyIn19 (TokenLeft happy_var_2 )} -happyReduce_42 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_42 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn 
) -> (P (HappyAbsSyn )) happyReduce_42 = happySpecReduce_2 14# happyReduction_42 happyReduction_42 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenNum happy_var_2 TokNum) -> - happyIn18 + happyIn19 (TokenExpect happy_var_2 )} -happyReduce_43 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) -happyReduce_43 = happySpecReduce_2 14# happyReduction_43 -happyReduction_43 happy_x_2 +happyReduce_43 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) +happyReduce_43 = happySpecReduce_3 14# happyReduction_43 +happyReduction_43 happy_x_3 + happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn18 - (TokenError happy_var_2 - )} + case happyOut24 happy_x_3 of { (HappyWrap24 happy_var_3) -> + happyIn19 + (TokenError happy_var_2 happy_var_3 + )}} -happyReduce_44 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) -happyReduce_44 = happySpecReduce_2 14# happyReduction_44 -happyReduction_44 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> - happyIn18 - (TokenErrorHandlerType happy_var_2 - )} +happyReduce_44 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) +happyReduce_44 = happySpecReduce_1 14# happyReduction_44 +happyReduction_44 happy_x_1 + = happyIn19 + (TokenErrorExpected + ) -happyReduce_45 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_45 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_45 = happySpecReduce_2 14# happyReduction_45 happyReduction_45 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn18 + happyIn19 (TokenAttributetype happy_var_2 )} -happyReduce_46 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_46 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_46 = happySpecReduce_3 14# happyReduction_46 happyReduction_46 happy_x_3 happy_x_2 happy_x_1 = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - happyIn18 + happyIn19 (TokenAttribute happy_var_2 happy_var_3 )}} -happyReduce_47 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_47 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_47 = happySpecReduce_1 15# happyReduction_47 happyReduction_47 happy_x_1 = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - happyIn19 + happyIn20 (Just happy_var_1 )} -happyReduce_48 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_48 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_48 = happySpecReduce_0 15# happyReduction_48 -happyReduction_48 = happyIn19 
+happyReduction_48 = happyIn20 (Nothing ) -happyReduce_49 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_49 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_49 = happySpecReduce_2 16# happyReduction_49 happyReduction_49 happy_x_2 happy_x_1 - = case happyOut21 happy_x_1 of { (HappyWrap21 happy_var_1) -> - case happyOut20 happy_x_2 of { (HappyWrap20 happy_var_2) -> - happyIn20 + = case happyOut22 happy_x_1 of { (HappyWrap22 happy_var_1) -> + case happyOut21 happy_x_2 of { (HappyWrap21 happy_var_2) -> + happyIn21 (happy_var_1:happy_var_2 )}} -happyReduce_50 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_50 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_50 = happySpecReduce_1 16# happyReduction_50 happyReduction_50 happy_x_1 - = case happyOut21 happy_x_1 of { (HappyWrap21 happy_var_1) -> - happyIn20 + = case happyOut22 happy_x_1 of { (HappyWrap22 happy_var_1) -> + happyIn21 ([happy_var_1] )} -happyReduce_51 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_51 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_51 = happySpecReduce_2 17# happyReduction_51 happyReduction_51 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn21 + happyIn22 ((happy_var_1,happy_var_2) )}} -happyReduce_52 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_52 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_52 = happySpecReduce_2 18# happyReduction_52 happyReduction_52 happy_x_2 happy_x_1 = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut22 happy_x_2 of { (HappyWrap22 happy_var_2) -> - happyIn22 + case happyOut23 happy_x_2 of { (HappyWrap23 happy_var_2) -> + happyIn23 (happy_var_1 : happy_var_2 )}} -happyReduce_53 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_53 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_53 = happySpecReduce_0 18# happyReduction_53 -happyReduction_53 = happyIn22 +happyReduction_53 = happyIn23 ([] ) -happyReduce_54 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_54 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_54 = happySpecReduce_1 19# happyReduction_54 happyReduction_54 happy_x_1 = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokCodeQuote) -> - happyIn23 + happyIn24 (Just happy_var_1 )} -happyReduce_55 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyReduce_55 :: () => Happy_GHC_Exts.Int# -> Token 
-> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) happyReduce_55 = happySpecReduce_0 19# happyReduction_55 -happyReduction_55 = happyIn23 +happyReduction_55 = happyIn24 (Nothing ) -happyNewToken action sts stk - = lexTokenP(\tk -> - let cont i = happyDoAction i tk action sts stk in - case tk of { - TokenEOF -> happyDoAction 29# tk action sts stk; - TokenInfo happy_dollar_dollar TokId -> cont 1#; - TokenKW TokSpecId_TokenType -> cont 2#; - TokenKW TokSpecId_Token -> cont 3#; - TokenKW TokSpecId_Name -> cont 4#; - TokenKW TokSpecId_Partial -> cont 5#; - TokenKW TokSpecId_Lexer -> cont 6#; - TokenKW TokSpecId_ImportedIdentity -> cont 7#; - TokenKW TokSpecId_Monad -> cont 8#; - TokenKW TokSpecId_Nonassoc -> cont 9#; - TokenKW TokSpecId_Left -> cont 10#; - TokenKW TokSpecId_Right -> cont 11#; - TokenKW TokSpecId_Prec -> cont 12#; - TokenKW TokSpecId_Shift -> cont 13#; - TokenKW TokSpecId_Expect -> cont 14#; - TokenKW TokSpecId_Error -> cont 15#; - TokenKW TokSpecId_ErrorHandlerType -> cont 16#; - TokenKW TokSpecId_Attribute -> cont 17#; - TokenKW TokSpecId_Attributetype -> cont 18#; - TokenInfo happy_dollar_dollar TokCodeQuote -> cont 19#; - TokenNum happy_dollar_dollar TokNum -> cont 20#; - TokenKW TokColon -> cont 21#; - TokenKW TokSemiColon -> cont 22#; - TokenKW TokDoubleColon -> cont 23#; - TokenKW TokDoublePercent -> cont 24#; - TokenKW TokBar -> cont 25#; - TokenKW TokParenL -> cont 26#; - TokenKW TokParenR -> cont 27#; - TokenKW TokComma -> cont 28#; - _ -> happyError' (tk, []) - }) - -happyError_ explist 29# tk = happyError' (tk, explist) -happyError_ explist _ tk = happyError' (tk, explist) - -happyThen :: () => P a -> (a -> P b) -> P b +happyTerminalToTok term = case term of { + TokenEOF -> 30#; + TokenInfo happy_dollar_dollar TokId -> 2#; + TokenKW TokSpecId_TokenType -> 3#; + TokenKW TokSpecId_Token -> 4#; + TokenKW TokSpecId_Name -> 5#; + TokenKW TokSpecId_Partial -> 6#; + TokenKW TokSpecId_Lexer -> 7#; + TokenKW TokSpecId_ImportedIdentity -> 8#; + TokenKW TokSpecId_Monad -> 9#; + TokenKW TokSpecId_Nonassoc -> 10#; + TokenKW TokSpecId_Left -> 11#; + TokenKW TokSpecId_Right -> 12#; + TokenKW TokSpecId_Prec -> 13#; + TokenKW TokSpecId_Shift -> 14#; + TokenKW TokSpecId_Expect -> 15#; + TokenKW TokSpecId_Error -> 16#; + TokenKW TokSpecId_ErrorExpected -> 17#; + TokenKW TokSpecId_Attribute -> 18#; + TokenKW TokSpecId_Attributetype -> 19#; + TokenInfo happy_dollar_dollar TokCodeQuote -> 20#; + TokenNum happy_dollar_dollar TokNum -> 21#; + TokenKW TokColon -> 22#; + TokenKW TokSemiColon -> 23#; + TokenKW TokDoubleColon -> 24#; + TokenKW TokDoublePercent -> 25#; + TokenKW TokBar -> 26#; + TokenKW TokParenL -> 27#; + TokenKW TokParenR -> 28#; + TokenKW TokComma -> 29#; + _ -> error "Encountered a token that was not declared to happy" + } +{-# NOINLINE happyTerminalToTok #-} + +happyEofTok = 30 + +happyLex kend kmore = lexTokenP (\tk -> case tk of { + TokenEOF -> kend tk; + _ -> kmore (happyTerminalToTok tk) tk }) +{-# INLINE happyLex #-} + +happyNewToken action sts stk = happyLex (\tk -> happyDoAction 30# tk action sts stk) (\i tk -> happyDoAction i tk action sts stk) + +happyReport 30# = happyReport' +happyReport _ = happyReport' + + +happyThen :: () => (P a) -> (a -> (P b)) -> (P b) happyThen = (Prelude.>>=) -happyReturn :: () => a -> P a +happyReturn :: () => a -> (P a) happyReturn = (Prelude.return) happyParse :: () => Happy_GHC_Exts.Int# -> P (HappyAbsSyn ) -happyNewToken :: () => Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> 
P (HappyAbsSyn ) +happyNewToken :: () => Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) -happyDoAction :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn ) +happyDoAction :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn )) -happyReduceArr :: () => Happy_Data_Array.Array Prelude.Int (Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> P (HappyAbsSyn )) +happyReduceArr :: () => Happy_Data_Array.Array Prelude.Int (Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn ) -> (P (HappyAbsSyn ))) happyThen1 :: () => P a -> (a -> P b) -> P b happyThen1 = happyThen -happyReturn1 :: () => a -> P a +happyFmap1 f m = happyThen m (\a -> happyReturn (f a)) +happyReturn1 :: () => a -> (P a) happyReturn1 = happyReturn -happyError' :: () => ((Token), [Prelude.String]) -> P a -happyError' tk = (\(tokens, explist) -> happyError) tk +happyReport' :: () => (Token) -> [Prelude.String] -> (P a) -> (P a) +happyReport' = (\tokens expected resume -> happyError) + +happyAbort :: () => (P a) +happyAbort = error "Called abort handler in non-resumptive parser" + ourParser = happySomeParser where - happySomeParser = happyThen (happyParse 0#) (\x -> happyReturn (let {(HappyWrap4 x') = happyOut4 x} in x')) + happySomeParser = happyThen (happyParse 0#) (\x -> happyReturn (let {(HappyWrap5 x') = happyOut5 x} in x')) happySeq = happyDontSeq happyError :: P a happyError = failP (\l -> show l ++ ": Parse error\n") -{-# LINE 1 "templates/GenericTemplate.hs" #-} +#define HAPPY_COERCE 1 -- $Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp $ - - - - - - - - - - - +#if !defined(__GLASGOW_HASKELL__) +# error This code isn't being built with GHC. +#endif -- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. 
#if __GLASGOW_HASKELL__ > 706 -#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Prelude.Bool) -#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Prelude.Bool) -#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Prelude.Bool) +# define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Prelude.Bool) +# define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Prelude.Bool) +# define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Prelude.Bool) #else -#define LT(n,m) (n Happy_GHC_Exts.<# m) -#define GTE(n,m) (n Happy_GHC_Exts.>=# m) -#define EQ(n,m) (n Happy_GHC_Exts.==# m) +# define LT(n,m) (n Happy_GHC_Exts.<# m) +# define GTE(n,m) (n Happy_GHC_Exts.>=# m) +# define EQ(n,m) (n Happy_GHC_Exts.==# m) #endif +#define PLUS(n,m) (n Happy_GHC_Exts.+# m) +#define MINUS(n,m) (n Happy_GHC_Exts.-# m) +#define TIMES(n,m) (n Happy_GHC_Exts.*# m) +#define NEGATE(n) (Happy_GHC_Exts.negateInt# (n)) +type Happy_Int = Happy_GHC_Exts.Int# +data Happy_IntList = HappyCons Happy_Int Happy_IntList +#define ERROR_TOK 0# +#define CATCH_TOK 1# +#if defined(HAPPY_COERCE) +# define GET_ERROR_TOKEN(x) (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# i) -> i }) +# define MK_ERROR_TOKEN(i) (Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# i)) +# define MK_TOKEN(x) (happyInTok (x)) +#else +# define GET_ERROR_TOKEN(x) (case x of { HappyErrorToken (Happy_GHC_Exts.I# i) -> i }) +# define MK_ERROR_TOKEN(i) (HappyErrorToken (Happy_GHC_Exts.I# i)) +# define MK_TOKEN(x) (HappyTerminal (x)) +#endif - - - - - - - - - - - - - - - -data Happy_IntList = HappyCons Happy_GHC_Exts.Int# Happy_IntList - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +#if defined(HAPPY_DEBUG) +# define DEBUG_TRACE(s) (happyTrace (s)) $ +happyTrace string expr = Happy_System_IO_Unsafe.unsafePerformIO $ do + Happy_System_IO.hPutStr Happy_System_IO.stderr string + return expr +#else +# define DEBUG_TRACE(s) {- nothing -} +#endif infixr 9 `HappyStk` data HappyStk a = HappyStk a (HappyStk a) @@ -965,197 +936,309 @@ happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll -- If the current token is ERROR_TOK, it means we've just accepted a partial -- parse (a %partial parser). We must ignore the saved token on the top of -- the stack in this case. -happyAccept 0# tk st sts (_ `HappyStk` ans `HappyStk` _) = +happyAccept ERROR_TOK tk st sts (_ `HappyStk` ans `HappyStk` _) = happyReturn1 ans -happyAccept j tk st sts (HappyStk ans _) = +happyAccept j tk st sts (HappyStk ans _) = (happyTcHack j (happyTcHack st)) (happyReturn1 ans) ----------------------------------------------------------------------------- -- Arrays only: do the next action - - -happyDoAction i tk st - = {- nothing -} - case action of - 0# -> {- nothing -} - happyFail (happyExpListPerState ((Happy_GHC_Exts.I# (st)) :: Prelude.Int)) i tk st - -1# -> {- nothing -} - happyAccept i tk st - n | LT(n,(0# :: Happy_GHC_Exts.Int#)) -> {- nothing -} - (happyReduceArr Happy_Data_Array.! 
rule) i tk st - where rule = (Happy_GHC_Exts.I# ((Happy_GHC_Exts.negateInt# ((n Happy_GHC_Exts.+# (1# :: Happy_GHC_Exts.Int#)))))) - n -> {- nothing -} - happyShift new_state i tk st - where new_state = (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) - where off = happyAdjustOffset (indexShortOffAddr happyActOffsets st) - off_i = (off Happy_GHC_Exts.+# i) - check = if GTE(off_i,(0# :: Happy_GHC_Exts.Int#)) - then EQ(indexShortOffAddr happyCheck off_i, i) - else Prelude.False - action - | check = indexShortOffAddr happyTable off_i - | Prelude.otherwise = indexShortOffAddr happyDefActions st - - - - -indexShortOffAddr (HappyA# arr) off = - Happy_GHC_Exts.narrow16Int# i +happyDoAction i tk st = + DEBUG_TRACE("state: " ++ show (Happy_GHC_Exts.I# st) ++ + ",\ttoken: " ++ show (Happy_GHC_Exts.I# i) ++ + ",\taction: ") + case happyDecodeAction (happyNextAction i st) of + HappyFail -> DEBUG_TRACE("failing.\n") + happyFail i tk st + HappyAccept -> DEBUG_TRACE("accept.\n") + happyAccept i tk st + HappyReduce rule -> DEBUG_TRACE("reduce (rule " ++ show (Happy_GHC_Exts.I# rule) ++ ")") + (happyReduceArr Happy_Data_Array.! (Happy_GHC_Exts.I# rule)) i tk st + HappyShift new_state -> DEBUG_TRACE("shift, enter state " ++ show (Happy_GHC_Exts.I# new_state) ++ "\n") + happyShift new_state i tk st + +{-# INLINE happyNextAction #-} +happyNextAction i st = case happyIndexActionTable i st of + Just (Happy_GHC_Exts.I# act) -> act + Nothing -> happyIndexOffAddr happyDefActions st + +{-# INLINE happyIndexActionTable #-} +happyIndexActionTable i st + | GTE(off, 0#), EQ(happyIndexOffAddr happyCheck off, i) + = Prelude.Just (Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off)) + | otherwise + = Prelude.Nothing where - i = Happy_GHC_Exts.word2Int# (Happy_GHC_Exts.or# (Happy_GHC_Exts.uncheckedShiftL# high 8#) low) - high = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr (off' Happy_GHC_Exts.+# 1#))) - low = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr off')) - off' = off Happy_GHC_Exts.*# 2# - + off = PLUS(happyIndexOffAddr happyActOffsets st, i) + +data HappyAction + = HappyFail + | HappyAccept + | HappyReduce Happy_Int -- rule number + | HappyShift Happy_Int -- new state + +{-# INLINE happyDecodeAction #-} +happyDecodeAction :: Happy_Int -> HappyAction +happyDecodeAction 0# = HappyFail +happyDecodeAction -1# = HappyAccept +happyDecodeAction action | LT(action, 0#) = HappyReduce NEGATE(PLUS(action, 1#)) + | otherwise = HappyShift MINUS(action, 1#) + +{-# INLINE happyIndexGotoTable #-} +happyIndexGotoTable nt st = happyIndexOffAddr happyTable off + where + off = PLUS(happyIndexOffAddr happyGotoOffsets st, nt) +{-# INLINE happyIndexOffAddr #-} +happyIndexOffAddr :: HappyAddr -> Happy_Int -> Happy_Int +happyIndexOffAddr (HappyA# arr) off = +#ifdef WORDS_BIGENDIAN + Happy_GHC_Exts.narrow32Int# i + where + i = Happy_GHC_Exts.word2Int# ((b3 `Happy_GHC_Exts.uncheckedShiftL#` 24#) `Happy_GHC_Exts.or#` + (b2 `Happy_GHC_Exts.uncheckedShiftL#` 16#) `Happy_GHC_Exts.or#` + (b1 `Happy_GHC_Exts.uncheckedShiftL#` 8#) `Happy_GHC_Exts.or#` b0) + b3 = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr PLUS(off', 3#))) + b2 = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr PLUS(off', 2#))) + b1 = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr PLUS(off', 1#))) + b0 = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr off')) + 
off' = TIMES(off, 4#) +#else +#if __GLASGOW_HASKELL__ >= 901 + Happy_GHC_Exts.int32ToInt# +#endif + (Happy_GHC_Exts.indexInt32OffAddr# arr off) +#endif +indexRuleArr arr r = (Happy_GHC_Exts.I# nt, Happy_GHC_Exts.I# len) + where + (Happy_GHC_Exts.I# n_starts) = happy_n_starts + offs = TIMES(MINUS(r,n_starts),2#) + nt = happyIndexOffAddr arr offs + len = happyIndexOffAddr arr PLUS(offs,1#) {-# INLINE happyLt #-} happyLt x y = LT(x,y) - readArrayBit arr bit = - Bits.testBit (Happy_GHC_Exts.I# (indexShortOffAddr arr ((unbox_int bit) `Happy_GHC_Exts.iShiftRA#` 4#))) (bit `Prelude.mod` 16) + Bits.testBit (Happy_GHC_Exts.I# (happyIndexOffAddr arr ((unbox_int bit) `Happy_GHC_Exts.iShiftRA#` 5#))) (bit `Prelude.mod` 32) where unbox_int (Happy_GHC_Exts.I# x) = x - - - - - data HappyAddr = HappyA# Happy_GHC_Exts.Addr# - ------------------------------------------------------------------------------ --- HappyState data type (not arrays) - - - - - - - - - - - - - ----------------------------------------------------------------------------- -- Shifting a token -happyShift new_state 0# tk st sts stk@(x `HappyStk` _) = - let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in --- trace "shifting the error token" $ - happyDoAction i tk new_state (HappyCons (st) (sts)) (stk) +happyShift new_state ERROR_TOK tk st sts stk@(x `HappyStk` _) = + let i = GET_ERROR_TOKEN(x) in + DEBUG_TRACE("shifting the error token") + happyDoAction i tk new_state (HappyCons st sts) stk +-- TODO: When `i` would enter error recovery again, we should instead +-- discard input until the lookahead is acceptable. Perhaps this is +-- simplest to implement in CodeGen for productions using `error`; +-- there we know the context and can implement local shift+discard actions. +-- still need to remember parser-defined error site, though. happyShift new_state i tk st sts stk = - happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk) + happyNewToken new_state (HappyCons st sts) (MK_TOKEN(tk) `HappyStk` stk) -- happyReduce is specialised for the common cases. 
-happySpecReduce_0 i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk -happySpecReduce_0 nt fn j tk st@((action)) sts stk - = happyGoto nt j tk st (HappyCons (st) (sts)) (fn `HappyStk` stk) +happySpecReduce_0 nt fn j tk st sts stk + = happySeq fn (happyGoto nt j tk st (HappyCons st sts) (fn `HappyStk` stk)) -happySpecReduce_1 i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk -happySpecReduce_1 nt fn j tk _ sts@((HappyCons (st@(action)) (_))) (v1`HappyStk`stk') +happySpecReduce_1 nt fn j tk old_st sts@(HappyCons st _) (v1 `HappyStk` stk') = let r = fn v1 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happySpecReduce_2 i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk -happySpecReduce_2 nt fn j tk _ (HappyCons (_) (sts@((HappyCons (st@(action)) (_))))) (v1`HappyStk`v2`HappyStk`stk') +happySpecReduce_2 nt fn j tk old_st + (HappyCons _ sts@(HappyCons st _)) + (v1 `HappyStk` v2 `HappyStk` stk') = let r = fn v1 v2 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happySpecReduce_3 i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk -happySpecReduce_3 nt fn j tk _ (HappyCons (_) ((HappyCons (_) (sts@((HappyCons (st@(action)) (_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk') +happySpecReduce_3 nt fn j tk old_st + (HappyCons _ (HappyCons _ sts@(HappyCons st _))) + (v1 `HappyStk` v2 `HappyStk` v3 `HappyStk` stk') = let r = fn v1 v2 v3 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))) -happyReduce k i fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk happyReduce k nt fn j tk st sts stk - = case happyDrop (k Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) sts of - sts1@((HappyCons (st1@(action)) (_))) -> - let r = fn stk in -- it doesn't hurt to always seq here... + = case happyDrop k (HappyCons st sts) of + sts1@(HappyCons st1 _) -> + let r = fn stk in -- it doesn't hurt to always seq here... 
happyDoSeq r (happyGoto nt j tk st1 sts1 r) -happyMonadReduce k nt fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk happyMonadReduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons (st) (sts)) of - sts1@((HappyCons (st1@(action)) (_))) -> + case happyDrop k (HappyCons st sts) of + sts1@(HappyCons st1 _) -> let drop_stk = happyDropStk k stk in - happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) + happyThen1 (fn stk tk) + (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) -happyMonad2Reduce k nt fn 0# tk st sts stk - = happyFail [] 0# tk st sts stk happyMonad2Reduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons (st) (sts)) of - sts1@((HappyCons (st1@(action)) (_))) -> - let drop_stk = happyDropStk k stk - - off = happyAdjustOffset (indexShortOffAddr happyGotoOffsets st1) - off_i = (off Happy_GHC_Exts.+# nt) - new_state = indexShortOffAddr happyTable off_i - - - - + j `happyTcHack` case happyDrop k (HappyCons st sts) of + sts1@(HappyCons st1 _) -> + let drop_stk = happyDropStk k stk + new_state = happyIndexGotoTable nt st1 in - happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) + happyThen1 (fn stk tk) + (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) -happyDrop 0# l = l -happyDrop n (HappyCons (_) (t)) = happyDrop (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) t +happyDrop 0# l = l +happyDrop n (HappyCons _ t) = happyDrop MINUS(n,(1# :: Happy_Int)) t -happyDropStk 0# l = l -happyDropStk n (x `HappyStk` xs) = happyDropStk (n Happy_GHC_Exts.-# (1#::Happy_GHC_Exts.Int#)) xs +happyDropStk 0# l = l +happyDropStk n (x `HappyStk` xs) = happyDropStk MINUS(n,(1#::Happy_Int)) xs ----------------------------------------------------------------------------- -- Moving to a new state after a reduction - -happyGoto nt j tk st = - {- nothing -} +happyGoto nt j tk st = + DEBUG_TRACE(", goto state " ++ show (Happy_GHC_Exts.I# new_state) ++ "\n") happyDoAction j tk new_state - where off = happyAdjustOffset (indexShortOffAddr happyGotoOffsets st) - off_i = (off Happy_GHC_Exts.+# nt) - new_state = indexShortOffAddr happyTable off_i - - - + where new_state = happyIndexGotoTable nt st ----------------------------------------------------------------------------- --- Error recovery (ERROR_TOK is the error token) - --- parse error if we are in recovery and we fail again -happyFail explist 0# tk old_st _ stk@(x `HappyStk` _) = - let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in --- trace "failing" $ - happyError_ explist i tk - -{- We don't need state discarding for our restricted implementation of - "error". In fact, it can cause some bogus parses, so I've disabled it - for now --SDM - --- discard a state -happyFail ERROR_TOK tk old_st CONS(HAPPYSTATE(action),sts) - (saved_tok `HappyStk` _ `HappyStk` stk) = --- trace ("discarding state, depth " ++ show (length stk)) $ - DO_ACTION(action,ERROR_TOK,tk,sts,(saved_tok`HappyStk`stk)) --} - --- Enter error recovery: generate an error token, --- save the old token and carry on. -happyFail explist i tk (action) sts stk = --- trace "entering error recovery" $ - happyDoAction 0# tk action sts ((Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# (i))) `HappyStk` stk) +-- Error recovery +-- +-- When there is no applicable action for the current lookahead token `tk`, +-- happy enters error recovery mode. It works in 2 phases: +-- +-- 1. Fixup: Try to see if there is an action for the error token (`errorTok`, +-- which is ERROR_TOK). 
If there is, do *not* emit an error and pretend
+-- instead that an `errorTok` was inserted.
+-- When there is no `errorTok` action, call the error handler
+-- (e.g., `happyError`) with the resumption continuation `happyResume`.
+-- 2. Error resumption mode: If the error handler wants to resume parsing in
+-- order to report multiple parse errors, it will call the resumption
+-- continuation (of result type `P (Maybe a)`).
+-- In the absence of the %resumptive declaration, this resumption will
+-- always (do a bit of work, and) `return Nothing`.
+-- In the presence of the %resumptive declaration, the grammar author
+-- can use the special `catch` terminal to declare where parsing should
+-- resume after an error.
+-- E.g., if `stmt : expr ';' | catch ';'` then the resumption will
+--
+-- (a) Pop off the state stack until it finds an item
+--     `stmt -> . catch ';'`.
+--     Then, it will push a `catchTok` onto the stack, perform a shift and
+--     end up in item `stmt -> catch . ';'`.
+-- (b) Discard tokens from the lexer until it finds ';'.
+--     (In general, it will discard input until the lookahead has a
+--     non-default action, i.e., until it matches a token that applies
+--     in the situation `P -> α catch . β`, where β might be empty.)
+--
+-- The `catch` resumption mechanism (2) is what is usually associated with
+-- `error` in `bison` or `menhir`. Since `error` is used for the Fixup mechanism
+-- (1) above, we call the corresponding token `catch`.
+
+-- Enter error Fixup: generate an error token,
+-- save the old token and carry on.
+-- When a `happyShift` accepts, we will pop off the error
+-- token to resume parsing with the current lookahead `i`.
+happyTryFixup i tk action sts stk =
+  DEBUG_TRACE("entering `error` fixup.\n")
+  happyDoAction ERROR_TOK tk action sts (MK_ERROR_TOKEN(i) `HappyStk` stk)
+  -- NB: `happyShift` will simply pop the error token and carry on with
+  -- `tk`. Hence we don't change `tk` in the call here
+
+-- parse error if we are in fixup and fail again
+happyFixupFailed tk st sts (x `HappyStk` stk) =
+  let i = GET_ERROR_TOKEN(x) in
+  DEBUG_TRACE("`error` fixup failed.\n")
+  let resume = happyResume i tk st sts stk
+      expected = map happyTokenToString (happyExpectedTokens st sts) in
+  if happyAlreadyInResumption st sts
+    then resume
+    else happyReport i tk expected resume
+
+happyAlreadyInResumption st sts
+  | (Happy_GHC_Exts.I# n_starts) <- happy_n_starts, LT(st, n_starts)
+  = False -- end of the stack
+  | (Happy_GHC_Exts.I# st) `elem` happyCatchStates
+  = True
+  | HappyCons st1 sts1 <- sts
+  = happyAlreadyInResumption st1 sts1
+
+happyFail ERROR_TOK = happyFixupFailed
+happyFail i = happyTryFixup i
+
+happyResume i tk st sts stk = pop_items st sts stk
+  where
+    pop_items st sts stk
+      | HappyShift new_state <- happyDecodeAction (happyNextAction CATCH_TOK st)
+      = DEBUG_TRACE("shifting catch token " ++ show (Happy_GHC_Exts.I# st)
+                    ++ " -> " ++ show (Happy_GHC_Exts.I# new_state) ++ "\n")
+        discard_input_until_exp i tk new_state (HappyCons st sts) (MK_ERROR_TOKEN(i) `HappyStk` stk)
+      | DEBUG_TRACE("can't shift catch in " ++ show (Happy_GHC_Exts.I# st) ++ ", ") True
+      , (Happy_GHC_Exts.I# n_starts) <- happy_n_starts, LT(st, n_starts)
+      = DEBUG_TRACE("because it is a start state. 
no resumption.\n") + happyAbort + | (HappyCons st1 sts1) <- sts, _ `HappyStk` stk1 <- stk + = DEBUG_TRACE("discarding.\n") + pop_items st1 sts1 stk1 + discard_input_until_exp i tk st sts stk + | ultimately_fails i st sts + = DEBUG_TRACE("discard token in state " ++ show (Happy_GHC_Exts.I# st) + ++ ": " ++ show (Happy_GHC_Exts.I# i) ++ "\n") + happyLex (\_eof_tk -> happyAbort) + (\i tk -> discard_input_until_exp i tk st sts stk) -- not eof + | otherwise + = DEBUG_TRACE("found expected token in state " ++ show (Happy_GHC_Exts.I# st) + ++ ": " ++ show (Happy_GHC_Exts.I# i) ++ "\n") + (happyDoAction i tk st sts stk) + + ultimately_fails i st sts = + DEBUG_TRACE("trying token " ++ show (Happy_GHC_Exts.I# i) + ++ " in state " ++ show (Happy_GHC_Exts.I# st) ++ ": ") + case happyDecodeAction (happyNextAction i st) of + HappyFail -> DEBUG_TRACE("fail.\n") True + HappyAccept -> DEBUG_TRACE("accept.\n") False + HappyShift _ -> DEBUG_TRACE("shift.\n") False + HappyReduce r -> case happySimulateReduce r st sts of + HappyCons st1 sts1 -> ultimately_fails i st1 sts1 + +happySimulateReduce r st sts = + DEBUG_TRACE("simulate reduction of rule " ++ show r ++ ", ") + let (Happy_GHC_Exts.I# nt, Happy_GHC_Exts.I# len) = indexRuleArr happyRuleArr r in + DEBUG_TRACE("nt " ++ show (Happy_GHC_Exts.I# nt) ++ ", len: " + ++ show (Happy_GHC_Exts.I# len) ++ ", new_st ") + let sts1@(HappyCons st1 _) = happyDrop len (HappyCons st sts) + new_st = happyIndexGotoTable nt st1 in + DEBUG_TRACE(show (Happy_GHC_Exts.I# new_st) ++ ".\n") + HappyCons new_st sts1 + +happyTokenToString i = happyTokenStrings Prelude.!! (i Prelude.- 2) +happyExpectedTokens st sts = + DEBUG_TRACE("constructing expected tokens.\n") + search_shifts st sts [] + where + search_shifts st sts shifts = foldr (add_action st sts) shifts (distinct_actions st) + add_action st sts (Happy_GHC_Exts.I# i, Happy_GHC_Exts.I# act) shifts = + DEBUG_TRACE("found action in state " ++ show (Happy_GHC_Exts.I# st) + ++ ", input " ++ show (Happy_GHC_Exts.I# i) ++ ", " + ++ show (happyDecodeAction act) ++ "\n") + case happyDecodeAction act of + HappyFail -> shifts + HappyAccept -> shifts -- This would always be %eof or error... Not helpful + HappyShift _ -> Happy_Data_List.insert (Happy_GHC_Exts.I# i) shifts + HappyReduce r -> case happySimulateReduce r st sts of + HappyCons st1 sts1 -> search_shifts st1 sts1 shifts + distinct_actions st + = ((-1), Happy_GHC_Exts.I# (happyIndexOffAddr happyDefActions st)) + : [ (i, act) | i <- [begin_i..happy_n_terms], act <- get_act row_off i ] + where + row_off = happyIndexOffAddr happyActOffsets st + begin_i = 2 -- +2: errorTok,catchTok + get_act off (Happy_GHC_Exts.I# i) + | let off_i = PLUS(off,i) + , GTE(off_i,0#) + , EQ(happyIndexOffAddr happyCheck off_i,i) + = [Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off_i)] + | otherwise + = [] -- Internal happy errors: @@ -1165,14 +1248,12 @@ notHappyAtAll = Prelude.error "Internal Happy error\n" ----------------------------------------------------------------------------- -- Hack to get the typechecker to accept our action functions - -happyTcHack :: Happy_GHC_Exts.Int# -> a -> a +happyTcHack :: Happy_Int -> a -> a happyTcHack x y = y {-# INLINE happyTcHack #-} - ----------------------------------------------------------------------------- --- Seq-ing. If the --strict flag is given, then Happy emits +-- Seq-ing. 
If the --strict flag is given, then Happy emits -- happySeq = happyDoSeq -- otherwise it emits -- happySeq = happyDontSeq @@ -1186,7 +1267,6 @@ happyDontSeq a b = b -- of deciding to inline happyGoto everywhere, which increases the size of -- the generated parser quite a bit. - {-# NOINLINE happyDoAction #-} {-# NOINLINE happyTable #-} {-# NOINLINE happyCheck #-} diff --git a/packages/grammar/src/Happy/Grammar.lhs b/packages/grammar/src/Happy/Grammar.lhs index d088b0b3..594f554c 100644 --- a/packages/grammar/src/Happy/Grammar.lhs +++ b/packages/grammar/src/Happy/Grammar.lhs @@ -6,14 +6,15 @@ The Grammar data type. > module Happy.Grammar ( > Name, -> + > Production(..), Grammar(..), > Priority(..), > Assoc(..), -> -> errorName, errorTok, startName, dummyName, firstStartTok, dummyTok, + +> errorName, errorTok, catchName, catchTok, +> startName, dummyName, firstStartTok, dummyTok, > eofName, epsilonTok, -> + > mapDollarDollar > ) where @@ -111,15 +112,17 @@ In normal and GHC-based parsers, these numbers are also used in the generated grammar itself, except that the error token is mapped to -1. For array-based parsers, see the note in Tabular/LALR.lhs. -> startName, eofName, errorName, dummyName :: String +> startName, eofName, errorName, catchName, dummyName :: String > startName = "%start" -- with a suffix, like %start_1, %start_2 etc. > eofName = "%eof" > errorName = "error" +> catchName = "catch" > dummyName = "%dummy" -- shouldn't occur in the grammar anywhere -> firstStartTok, dummyTok, errorTok, epsilonTok :: Name -> firstStartTok = 3 -> dummyTok = 2 +> firstStartTok, dummyTok, errorTok, catchTok, epsilonTok :: Name +> firstStartTok = 4 +> dummyTok = 3 +> catchTok = 2 > errorTok = 1 > epsilonTok = 0 @@ -131,7 +134,7 @@ Replace $$ with an arbitrary string, being careful to avoid ".." and '.'. > where go code acc = > case code of > [] -> Nothing -> + > '"' :r -> case reads code :: [(String,String)] of > [] -> go r ('"':acc) > (s,r'):_ -> go r' (reverse (show s) ++ acc) diff --git a/packages/tabular/src/Happy/Tabular.lhs b/packages/tabular/src/Happy/Tabular.lhs index f86d1c78..860d76cc 100644 --- a/packages/tabular/src/Happy/Tabular.lhs +++ b/packages/tabular/src/Happy/Tabular.lhs @@ -64,7 +64,7 @@ Find unused rules and tokens > start_rules = [ 0 .. (length starts' - 1) ] > used_rules = start_rules ++ > nub [ r | (_,a) <- actions, r <- extract_reductions a ] -> used_tokens = errorTok : eof : +> used_tokens = errorTok : catchTok : eof : > nub [ t | (t,a) <- actions, is_shift a ] > n_prods = length productions' > unused_terminals = filter (`notElem` used_tokens) terms diff --git a/packages/tabular/src/Happy/Tabular/First.lhs b/packages/tabular/src/Happy/Tabular/First.lhs index 7c0da2ca..3e3e049b 100644 --- a/packages/tabular/src/Happy/Tabular/First.lhs +++ b/packages/tabular/src/Happy/Tabular/First.lhs @@ -49,7 +49,7 @@ This will never terminate. > getNext fst_term prodNo prodsOfName env = > [ (nm, next nm) | (nm,_) <- env ] > where -> fn t | t == errorTok || t >= fst_term = Set.singleton t +> fn t | t == errorTok || t == catchTok || t >= fst_term = Set.singleton t > fn x = maybe (error "attempted FIRST(e) :-(") id (lookup x env) > next :: Name -> NameSet @@ -58,4 +58,3 @@ This will never terminate. 
> [ joinSymSets fn lhs > | rl <- prodsOfName n > , let Production _ lhs _ _ = prodNo rl ] - diff --git a/packages/tabular/src/Happy/Tabular/LALR.lhs b/packages/tabular/src/Happy/Tabular/LALR.lhs index 8597a405..2cd32648 100644 --- a/packages/tabular/src/Happy/Tabular/LALR.lhs +++ b/packages/tabular/src/Happy/Tabular/LALR.lhs @@ -96,7 +96,7 @@ using a memo table so that no work is repeated. > precalcClosure0 g = > \n -> maybe [] id (lookup n info') > where -> + > info' :: [(Name, RuleList)] > info' = map (\(n,rules) -> (n,map (\rule -> Lr0 rule 0) (NameSet.toAscList rules))) info @@ -122,7 +122,7 @@ using a memo table so that no work is repeated. > where > fst_term = first_term g > addRules rule set' = Set.union (Set.fromList (rule : closureOfRule rule)) set' -> + > closureOfRule (Lr0 rule dot) = > case findRule g rule dot of > (Just nt) | nt >= firstStartTok && nt < fst_term @@ -238,10 +238,10 @@ information about which sets were generated by which others. > addItems :: ([ItemSetWithGotos], [Set Lr0Item]) > -> ([ItemSetWithGotos], [Set Lr0Item]) -> + > addItems (oldSets,newSets) = (newOldSets, reverse newNewSets) > where -> + > newOldSets = oldSets ++ (zip newSets intgotos) > itemSets = map fst oldSets ++ newSets @@ -274,7 +274,7 @@ Unfortunately, the code's a little opaque. > [[(Name,Int)]], > [Set Lr0Item]) > -> (Int, [[(Name,Int)]], [Set Lr0Item]) -> + > numberSets [] (i,gotos',newSets') = (i,([]:gotos'),newSets') > numberSets ((x,gotoix):rest) (i,g':gotos',newSets') > = numberSets rest @@ -484,7 +484,7 @@ Generating the goto table doesn't need lookahead info. > Grammar{ first_nonterm = fst_nonterm, > first_term = fst_term, > non_terminals = non_terms } = g -> + > -- goto array doesn't include %start symbols > gotoTable = listArray (0,length sets-1) > [ @@ -518,7 +518,7 @@ Generate the action table > possAction goto _set (Lr1 rule pos la) = > case findRule g rule pos of -> Just t | t >= fst_term || t == errorTok -> +> Just t | t >= fst_term || t == errorTok || t == catchTok -> > let f j = (t,LR'Shift j p) > p = maybe No id (lookup t prios) > in map f $ maybeToList (lookup t goto) @@ -627,12 +627,12 @@ Count the conflicts > countConflicts :: ActionTable -> (Array Int (Int,Int), (Int,Int)) > countConflicts action > = (conflictArray, foldl' (\(a,b) (c,d) -> let ac = a + c; bd = b + d in ac `seq` bd `seq` (ac,bd)) (0,0) conflictList) -> + > where -> + > conflictArray = listArray (Array.bounds action) conflictList > conflictList = map countConflictsState (assocs action) -> + > countConflictsState (_state, actions) > = foldr countMultiples (0,0) (elems actions) > where diff --git a/tests/issue265.y b/tests/issue265.y new file mode 100644 index 00000000..9cbfcc21 --- /dev/null +++ b/tests/issue265.y @@ -0,0 +1,80 @@ +{ +{-# LANGUAGE FunctionalDependencies #-} +{-# LANGUAGE FlexibleInstances #-} +-- For ancient GHC 7.0.4 +{-# LANGUAGE MultiParamTypeClasses #-} +module Main where + +import Control.Monad (when) +import Data.Char +import System.Exit +} + +%name parseStmts +%tokentype { Token } +%errorhandlertype explist +%error { handleError } + +%monad { ParseM } { (>>=) } { return } + +%token + '1' { TOne } + '+' { TPlus } + ';' { TSemi } + +%% + +Stmts : {- empty -} { [] } + | Stmt { [$1] } + | Stmts ';' Stmt { $1 ++ [$3] } + +Stmt : Exp { ExpStmt $1 } + +Exp : '1' { One } + | Exp '+' Exp %shift { Plus $1 $3 } + +{ +data Token = TOne | TPlus | TSemi + deriving (Eq,Show) + +type Stmts = [Stmt] +data Stmt = ExpStmt Exp + deriving (Eq, Show) +data Exp = One | Plus Exp Exp + deriving (Eq, Show) + 
+type ParseM = Either ParseError + +data ParseError + = ParseError [String] + deriving Eq +instance Show ParseError where + show (ParseError exp) = "Parse error. Expected: " ++ show exp + +recordParseError :: [String] -> ParseM a +recordParseError expected = Left (ParseError expected) + +handleError :: ([Token], [String]) -> ParseM a +handleError (ts, expected) = recordParseError expected + +lexer :: String -> [Token] +lexer [] = [] +lexer (c:cs) + | isSpace c = lexer cs + | c == '1' = TOne:(lexer cs) + | c == '+' = TPlus:(lexer cs) + | c == ';' = TSemi:(lexer cs) + | otherwise = error "lexer error" + +main :: IO () +main = do + test "11;1" $ \res -> res == Left (ParseError ["';'","'+'"]) + where + test inp p = do + putStrLn $ "testing " ++ inp + let tokens = lexer inp + let res = parseStmts tokens + when (not (p res)) $ do + print res + exitWith (ExitFailure 1) +} diff --git a/tests/monaderror-explist.y b/tests/monaderror-explist.y index 558f28ee..dd84d23c 100644 --- a/tests/monaderror-explist.y +++ b/tests/monaderror-explist.y @@ -14,8 +14,8 @@ import Data.List (isPrefixOf) %name parseFoo %tokentype { Token } -%errorhandlertype explist %error { handleErrorExpList } +%error.expected %monad { ParseM } { (>>=) } { return } @@ -46,9 +46,9 @@ data Token | TokenTest deriving (Eq,Show) -handleErrorExpList :: ([Token], [String]) -> ParseM a -handleErrorExpList ([], _) = throwError $ ParseError Nothing -handleErrorExpList (ts, explist) = throwError $ ParseError $ Just $ (head ts, explist) +handleErrorExpList :: [Token] -> [String] -> ParseM a +handleErrorExpList [] _ = throwError $ ParseError Nothing +handleErrorExpList ts explist = throwError $ ParseError $ Just $ (head ts, explist) lexer :: String -> [Token] lexer [] = [] diff --git a/tests/monaderror-resume.y b/tests/monaderror-resume.y new file mode 100644 index 00000000..46ebf9fc --- /dev/null +++ b/tests/monaderror-resume.y @@ -0,0 +1,116 @@ +{ +{-# LANGUAGE FunctionalDependencies #-} +{-# LANGUAGE FlexibleInstances #-} +-- For ancient GHC 7.0.4 +{-# LANGUAGE MultiParamTypeClasses #-} +module Main where + +import Control.Monad (when) +import Data.Char +import System.Exit +} + +%name parseStmts Stmts +%name parseExp Exp +%tokentype { Token } +%error { \_ -> abort } { reportError } -- the entire point of this test +%error.expected -- as in monaderror-explist.y + +%monad { ParseM } { (>>=) } { return } + +%token + '1' { TOne } + '+' { TPlus } + ';' { TSemi } + +%% + +Stmts : {- empty -} { [] } + | Stmt { [$1] } + | Stmts ';' Stmt { $1 ++ [$3] } + | catch ';' Stmt %shift { [$3] } -- Could insert error AST token here in place of $1 + +Stmt : Exp { ExpStmt $1 } + +Exp : '1' { One } + | Exp '+' Exp %shift { Plus $1 $3 } + +{ +data Token = TOne | TPlus | TSemi + deriving (Eq,Show) + +type Stmts = [Stmt] +data Stmt = ExpStmt Exp + deriving (Eq, Show) +data Exp = One | Plus Exp Exp + deriving (Eq, Show) + +----------- Validation monad +data Validate e a = V e (Maybe a) + deriving Functor +instance Monoid e => Applicative (Validate e) where + pure a = V mempty (Just a) + V e1 f <*> V e2 a = V (e1 <> e2) (f <*> a) +instance Monoid e => Monad (Validate e) where + V e Nothing >>= _ = V e Nothing -- fatal + V e1 (Just a) >>= k | V e2 b <- k a = V (e1 <> e2) b -- non-fatal + +abort :: Monoid e => Validate e a -- this would be mzero from MonadPlus +abort = V mempty Nothing + +recordError :: e -> Validate e () -- this would be tell from MonadWriter +recordError e = V e (Just ()) + +runValidate (V e mb_a) = (e, mb_a) +----------- + +type ParseM = Validate 
[ParseError] + +data ParseError + = ParseError [String] + deriving Eq +instance Show ParseError where + show (ParseError exp) = "Parse error. Expected: " ++ show exp + +recordParseError :: [String] -> ParseM () +recordParseError expected = recordError [ParseError expected] + +reportError :: [Token] -> [String] -> ([Token] -> ParseM a) -> ParseM a +reportError ts expected resume = do + recordParseError expected + resume ts + +lexer :: String -> [Token] +lexer [] = [] +lexer (c:cs) + | isSpace c = lexer cs + | c == '1' = TOne:(lexer cs) + | c == '+' = TPlus:(lexer cs) + | c == ';' = TSemi:(lexer cs) + | otherwise = error "lexer error" + +main :: IO () +main = do + test "1+1;1" $ \(_,mb_ast) -> mb_ast == Just [ExpStmt (One `Plus` One), ExpStmt One] + test "1++1;1" $ \(errs,_) -> errs == [ParseError ["'1'"]] + test "1++1;1;+" $ \(errs,_) -> errs == [ParseError ["'1'"], ParseError ["'1'"]] + test "11;1" $ \(errs,_) -> errs == [ParseError ["';'"]] + test "11;1;++" $ \(errs,_) -> errs == [ParseError ["';'"], ParseError ["'1'"]] + test "11;1;1++" $ \(errs,_) -> errs == [ParseError ["';'"], ParseError ["'1'"]] + testExp "11" $ \(errs,_) -> errs == [ParseError ["'+'"]] + where + test inp p = do + putStrLn $ "testing " ++ inp + let tokens = lexer inp + let res = runValidate $ parseStmts tokens + when (not (p res)) $ do + print res + exitWith (ExitFailure 1) + testExp inp p = do + putStrLn $ "testing Exp " ++ inp + let tokens = lexer inp + let res = runValidate $ parseExp tokens + when (not (p res)) $ do + print res + exitWith (ExitFailure 1) +}
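
Reviewer sketch, not part of the patch: to make the resumptive flow concrete, the snippet below drives the parser defined in tests/monaderror-resume.y by hand. It uses only names that the test itself defines (parseStmts, lexer, runValidate, ParseError, Stmt); the helper `demo` and the exact recovered AST are illustrative assumptions, while the reported error list is the one the test asserts.

demo :: IO ()
demo = do
  -- "11;1": the second '1' has no action, so the parser records
  -- ParseError ["';'"] and resumes via the `Stmts : catch ';' Stmt` rule,
  -- discarding input until the ';' can be shifted.
  let (errs, mb_ast) = runValidate (parseStmts (lexer "11;1"))
  print errs    -- [ParseError ["';'"]]  (asserted by the test)
  print mb_ast  -- presumably Just [ExpStmt One]: the Stmt after the ';' still parses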