Commit d03edc1a authored by pm3g19

Lexer, parser and pre-evaluation now work

parent bc7c1f70
@@ -3,18 +3,24 @@ import Debug
 import Eval
 import System.IO
 import ReadCSV
+import Lexer
+import Parser

 parse :: String -> Program
-parse = notImplemented
+parse = parseSource.alexScanTokens

 --outline
+main = interpret "sampleprogram.txt"

 interpret :: FilePath -> IO () -- the main function, takes in file name, prints out result
 interpret sourceFName = do
     source <- readFile sourceFName
     let program = parse source -- main abstract syntax tree
     (env, mainExpr) <- prepare program
+    print $ "Main expression " ++ show mainExpr
     let output = eval env mainExpr
+    print $ "Output " ++ show output
     let finalOutput = evalFinal output
     showFinal finalOutput
Lexer.hs 0 → 100644
This diff is collapsed.
 {
 module Lexer where
+import Data.List
 }

 %wrapper "basic"

@@ -9,30 +10,29 @@ $lower = [a-z]
 $upper = [A-Z]

 tokens :-
   $white+                        ;
+  "--".*                         ;
   filter                         {\s -> TokenFilter }
-  $upper[$alpha]*                {\s -> TokenSetName s }
-  Func                           {\s -> TokenFunc }
-  $digit+                        {\s -> TokenNat (read s) }
-  $lower [$lower $digit \_ \']*  {\s -> TokenVarName s }
-  Record                         {\s -> TokenRecord }
   true                           {\s -> TokenTrue }
   false                          {\s -> TokenFalse }
-  $[$alpha $digit]*              {\s -> TokenString s }
-  '['                            {\s -> TokenLeftSqBracket }
-  ']'                            {\s -> TokenRightSqBracket }
+  \.in                           {\s -> TokenInSet }
+  \.out                          {\s -> TokenOutSet }
+  \[                             {\s -> TokenLeftSqBracket }
+  \]                             {\s -> TokenRightSqBracket }
   "->"                           {\s -> TokenArrow }
   "=="                           {\s -> TokenisEqual }
   "/="                           {\s -> TokenisNotEqual }
-  '('                            {\s -> TokenLeftBracket }
-  ')'                            {\s -> TokenRightBracket }
-  ';'                            {\s -> TokenApp }
-  '\\'                           {\s -> TokenLambda }
-  ','                            {\s -> TokenComma }
-  '.'                            {\s -> TokenFullstop }
-  in                             {\s -> TokenInSet }
-  out                            {\s -> TokenOutSet }
+  \(                             {\s -> TokenLeftBracket }
+  \)                             {\s -> TokenRightBracket }
+  \;                             {\s -> TokenSemiCol }
+  \\                             {\s -> TokenLambda }
+  --\,                           {\s -> TokenComma }
+  \.                             {\s -> TokenFullStop }
+  $lower [$lower $digit \_ \']*  {\s -> TokenVarName s }
+  $upper[$alpha]*                {\s -> TokenSetName s }
+  $digit+                        {\s -> TokenNat (read s) }
+  \"[$alpha $digit]+\"           {\s -> ((TokenString).init.tail) s }

 {
 --token type:
 data Token =
@@ -41,7 +41,6 @@ data Token =
   TokenFunc |
   TokenNat Int |
   TokenVarName String |
-  TokenRecord |
   TokenTrue |
   TokenFalse |
   TokenString String |
@@ -52,7 +51,7 @@ data Token =
   TokenisNotEqual |
   TokenLeftBracket |
   TokenRightBracket |
-  TokenApp |
+  TokenSemiCol |
   TokenLambda |
   TokenComma |
   TokenFullStop |
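As a sanity check on these rules, this is the token stream they should produce for the sample program added at the bottom of this commit. It is worked out by hand from the rules above rather than captured from a run, and the module and binding names are only for illustration:

-- Hand-derived expectation for alexScanTokens applied to sampleprogram.txt;
-- not captured output.
module TokenSample where

import Lexer

tokensForSample :: [Token]
tokensForSample =
  [ TokenInSet, TokenSetName "SampleSet", TokenOutSet
  , TokenFilter, TokenLeftSqBracket, TokenSetName "A", TokenRightSqBracket
  , TokenLeftBracket, TokenLambda, TokenLeftBracket, TokenVarName "r", TokenRightBracket
  , TokenArrow
  , TokenVarName "r", TokenLeftSqBracket, TokenNat 1, TokenRightSqBracket
  , TokenisEqual, TokenString "hello", TokenRightBracket
  ]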
Parser.hs 0 → 100644
{-# OPTIONS_GHC -w #-}
module Parser where
import Lexer
import Types
import qualified Data.Array as Happy_Data_Array
import qualified Data.Bits as Bits
import Control.Applicative(Applicative(..))
import Control.Monad (ap)
-- parser produced by Happy Version 1.20.0
data HappyAbsSyn t4 t5 t6 t7 t8 t9 t10 t11 t12
= HappyTerminal (Token)
| HappyErrorToken Prelude.Int
| HappyAbsSyn4 t4
| HappyAbsSyn5 t5
| HappyAbsSyn6 t6
| HappyAbsSyn7 t7
| HappyAbsSyn8 t8
| HappyAbsSyn9 t9
| HappyAbsSyn10 t10
| HappyAbsSyn11 t11
| HappyAbsSyn12 t12
happyExpList :: Happy_Data_Array.Array Prelude.Int Prelude.Int
happyExpList = Happy_Data_Array.listArray (0,75) ([8192,0,16384,0,0,2,0,0,0,4,0,0,8,32,0,8,0,0,0,0,32,32768,0,0,0,2048,0,512,0,0,0,0,32,0,1024,0,16384,0,8192,0,8192,0,32,0,0,0,0,2,0,32,512,0,0,1,63488,0,0,0,0,18,0,0,0,0,0,0,0,0,0,0,0,248,0,2304,2,1024,0,32,0,3968,0,0,0,16384,0,0,0,63488,0,0,0,0,0,0
])
{-# NOINLINE happyExpListPerState #-}
happyExpListPerState st =
token_strs_expected
where token_strs = ["error","%dummy","%start_parseSource","Prog","SetNames","VarNames","SetFuncCalls","SetFuncCall","Func","Expr","Record","Exprs","filter","in","out","SetName","Nat","VarName","true","false","Str","'['","']'","\"->\"","\"==\"","\"/=\"","'('","')'","';'","'\\\\'","','","'.'","%eof"]
bit_start = st Prelude.* 33
bit_end = (st Prelude.+ 1) Prelude.* 33
read_bit = readArrayBit happyExpList
bits = Prelude.map read_bit [bit_start..bit_end Prelude.- 1]
bits_indexed = Prelude.zip bits [0..32]
token_strs_expected = Prelude.concatMap f bits_indexed
f (Prelude.False, _) = []
f (Prelude.True, nr) = [token_strs Prelude.!! nr]
action_0 (14) = happyShift action_2
action_0 (4) = happyGoto action_3
action_0 _ = happyFail (happyExpListPerState 0)
action_1 (14) = happyShift action_2
action_1 _ = happyFail (happyExpListPerState 1)
action_2 (16) = happyShift action_5
action_2 (5) = happyGoto action_4
action_2 _ = happyFail (happyExpListPerState 2)
action_3 (33) = happyAccept
action_3 _ = happyFail (happyExpListPerState 3)
action_4 (15) = happyShift action_7
action_4 _ = happyFail (happyExpListPerState 4)
action_5 (31) = happyShift action_6
action_5 _ = happyReduce_2
action_6 (16) = happyShift action_5
action_6 (5) = happyGoto action_11
action_6 _ = happyFail (happyExpListPerState 6)
action_7 (13) = happyShift action_10
action_7 (7) = happyGoto action_8
action_7 (8) = happyGoto action_9
action_7 _ = happyFail (happyExpListPerState 7)
action_8 _ = happyReduce_1
action_9 (29) = happyShift action_13
action_9 _ = happyReduce_6
action_10 (22) = happyShift action_12
action_10 _ = happyFail (happyExpListPerState 10)
action_11 _ = happyReduce_3
action_12 (16) = happyShift action_15
action_12 _ = happyFail (happyExpListPerState 12)
action_13 (13) = happyShift action_10
action_13 (7) = happyGoto action_14
action_13 (8) = happyGoto action_9
action_13 _ = happyFail (happyExpListPerState 13)
action_14 _ = happyReduce_7
action_15 (23) = happyShift action_16
action_15 _ = happyFail (happyExpListPerState 15)
action_16 (27) = happyShift action_17
action_16 _ = happyFail (happyExpListPerState 16)
action_17 (30) = happyShift action_19
action_17 (9) = happyGoto action_18
action_17 _ = happyFail (happyExpListPerState 17)
action_18 (28) = happyShift action_21
action_18 _ = happyFail (happyExpListPerState 18)
action_19 (27) = happyShift action_20
action_19 _ = happyFail (happyExpListPerState 19)
action_20 (18) = happyShift action_23
action_20 (6) = happyGoto action_22
action_20 _ = happyFail (happyExpListPerState 20)
action_21 _ = happyReduce_8
action_22 (28) = happyShift action_25
action_22 _ = happyFail (happyExpListPerState 22)
action_23 (31) = happyShift action_24
action_23 _ = happyReduce_4
action_24 (18) = happyShift action_23
action_24 (6) = happyGoto action_27
action_24 _ = happyFail (happyExpListPerState 24)
action_25 (24) = happyShift action_26
action_25 _ = happyFail (happyExpListPerState 25)
action_26 (18) = happyShift action_30
action_26 (19) = happyShift action_31
action_26 (20) = happyShift action_32
action_26 (21) = happyShift action_33
action_26 (22) = happyShift action_34
action_26 (10) = happyGoto action_28
action_26 (11) = happyGoto action_29
action_26 _ = happyFail (happyExpListPerState 26)
action_27 _ = happyReduce_5
action_28 (22) = happyShift action_37
action_28 (25) = happyShift action_38
action_28 _ = happyReduce_9
action_29 _ = happyReduce_14
action_30 _ = happyReduce_13
action_31 _ = happyReduce_15
action_32 _ = happyReduce_16
action_33 _ = happyReduce_12
action_34 (18) = happyShift action_30
action_34 (19) = happyShift action_31
action_34 (20) = happyShift action_32
action_34 (21) = happyShift action_33
action_34 (22) = happyShift action_34
action_34 (10) = happyGoto action_35
action_34 (11) = happyGoto action_29
action_34 (12) = happyGoto action_36
action_34 _ = happyFail (happyExpListPerState 34)
action_35 (22) = happyShift action_37
action_35 (25) = happyShift action_38
action_35 (31) = happyShift action_42
action_35 _ = happyReduce_18
action_36 (23) = happyShift action_41
action_36 _ = happyFail (happyExpListPerState 36)
action_37 (17) = happyShift action_40
action_37 _ = happyFail (happyExpListPerState 37)
action_38 (18) = happyShift action_30
action_38 (19) = happyShift action_31
action_38 (20) = happyShift action_32
action_38 (21) = happyShift action_33
action_38 (22) = happyShift action_34
action_38 (10) = happyGoto action_39
action_38 (11) = happyGoto action_29
action_38 _ = happyFail (happyExpListPerState 38)
action_39 (22) = happyShift action_37
action_39 _ = happyReduce_10
action_40 (23) = happyShift action_44
action_40 _ = happyFail (happyExpListPerState 40)
action_41 _ = happyReduce_17
action_42 (18) = happyShift action_30
action_42 (19) = happyShift action_31
action_42 (20) = happyShift action_32
action_42 (21) = happyShift action_33
action_42 (22) = happyShift action_34
action_42 (10) = happyGoto action_35
action_42 (11) = happyGoto action_29
action_42 (12) = happyGoto action_43
action_42 _ = happyFail (happyExpListPerState 42)
action_43 _ = happyReduce_19
action_44 _ = happyReduce_11
happyReduce_1 = happyReduce 4 4 happyReduction_1
happyReduction_1 ((HappyAbsSyn7 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn5 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn4
((happy_var_2,happy_var_4)
) `HappyStk` happyRest
happyReduce_2 = happySpecReduce_1 5 happyReduction_2
happyReduction_2 (HappyTerminal (TokenSetName happy_var_1))
= HappyAbsSyn5
([happy_var_1]
)
happyReduction_2 _ = notHappyAtAll
happyReduce_3 = happySpecReduce_3 5 happyReduction_3
happyReduction_3 (HappyAbsSyn5 happy_var_3)
_
(HappyTerminal (TokenSetName happy_var_1))
= HappyAbsSyn5
(happy_var_1:happy_var_3
)
happyReduction_3 _ _ _ = notHappyAtAll
happyReduce_4 = happySpecReduce_1 6 happyReduction_4
happyReduction_4 (HappyTerminal (TokenVarName happy_var_1))
= HappyAbsSyn6
([happy_var_1]
)
happyReduction_4 _ = notHappyAtAll
happyReduce_5 = happySpecReduce_3 6 happyReduction_5
happyReduction_5 (HappyAbsSyn6 happy_var_3)
_
(HappyTerminal (TokenVarName happy_var_1))
= HappyAbsSyn6
(happy_var_1:happy_var_3
)
happyReduction_5 _ _ _ = notHappyAtAll
happyReduce_6 = happySpecReduce_1 7 happyReduction_6
happyReduction_6 (HappyAbsSyn8 happy_var_1)
= HappyAbsSyn7
([happy_var_1]
)
happyReduction_6 _ = notHappyAtAll
happyReduce_7 = happySpecReduce_3 7 happyReduction_7
happyReduction_7 (HappyAbsSyn7 happy_var_3)
_
(HappyAbsSyn8 happy_var_1)
= HappyAbsSyn7
(happy_var_1:happy_var_3
)
happyReduction_7 _ _ _ = notHappyAtAll
happyReduce_8 = happyReduce 7 8 happyReduction_8
happyReduction_8 (_ `HappyStk`
(HappyAbsSyn9 happy_var_6) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyTerminal (TokenSetName happy_var_3)) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn8
(FuncCall (PredefFunc Filter) [Var happy_var_3] [happy_var_6]
) `HappyStk` happyRest
happyReduce_9 = happyReduce 6 9 happyReduction_9
happyReduction_9 ((HappyAbsSyn10 happy_var_6) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn6 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn9
(FuncDef [] happy_var_3 happy_var_6
) `HappyStk` happyRest
happyReduce_10 = happySpecReduce_3 10 happyReduction_10
happyReduction_10 (HappyAbsSyn10 happy_var_3)
_
(HappyAbsSyn10 happy_var_1)
= HappyAbsSyn10
(FuncCall (PredefFunc IsEqual) [] [happy_var_1, happy_var_3]
)
happyReduction_10 _ _ _ = notHappyAtAll
happyReduce_11 = happyReduce 4 10 happyReduction_11
happyReduction_11 (_ `HappyStk`
(HappyTerminal (TokenNat happy_var_3)) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn10
(FuncCall (PredefFunc RecordIndex) [] [Types.Int happy_var_3]
) `HappyStk` happyRest
happyReduce_12 = happySpecReduce_1 10 happyReduction_12
happyReduction_12 (HappyTerminal (TokenString happy_var_1))
= HappyAbsSyn10
(Types.String happy_var_1
)
happyReduction_12 _ = notHappyAtAll
happyReduce_13 = happySpecReduce_1 10 happyReduction_13
happyReduction_13 (HappyTerminal (TokenVarName happy_var_1))
= HappyAbsSyn10
(Var happy_var_1
)
happyReduction_13 _ = notHappyAtAll
happyReduce_14 = happySpecReduce_1 10 happyReduction_14
happyReduction_14 (HappyAbsSyn11 happy_var_1)
= HappyAbsSyn10
(happy_var_1
)
happyReduction_14 _ = notHappyAtAll
happyReduce_15 = happySpecReduce_1 10 happyReduction_15
happyReduction_15 _
= HappyAbsSyn10
(Boolean True
)
happyReduce_16 = happySpecReduce_1 10 happyReduction_16
happyReduction_16 _
= HappyAbsSyn10
(Boolean False
)
happyReduce_17 = happySpecReduce_3 11 happyReduction_17
happyReduction_17 _
(HappyAbsSyn12 happy_var_2)
_
= HappyAbsSyn11
(Record happy_var_2
)
happyReduction_17 _ _ _ = notHappyAtAll
happyReduce_18 = happySpecReduce_1 12 happyReduction_18
happyReduction_18 (HappyAbsSyn10 happy_var_1)
= HappyAbsSyn12
([happy_var_1]
)
happyReduction_18 _ = notHappyAtAll
happyReduce_19 = happySpecReduce_3 12 happyReduction_19
happyReduction_19 (HappyAbsSyn12 happy_var_3)
_
(HappyAbsSyn10 happy_var_1)
= HappyAbsSyn12
(happy_var_1:happy_var_3
)
happyReduction_19 _ _ _ = notHappyAtAll
happyNewToken action sts stk [] =
action 33 33 notHappyAtAll (HappyState action) sts stk []
happyNewToken action sts stk (tk:tks) =
let cont i = action i i tk (HappyState action) sts stk tks in
case tk of {
TokenFilter -> cont 13;
TokenInSet -> cont 14;
TokenOutSet -> cont 15;
TokenSetName happy_dollar_dollar -> cont 16;
TokenNat happy_dollar_dollar -> cont 17;
TokenVarName happy_dollar_dollar -> cont 18;
TokenTrue -> cont 19;
TokenFalse -> cont 20;
TokenString happy_dollar_dollar -> cont 21;
TokenLeftSqBracket -> cont 22;
TokenRightSqBracket -> cont 23;
TokenArrow -> cont 24;
TokenisEqual -> cont 25;
TokenisNotEqual -> cont 26;
TokenLeftBracket -> cont 27;
TokenRightBracket -> cont 28;
TokenSemiCol -> cont 29;
TokenLambda -> cont 30;
TokenComma -> cont 31;
TokenFullStop -> cont 32;
_ -> happyError' ((tk:tks), [])
}
happyError_ explist 33 tk tks = happyError' (tks, explist)
happyError_ explist _ tk tks = happyError' ((tk:tks), explist)
newtype HappyIdentity a = HappyIdentity a
happyIdentity = HappyIdentity
happyRunIdentity (HappyIdentity a) = a
instance Prelude.Functor HappyIdentity where
fmap f (HappyIdentity a) = HappyIdentity (f a)
instance Applicative HappyIdentity where
pure = HappyIdentity
(<*>) = ap
instance Prelude.Monad HappyIdentity where
return = pure
(HappyIdentity p) >>= q = q p
happyThen :: () => HappyIdentity a -> (a -> HappyIdentity b) -> HappyIdentity b
happyThen = (Prelude.>>=)
happyReturn :: () => a -> HappyIdentity a
happyReturn = (Prelude.return)
happyThen1 m k tks = (Prelude.>>=) m (\a -> k a tks)
happyReturn1 :: () => a -> b -> HappyIdentity a
happyReturn1 = \a tks -> (Prelude.return) a
happyError' :: () => ([(Token)], [Prelude.String]) -> HappyIdentity a
happyError' = HappyIdentity Prelude.. (\(tokens, _) -> parseError tokens)
parseSource tks = happyRunIdentity happySomeParser where
happySomeParser = happyThen (happyParse action_0 tks) (\x -> case x of {HappyAbsSyn4 z -> happyReturn z; _other -> notHappyAtAll })
happySeq = happyDontSeq
parseError :: [Token] -> a
parseError _ = error "Parse error"
{-# LINE 1 "templates/GenericTemplate.hs" #-}
-- $Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp $
data Happy_IntList = HappyCons Prelude.Int Happy_IntList
infixr 9 `HappyStk`
data HappyStk a = HappyStk a (HappyStk a)
-----------------------------------------------------------------------------
-- starting the parse
happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
-----------------------------------------------------------------------------
-- Accepting the parse
-- If the current token is ERROR_TOK, it means we've just accepted a partial
-- parse (a %partial parser). We must ignore the saved token on the top of
-- the stack in this case.
happyAccept (1) tk st sts (_ `HappyStk` ans `HappyStk` _) =
happyReturn1 ans
happyAccept j tk st sts (HappyStk ans _) =
(happyReturn1 ans)
-----------------------------------------------------------------------------
-- Arrays only: do the next action
indexShortOffAddr arr off = arr Happy_Data_Array.! off
{-# INLINE happyLt #-}
happyLt x y = (x Prelude.< y)
readArrayBit arr bit =
Bits.testBit (indexShortOffAddr arr (bit `Prelude.div` 16)) (bit `Prelude.mod` 16)
-----------------------------------------------------------------------------
-- HappyState data type (not arrays)
newtype HappyState b c = HappyState
(Prelude.Int -> -- token number
Prelude.Int -> -- token number (yes, again)
b -> -- token semantic value
HappyState b c -> -- current state
[HappyState b c] -> -- state stack
c)
-----------------------------------------------------------------------------
-- Shifting a token
happyShift new_state (1) tk st sts stk@(x `HappyStk` _) =
let i = (case x of { HappyErrorToken (i) -> i }) in
-- trace "shifting the error token" $
new_state i i tk (HappyState (new_state)) ((st):(sts)) (stk)
happyShift new_state i tk st sts stk =
happyNewToken new_state ((st):(sts)) ((HappyTerminal (tk))`HappyStk`stk)
-- happyReduce is specialised for the common cases.
happySpecReduce_0 i fn (1) tk st sts stk
= happyFail [] (1) tk st sts stk
happySpecReduce_0 nt fn j tk st@((HappyState (action))) sts stk
= action nt j tk st ((st):(sts)) (fn `HappyStk` stk)
happySpecReduce_1 i fn (1) tk st sts stk
= happyFail [] (1) tk st sts stk
happySpecReduce_1 nt fn j tk _ sts@(((st@(HappyState (action))):(_))) (v1`HappyStk`stk')
= let r = fn v1 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_2 i fn (1) tk st sts stk
= happyFail [] (1) tk st sts stk
happySpecReduce_2 nt fn j tk _ ((_):(sts@(((st@(HappyState (action))):(_))))) (v1`HappyStk`v2`HappyStk`stk')
= let r = fn v1 v2 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_3 i fn (1) tk st sts stk
= happyFail [] (1) tk st sts stk
happySpecReduce_3 nt fn j tk _ ((_):(((_):(sts@(((st@(HappyState (action))):(_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk')
= let r = fn v1 v2 v3 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happyReduce k i fn (1) tk st sts stk
= happyFail [] (1) tk st sts stk
happyReduce k nt fn j tk st sts stk
= case happyDrop (k Prelude.- ((1) :: Prelude.Int)) sts of
sts1@(((st1@(HappyState (action))):(_))) ->
let r = fn stk in -- it doesn't hurt to always seq here...
happyDoSeq r (action nt j tk st1 sts1 r)
happyMonadReduce k nt fn (1) tk st sts stk
= happyFail [] (1) tk st sts stk
happyMonadReduce k nt fn j tk st sts stk =
case happyDrop k ((st):(sts)) of
sts1@(((st1@(HappyState (action))):(_))) ->
let drop_stk = happyDropStk k stk in
happyThen1 (fn stk tk) (\r -> action nt j tk st1 sts1 (r `HappyStk` drop_stk))
happyMonad2Reduce k nt fn (1) tk st sts stk
= happyFail [] (1) tk st sts stk
happyMonad2Reduce k nt fn j tk st sts stk =
case happyDrop k ((st):(sts)) of
sts1@(((st1@(HappyState (action))):(_))) ->
let drop_stk = happyDropStk k stk
_ = nt :: Prelude.Int
new_state = action
in
happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))
happyDrop (0) l = l
happyDrop n ((_):(t)) = happyDrop (n Prelude.- ((1) :: Prelude.Int)) t
happyDropStk (0) l = l
happyDropStk n (x `HappyStk` xs) = happyDropStk (n Prelude.- ((1)::Prelude.Int)) xs
-----------------------------------------------------------------------------
-- Moving to a new state after a reduction
happyGoto action j tk st = action j j tk (HappyState action)
-----------------------------------------------------------------------------
-- Error recovery (ERROR_TOK is the error token)
-- parse error if we are in recovery and we fail again
happyFail explist (1) tk old_st _ stk@(x `HappyStk` _) =
let i = (case x of { HappyErrorToken (i) -> i }) in
-- trace "failing" $
happyError_ explist i tk
{- We don't need state discarding for our restricted implementation of
"error". In fact, it can cause some bogus parses, so I've disabled it
for now --SDM
-- discard a state
happyFail ERROR_TOK tk old_st CONS(HAPPYSTATE(action),sts)
(saved_tok `HappyStk` _ `HappyStk` stk) =
-- trace ("discarding state, depth " ++ show (length stk)) $
DO_ACTION(action,ERROR_TOK,tk,sts,(saved_tok`HappyStk`stk))
-}
-- Enter error recovery: generate an error token,
-- save the old token and carry on.
happyFail explist i tk (HappyState (action)) sts stk =
-- trace "entering error recovery" $
action (1) (1) tk (HappyState (action)) sts ((HappyErrorToken (i)) `HappyStk` stk)
-- Internal happy errors:
notHappyAtAll :: a
notHappyAtAll = Prelude.error "Internal Happy error\n"
-----------------------------------------------------------------------------
-- Hack to get the typechecker to accept our action functions
-----------------------------------------------------------------------------
-- Seq-ing. If the --strict flag is given, then Happy emits
-- happySeq = happyDoSeq
-- otherwise it emits
-- happySeq = happyDontSeq
happyDoSeq, happyDontSeq :: a -> b -> b
happyDoSeq a b = a `Prelude.seq` b
happyDontSeq a b = b
-----------------------------------------------------------------------------
-- Don't inline any functions from the template. GHC has a nasty habit
-- of deciding to inline happyGoto everywhere, which increases the size of
-- the generated parser quite a bit.
{-# NOINLINE happyShift #-}
{-# NOINLINE happySpecReduce_0 #-}
{-# NOINLINE happySpecReduce_1 #-}
{-# NOINLINE happySpecReduce_2 #-}
{-# NOINLINE happySpecReduce_3 #-}
{-# NOINLINE happyReduce #-}
{-# NOINLINE happyMonadReduce #-}
{-# NOINLINE happyGoto #-}
{-# NOINLINE happyFail #-}
-- end of Happy Template.
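The generated parser imports Types, which is not part of this commit. Below is a hypothetical sketch of the declarations its reductions appear to assume; the constructor names come from the actions above, but the type names (PredefName, Function) and field layouts are guesses, not the project's actual Types.hs:

-- Hypothetical sketch of Types.hs, inferred from the parser actions; the real
-- module is not shown in this diff.
module Types where

-- Prog reduces to (set names read with .in, set-function calls written with .out).
type Program = ([String], [Expr])

-- Built-ins referenced as PredefFunc Filter / IsEqual / RecordIndex.
data PredefName = Filter | IsEqual | RecordIndex deriving (Show, Eq)

data Function = PredefFunc PredefName deriving (Show, Eq)

data Expr
  = FuncCall Function [Expr] [Expr] -- function, set arguments, ordinary arguments
  | FuncDef [Expr] [String] Expr    -- first field appears only as [] in this commit
  | Var String
  | Record [Expr]
  | Boolean Bool
  | Int Int
  | String String
  deriving (Show, Eq)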
 {
 module Parser where
 import Lexer
+import Types
 }

-%name parseCalc
+%name parseSource
 %tokentype {Token}
 %error {parseError}

@@ -11,14 +12,12 @@ import Lexer
 filter   { TokenFilter }
 in       { TokenInSet }
 out      { TokenOutSet }
-Setname  { TokenSetName $$ }
-function { TokenFunc }
-nat      { TokenNat $$ }
-var      { TokenVarName $$ }
-Record   { TokenRecord }
+SetName  { TokenSetName $$ }
+Nat      { TokenNat $$ }
+VarName  { TokenVarName $$ }
 true     { TokenTrue }
 false    { TokenFalse }
-string   { TokenString s }
+Str      { TokenString $$ }
 '['      { TokenLeftSqBracket }
 ']'      { TokenRightSqBracket }
 "->"     { TokenArrow }
@@ -26,49 +25,46 @@ import Lexer
 "/="     { TokenisNotEqual }
 '('      { TokenLeftBracket }
 ')'      { TokenRightBracket }
-';'      { TokenSemiCo }
+';'      { TokenSemiCol }
 '\\'     { TokenLambda }
 ','      { TokenComma }
-'.'      { TokenFullstop }
+'.'      { TokenFullStop }

 %right "->"
 %left "/=" "==" ';'

 %%

-Prog : '.' in SetNames '.' out SetFuncCall {($3,$6)}
-SetNames : Setname {[$1]}
-         | SetNames ',' setName { }
-VarNames : var {Var $1}
-         | var ',' VarNames {Var $1 $3}
-SetSetFuncCalls : SetFuncCall {[SetFuncCall]}
-                | SetFuncCall; SetFuncCalls {SetFuncCall:SetFuncCalls}
-SetFuncCall : filter '['SetName']' '('Func')' {FuncCall (PredefFunc Filter) [$3] [$6]}
-Func : \'('VarNames')' -> Expr {FuncDef [] $3 $6}
-Exp : '.' in var '.' out Exp {Prog $3 $6}
-    | filter '[' Setname ']' Exp {Filter $3 $5}
-    | '(' '\\'
-Expr : Expr == Expr {FuncCall (PredefFunc IsEqual) [] [$1, $3]}
-     | Expr'['Nat']' {FuncCall (PredefFunc RecordIndex) [] [$3]}
-     | String {String $1}
+Prog : in SetNames out SetFuncCalls {($2,$4)}
+SetNames : SetName {[$1]}
+         | SetName ',' SetNames { $1:$3}
+VarNames : VarName {[$1]}
+         | VarName ',' VarNames {$1:$3}
+SetFuncCalls : SetFuncCall {[$1]}
+             | SetFuncCall';' SetFuncCalls {$1:$3}
+SetFuncCall : filter '['SetName']' '('Func')' {FuncCall (PredefFunc Filter) [Var $3] [$6]}
+Func : '\\' '(' VarNames ')' "->" Expr {FuncDef [] $3 $6}
+Expr : Expr "==" Expr {FuncCall (PredefFunc IsEqual) [] [$1, $3]}
+     | Expr'['Nat']' {FuncCall (PredefFunc RecordIndex) [] [Types.Int $3]}
+     | Str {Types.String $1}
      | VarName {Var $1}
      | Record {$1}
      | true {Boolean True}
      | false {Boolean False}

 Record : '['Exprs']' {Record $2}

 Exprs : Expr {[$1]}
-      | Expr','Exprs {$1:$2}
+      | Expr','Exprs {$1:$3}

 {
+parseError :: [Token] -> a
+parseError _ = error "Parse error"
 }
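Worked out by hand from this grammar, the sample program committed below should parse to the following pair of (.in set names, .out set-function calls). This is a hand-derived expectation, not captured output:

-- parse ".in\nSampleSet\n.out\nfilter[A](\\(r) -> r[1] == \"hello\")"
-- should build:
--   ( ["SampleSet"]
--   , [ FuncCall (PredefFunc Filter) [Var "A"]
--         [ FuncDef [] ["r"]
--             (FuncCall (PredefFunc IsEqual) []
--                [ FuncCall (PredefFunc RecordIndex) [] [Types.Int 1]
--                , Types.String "hello" ]) ] ] )
--
-- Note that the RecordIndex production discards its left-hand Expr ($1), so the
-- indexed expression (the variable r here) does not appear anywhere in the tree.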
@@ -2,7 +2,7 @@ import Data.List
 import Data.Char
 import Data.Sequence

--- | Function print2DListLex
+-- | Function print2aaaDListLex
 -- 1. takes in a list of records
 -- 2. prints them in lexicographical order
 print2DListLex :: [[String]] -> IO()
@@ -10,4 +10,4 @@ print2DListLex (record:records) = sortBy () records
 -- | rearran
 rearrange :: [[String]] -> [[String]]
-rearrange records
+rearrange record
\ No newline at end of file
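print2DListLex and rearrange are still stubs in this commit. A minimal sketch of what the sort-and-print helper could look like, assuming records should be compared field by field and printed comma-separated (an assumption, not the project's implementation):

import Data.List (intercalate, sort)

-- Sort records lexicographically (the default Ord on [[String]]) and print
-- each record as a comma-joined line.
print2DListLex :: [[String]] -> IO ()
print2DListLex records = mapM_ (putStrLn . intercalate ",") (sort records)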
hello ,tree
big ,apple
hello,world
he,good
hello,good bye
.in
SampleSet
.out
filter[A](\(r) -> r[1] == "hello")
\ No newline at end of file