-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathparser.y
More file actions
149 lines (126 loc) · 6.7 KB
/
parser.y
File metadata and controls
149 lines (126 loc) · 6.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
{
module Main where
import Data.Char
import LambdaCalculus
import Reductions
import SimpleTypes
import Rank2IntersectionTypes
import LinearTypes
import LinearRank2QuantitativeTypes
}
-- Happy directives: generated entry point, token type, and error handler.
%name parse
%tokentype { Token }
%error { parseError }
-- Terminal symbols of the grammar, mapped to the constructors produced
-- by 'lexer' in the trailer below ('$$' extracts the variable's name
-- from 'TokenVar').
%token
'\\' { TokenLambda }
'.' { TokenPoint }
' ' { TokenSpace }
var { TokenVar $$ }
'(' { TokenOB }
')' { TokenCB }
typeinf0 { TokenInf0 }
typeinf2 { TokenInf2 }
qtypeinf2 { TokenQInf2 }
reduceMax { TokenReduceMax }
reduceNorm { TokenReduceNorm }
reduceApp { TokenReduceApp }
steps { TokenSteps }
count { TokenCount }
-- Both the abstraction dot and application (a literal space token) are
-- declared left-associative; application binds tighter than the dot.
%left '.'
%left ' '
%%
-- A top-level input is a type-inference query, a bare lambda-term, or a
-- reduction query.
Exp : TyInf { TyInf $1 }
| Term { Term $1 }
| Reduction { Reduction $1 }
-- Lambda-terms: variables, abstractions, applications, and
-- parenthesised terms (parentheses are dropped from the AST).
Term : var { Var (TeVar $1) }
| Abs { $1 }
| App { $1 }
| '(' Term ')' { $2 }
Abs : '\\' var '.' Term { Abs (TeVar $2) $4 }
-- Application is written with an explicit space token between terms.
App : Term ' ' Term { App $1 $3 }
-- Inference commands; the argument term must be parenthesised.
-- ti0 = simple types, ti2 = rank-2 intersection types, qti2 = linear
-- rank-2 quantitative types.  A trailing "count" keyword switches the
-- quantitative inference to counter-only output.
TyInf : typeinf0 ' ' '(' Term ')' { TyInf0 $4 (simpleTypeInf $4 0) }
| typeinf2 ' ' '(' Term ')' { TyInf2 $4 (r2typeInf $4 0) }
| qtypeinf2 ' ' '(' Term ')' { QTyInf2 $4 (quantR2typeInf $4 0) Default }
| qtypeinf2 ' ' '(' Term ')' ' ' count { QTyInf2 $4 (quantR2typeInf $4 0) Count }
-- Reduction commands for the three strategies.  A trailing keyword
-- selects the printing mode: "steps" shows every reduction step,
-- "count" shows only the step counter.
Reduction : reduceMax ' ' '(' Term ')' { Reduct "maximal strategy" $4 (reduce maximal [$4] 0) Default }
| reduceNorm ' ' '(' Term ')' { Reduct "normal strategy" $4 (reduce normal [$4] 0) Default }
| reduceApp ' ' '(' Term ')' { Reduct "applicative strategy" $4 (reduce applicative [$4] 0) Default }
| reduceMax ' ' '(' Term ')' ' ' steps { Reduct "maximal strategy" $4 (reduce maximal [$4] 0) Steps }
| reduceNorm ' ' '(' Term ')' ' ' steps { Reduct "normal strategy" $4 (reduce normal [$4] 0) Steps }
| reduceApp ' ' '(' Term ')' ' ' steps { Reduct "applicative strategy" $4 (reduce applicative [$4] 0) Steps }
| reduceMax ' ' '(' Term ')' ' ' count { Reduct "maximal strategy" $4 (reduce maximal [$4] 0) Count }
| reduceNorm ' ' '(' Term ')' ' ' count { Reduct "normal strategy" $4 (reduce normal [$4] 0) Count }
| reduceApp ' ' '(' Term ')' ' ' count { Reduct "applicative strategy" $4 (reduce applicative [$4] 0) Count }
{
-- Called by Happy on a syntax error; aborts the program.  'Token'
-- derives 'Show', so include the remaining token stream in the message
-- instead of discarding it — this shows the user where parsing failed.
parseError :: [Token] -> a
parseError ts = error ("Parse error on tokens: " ++ show ts)
-- Result of parsing one input line: a type-inference report, a
-- reduction report, or a bare lambda-term.
data Exp
= TyInf TyInf
| Reduction Reduction
| Term Term
-- A type-inference result, paired with the term it was inferred for.
-- TyInf0: simple types (basis, type, fresh-variable counter).
-- TyInf2: rank-2 intersection types (environment, type, counter).
-- QTyInf2: linear rank-2 quantitative types (environment, type,
--   resource count, counter) plus the printing mode.
data TyInf
= TyInf0 Term (Basis, Type0, Int)
| TyInf2 Term (Env, Type2, Int)
| QTyInf2 Term (LEnv, TLinearRank2, Int ,Int) Mode
-- A reduction result: Reduct strategy_name initial_term
-- (reductions_newest_first, number_of_reductions) printing_mode.
-- The term list is kept in reverse order, so its head is the normal
-- form and its last element is the initial term.
data Reduction -- Reduct Reduction_strategy Term Initial_term (Reverse_list_of_reductions, Number_reductions) Mode_of_printing
= Reduct String Term ([Term], Int) Mode
-- How a result should be printed (selected by trailing keywords in the
-- input, see the grammar rules above).
data Mode
= Default -- shows everything (except reduction steps, in the case of Reduct)
| Count -- only shows the counters
| Steps -- (only for Reduct) shows everything, with reduction steps
-- Pretty-printing for top-level results: inferences and reductions
-- delegate to their own Show instances; a bare term gets a "Term: "
-- prefix and a trailing newline.
instance Show Exp where
  show (TyInf inference) = show inference
  show (Reduction r)     = show r
  show (Term t)          = "Term: " ++ show t ++ ['\n']
-- Pretty-printing for type-inference reports.  For 'QTyInf2' the mode
-- selects between the full report and a counter-only summary.
instance Show TyInf where
  show (TyInf0 term (basis, t0, _)) =
    "\t[--- Inference (simple types) ---]" ++ "\n\tTerm = " ++ show term ++ "\n\tBasis = " ++ show basis ++ "\n\tType = " ++ show t0 ++ ['\n']
  show (TyInf2 term (env, t0, _)) =
    "\t[--- Inference (rank 2 intersection types) ---]" ++ "\n\tTerm = " ++ show term ++ "\n\tEnvironment = " ++ show env ++ "\n\tType = " ++ show t0 ++ ['\n']
  -- Count mode: print only the term and the resource counter.
  show (QTyInf2 term (_, _, c, _) Count) =
    "\t[--- Inference (linear rank 2 quantitative types) ---]" ++ "\n\tTerm = " ++ show term ++ "\n\tCount = " ++ show c ++ ['\n']
  -- Catch-all covers Default (and, defensively, Steps, which the
  -- grammar never produces for inferences).  Previously only Default
  -- was matched, leaving the instance non-exhaustive.
  show (QTyInf2 term (env, t2, c, _) _) =
    "\t[--- Inference (linear rank 2 quantitative types) ---]" ++ "\n\tTerm = " ++ show term ++ "\n\tEnvironment = " ++ show env ++ "\n\tType = " ++ show t2 ++ "\n\tCount = " ++ show c ++ ['\n']
-- Pretty-printing for reduction reports.  Default shows the normal
-- form and counter, Count only the counter, Steps additionally lists
-- every reduction step in chronological order.
instance Show Reduction where
  show (Reduct strat term (terms, c) mode) =
    case mode of
      Default -> banner ++ termLine ++ normalForm ++ countLine ++ ['\n']
      Count   -> banner ++ termLine ++ countLine ++ ['\n']
      Steps   -> banner ++ termLine ++ normalForm ++ countLine
                 ++ "\n\tReductions: " ++ show (head history) ++ "\n"
                 ++ concatMap (\step -> "\t -> " ++ show step ++ "\n") (tail history)
    where
      banner     = "\t[--- Reduction (" ++ strat ++ ") ---]"
      termLine   = "\n\tTerm = " ++ show term
      normalForm = "\n\tNormal form = " ++ show (head terms)
      countLine  = "\n\tCount = " ++ show c
      -- terms are stored newest-first, so reversing restores the
      -- chronological order of the reduction sequence.
      history    = reverse terms
-- Tokens produced by 'lexer': lambda-calculus symbols, variable names,
-- parentheses, the inference/reduction command keywords, and the
-- printing-mode keywords "steps" and "count".
data Token
= TokenLambda
| TokenPoint
| TokenSpace
| TokenVar String
| TokenOB
| TokenCB
| TokenInf0
| TokenInf2
| TokenQInf2
| TokenReduceMax
| TokenReduceNorm
| TokenReduceApp
| TokenSteps
| TokenCount
deriving Show
-- Tokenize an input line.  Single-character symbols map directly to
-- tokens; alphanumeric runs are handed to 'lexVar', which classifies
-- them as keywords or variables.
lexer :: String -> [Token]
lexer [] = []
lexer ('\\':cs) = TokenLambda : lexer cs
lexer ('.':cs) = TokenPoint : lexer cs
lexer (' ':cs) = TokenSpace : lexer cs
lexer ('(':cs) = TokenOB : lexer cs
lexer (')':cs) = TokenCB : lexer cs
lexer (c:cs)
  | isAlphaNum c = lexVar (c:cs)
  -- Previously any other character (e.g. a tab) crashed with an
  -- uninformative non-exhaustive-pattern error; report it explicitly.
  | otherwise    = error ("lexer: unexpected character " ++ show c)
-- Consume a maximal alphanumeric run and classify it: reserved words
-- become their keyword tokens, anything else becomes a variable token.
-- (Type signature added; it was previously inferred.)
lexVar :: String -> [Token]
lexVar cs =
  case span isAlphaNum cs of
    ("ti0",rest) -> TokenInf0 : lexer rest
    ("ti2",rest) -> TokenInf2 : lexer rest
    ("qti2",rest) -> TokenQInf2 : lexer rest
    ("reduceMax",rest) -> TokenReduceMax : lexer rest
    ("reduceNorm",rest) -> TokenReduceNorm : lexer rest
    ("reduceApp",rest) -> TokenReduceApp : lexer rest
    ("steps",rest) -> TokenSteps : lexer rest
    ("count",rest) -> TokenCount : lexer rest
    (var,rest) -> TokenVar var : lexer rest
-- Read-eval-print loop: blank lines and lines starting with '#' are
-- skipped, lines starting with '-' are echoed without the dash, and
-- anything else is lexed, parsed, and printed.  The loop ends when
-- 'getLine' raises an end-of-file exception.  (Type signature added.)
main :: IO ()
main = do
  line <- getLine
  let action
        -- 'all isSpace' is True for the empty string, so the partial
        -- 'line!!0' is only reached (via short-circuit ||) on
        -- non-empty input.
        | all isSpace line || line!!0 == '#' = main
        | line!!0 == '-' = putStrLn (tail line)
        | otherwise = (print . parse . lexer) line
  action
  main
}