{
{- sv2v
- Author: Zachary Snow <zach@zachjs.com>
- Original Lexer Author: Tom Hawkins <tomahawkins@gmail.com>
-
- Combined source lexing and preprocessing
-
- These procedures are combined so that we can simultaneously process macros in
- a sane way (something analogous to character-by-character) and have our
- lexemes properly tagged with source file positions.
-
- The scariest piece of this module is the use of `unsafePerformIO`. We want to
- be able to search for and read files whenever we see an include directive.
- Trying to thread the IO Monad through alex's interface would be very
- convoluted. The operations performed are not effectful, and are type safe.
-}

{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- The above pragma gets rid of an annoying warning caused by alex 3.2.4. This
-- has been fixed on their development branch, so this pragma can be removed
-- once they roll a new release. (no new release as of 3/29/2019)

module Language.SystemVerilog.Parser.Lex
    ( lexFile
    , Env
    ) where

import System.FilePath (dropFileName)
import System.Directory (findFile)
import System.IO.Unsafe (unsafePerformIO)
import qualified Data.Map.Strict as Map
import Data.List (span, elemIndex, dropWhileEnd)
import Data.Maybe (isJust, fromJust)

import Language.SystemVerilog.Parser.Tokens
}

%wrapper "monadUserState"

-- Numbers

$nonZeroDecimalDigit = [1-9]
$decimalDigit = [0-9]
@xDigit = [xX]
@zDigit = [zZ\?]
@binaryDigit = @xDigit | @zDigit | [0-1]
@octalDigit = @xDigit | @zDigit | [0-7]
@hexDigit = @xDigit | @zDigit | [0-9a-fA-F]

@decimalBase = "'" [sS]? [dD]
@binaryBase = "'" [sS]? [bB]
@octalBase = "'" [sS]? [oO]
@hexBase = "'" [sS]? [hH]

@binaryValue = @binaryDigit ("_" | @binaryDigit)*
@octalValue = @octalDigit ("_" | @octalDigit)*
@hexValue = @hexDigit ("_" | @hexDigit)*

@unsignedNumber = $decimalDigit ("_" | $decimalDigit)*

@sign = [\-\+]
@fixedPointNumber = @unsignedNumber "." @unsignedNumber
@floatingPointNumber = @unsignedNumber ("." @unsignedNumber)? [eE] @sign? @unsignedNumber

@size = @unsignedNumber " "?

@decimalNumber = @size? @decimalBase " "? @unsignedNumber
@binaryNumber = @size? @binaryBase " "? @binaryValue
@octalNumber = @size? @octalBase " "? @octalValue
@hexNumber = @size? @hexBase " "? @hexValue
@realNumber = @fixedPointNumber | @floatingPointNumber

@unbasedUnsizedLiteral = "'" ( 0 | 1 | x | X | z | Z )

@number
    = @unsignedNumber
    | @decimalNumber
    | @octalNumber
    | @binaryNumber
    | @hexNumber
    | @unbasedUnsizedLiteral
    | @realNumber

-- Strings

@string = \" (\\\"|[^\"\r\n])* \"

-- Times

@timeUnit = s | ms | us | ns | ps | fs
@time
    = @unsignedNumber @timeUnit
    | @fixedPointNumber @timeUnit

-- Identifiers

@escapedIdentifier = "\" ($printable # $white)+ $white
@simpleIdentifier = [a-zA-Z_] [a-zA-Z0-9_\$]*
@systemIdentifier = "$" [a-zA-Z0-9_\$]+

-- Comments

@commentBlock = "/*"
@commentLine = "//"

-- Directives

@directive = "`" @simpleIdentifier

-- Whitespace

@newline = \n
@escapedNewline = \\\n
@whitespace = ($white # \n) | @escapedNewline

tokens :-

    "$bits" { tok KW_dollar_bits }
    "$dimensions" { tok KW_dollar_dimensions }
    "$unpacked_dimensions" { tok KW_dollar_unpacked_dimensions }
    "$left" { tok KW_dollar_left }
    "$right" { tok KW_dollar_right }
    "$low" { tok KW_dollar_low }
    "$high" { tok KW_dollar_high }
    "$increment" { tok KW_dollar_increment }
    "$size" { tok KW_dollar_size }

    "accept_on" { tok KW_accept_on }
    "alias" { tok KW_alias }
    "always" { tok KW_always }
    "always_comb" { tok KW_always_comb }
    "always_ff" { tok KW_always_ff }
    "always_latch" { tok KW_always_latch }
    "and" { tok KW_and }
    "assert" { tok KW_assert }
    "assign" { tok KW_assign }
    "assume" { tok KW_assume }
    "automatic" { tok KW_automatic }
    "before" { tok KW_before }
    "begin" { tok KW_begin }
    "bind" { tok KW_bind }
    "bins" { tok KW_bins }
    "binsof" { tok KW_binsof }
    "bit" { tok KW_bit }
    "break" { tok KW_break }
    "buf" { tok KW_buf }
    "bufif0" { tok KW_bufif0 }
    "bufif1" { tok KW_bufif1 }
    "byte" { tok KW_byte }
    "case" { tok KW_case }
    "casex" { tok KW_casex }
    "casez" { tok KW_casez }
    "cell" { tok KW_cell }
    "chandle" { tok KW_chandle }
    "checker" { tok KW_checker }
    "class" { tok KW_class }
    "clocking" { tok KW_clocking }
    "cmos" { tok KW_cmos }
    "config" { tok KW_config }
    "const" { tok KW_const }
    "constraint" { tok KW_constraint }
    "context" { tok KW_context }
    "continue" { tok KW_continue }
    "cover" { tok KW_cover }
    "covergroup" { tok KW_covergroup }
    "coverpoint" { tok KW_coverpoint }
    "cross" { tok KW_cross }
    "deassign" { tok KW_deassign }
    "default" { tok KW_default }
    "defparam" { tok KW_defparam }
    "design" { tok KW_design }
    "disable" { tok KW_disable }
    "dist" { tok KW_dist }
    "do" { tok KW_do }
    "edge" { tok KW_edge }
    "else" { tok KW_else }
    "end" { tok KW_end }
    "endcase" { tok KW_endcase }
    "endchecker" { tok KW_endchecker }
    "endclass" { tok KW_endclass }
    "endclocking" { tok KW_endclocking }
    "endconfig" { tok KW_endconfig }
    "endfunction" { tok KW_endfunction }
    "endgenerate" { tok KW_endgenerate }
    "endgroup" { tok KW_endgroup }
    "endinterface" { tok KW_endinterface }
    "endmodule" { tok KW_endmodule }
    "endpackage" { tok KW_endpackage }
    "endprimitive" { tok KW_endprimitive }
    "endprogram" { tok KW_endprogram }
    "endproperty" { tok KW_endproperty }
    "endspecify" { tok KW_endspecify }
    "endsequence" { tok KW_endsequence }
    "endtable" { tok KW_endtable }
    "endtask" { tok KW_endtask }
    "enum" { tok KW_enum }
    "event" { tok KW_event }
    "eventually" { tok KW_eventually }
    "expect" { tok KW_expect }
    "export" { tok KW_export }
    "extends" { tok KW_extends }
    "extern" { tok KW_extern }
    "final" { tok KW_final }
    "first_match" { tok KW_first_match }
    "for" { tok KW_for }
    "force" { tok KW_force }
    "foreach" { tok KW_foreach }
    "forever" { tok KW_forever }
    "fork" { tok KW_fork }
    "forkjoin" { tok KW_forkjoin }
"function" { tok KW_function }
|
|
|
|
|
"generate" { tok KW_generate }
|
|
|
|
|
"genvar" { tok KW_genvar }
|
2019-09-05 03:02:02 +02:00
|
|
|
"global" { tok KW_global }
|
|
|
|
|
"highz0" { tok KW_highz0 }
|
|
|
|
|
"highz1" { tok KW_highz1 }
|
2019-03-29 06:07:08 +01:00
|
|
|
"if" { tok KW_if }
|
2019-03-30 05:47:37 +01:00
|
|
|
"iff" { tok KW_iff }
|
2019-09-05 03:02:02 +02:00
|
|
|
"ifnone" { tok KW_ifnone }
|
|
|
|
|
"ignore_bins" { tok KW_ignore_bins }
|
|
|
|
|
"illegal_bins" { tok KW_illegal_bins }
|
|
|
|
|
"implements" { tok KW_implements }
|
|
|
|
|
"implies" { tok KW_implies }
|
2019-04-23 21:53:51 +02:00
|
|
|
"import" { tok KW_import }
|
2019-09-05 03:02:02 +02:00
|
|
|
"incdir" { tok KW_incdir }
|
|
|
|
|
"include" { tok KW_include }
|
2019-03-29 06:07:08 +01:00
|
|
|
"initial" { tok KW_initial }
|
|
|
|
|
"inout" { tok KW_inout }
|
|
|
|
|
"input" { tok KW_input }
|
2019-09-05 03:02:02 +02:00
|
|
|
"inside" { tok KW_inside }
|
|
|
|
|
"instance" { tok KW_instance }
|
2019-03-29 06:07:08 +01:00
|
|
|
"int" { tok KW_int }
|
|
|
|
|
"integer" { tok KW_integer }
|
2019-09-05 03:02:02 +02:00
|
|
|
"interconnect" { tok KW_interconnect }
|
2019-03-29 06:07:08 +01:00
|
|
|
"interface" { tok KW_interface }
|
2019-03-30 05:47:37 +01:00
|
|
|
"intersect" { tok KW_intersect }
|
2019-09-05 03:02:02 +02:00
|
|
|
"join" { tok KW_join }
|
|
|
|
|
"join_any" { tok KW_join_any }
|
|
|
|
|
"join_none" { tok KW_join_none }
|
|
|
|
|
"large" { tok KW_large }
|
|
|
|
|
"let" { tok KW_let }
|
|
|
|
|
"liblist" { tok KW_liblist }
|
|
|
|
|
"library" { tok KW_library }
|
|
|
|
|
"local" { tok KW_local }
|
2019-03-29 06:07:08 +01:00
|
|
|
"localparam" { tok KW_localparam }
|
|
|
|
|
"logic" { tok KW_logic }
|
|
|
|
|
"longint" { tok KW_longint }
|
2019-09-05 03:02:02 +02:00
|
|
|
"macromodule" { tok KW_macromodule }
|
|
|
|
|
"matches" { tok KW_matches }
|
|
|
|
|
"medium" { tok KW_medium }
|
2019-03-29 06:07:08 +01:00
|
|
|
"modport" { tok KW_modport }
|
|
|
|
|
"module" { tok KW_module }
|
|
|
|
|
"nand" { tok KW_nand }
|
|
|
|
|
"negedge" { tok KW_negedge }
|
2019-09-05 03:02:02 +02:00
|
|
|
"nettype" { tok KW_nettype }
|
|
|
|
|
"new" { tok KW_new }
|
|
|
|
|
"nexttime" { tok KW_nexttime }
|
|
|
|
|
"nmos" { tok KW_nmos }
|
2019-03-29 06:07:08 +01:00
|
|
|
"nor" { tok KW_nor }
|
2019-09-05 03:02:02 +02:00
|
|
|
"noshowcancelled" { tok KW_noshowcancelled }
|
2019-03-29 06:07:08 +01:00
|
|
|
"not" { tok KW_not }
|
2019-09-05 03:02:02 +02:00
|
|
|
"notif0" { tok KW_notif0 }
|
|
|
|
|
"notif1" { tok KW_notif1 }
|
|
|
|
|
"null" { tok KW_null }
|
2019-03-29 06:07:08 +01:00
|
|
|
"or" { tok KW_or }
|
|
|
|
|
"output" { tok KW_output }
|
2019-04-23 21:53:51 +02:00
|
|
|
"package" { tok KW_package }
|
2019-03-29 06:07:08 +01:00
|
|
|
"packed" { tok KW_packed }
|
|
|
|
|
"parameter" { tok KW_parameter }
|
2019-09-05 03:02:02 +02:00
|
|
|
"pmos" { tok KW_pmos }
|
2019-03-29 06:07:08 +01:00
|
|
|
"posedge" { tok KW_posedge }
|
2019-09-05 03:02:02 +02:00
|
|
|
"primitive" { tok KW_primitive }
|
2019-03-30 05:47:37 +01:00
|
|
|
"priority" { tok KW_priority }
|
2019-09-05 03:02:02 +02:00
|
|
|
"program" { tok KW_program }
|
2019-03-30 05:47:37 +01:00
|
|
|
"property" { tok KW_property }
|
2019-09-05 03:02:02 +02:00
|
|
|
"protected" { tok KW_protected }
|
|
|
|
|
"pull0" { tok KW_pull0 }
|
|
|
|
|
"pull1" { tok KW_pull1 }
|
|
|
|
|
"pulldown" { tok KW_pulldown }
|
|
|
|
|
"pullup" { tok KW_pullup }
|
|
|
|
|
"pulsestyle_ondetect" { tok KW_pulsestyle_ondetect }
|
|
|
|
|
"pulsestyle_onevent" { tok KW_pulsestyle_onevent }
|
|
|
|
|
"pure" { tok KW_pure }
|
|
|
|
|
"rand" { tok KW_rand }
|
|
|
|
|
"randc" { tok KW_randc }
|
|
|
|
|
"randcase" { tok KW_randcase }
|
|
|
|
|
"randsequence" { tok KW_randsequence }
|
|
|
|
|
"rcmos" { tok KW_rcmos }
|
2019-03-29 06:07:08 +01:00
|
|
|
"real" { tok KW_real }
|
|
|
|
|
"realtime" { tok KW_realtime }
|
2019-09-05 03:02:02 +02:00
|
|
|
"ref" { tok KW_ref }
|
2019-03-29 06:07:08 +01:00
|
|
|
"reg" { tok KW_reg }
|
2019-09-05 03:02:02 +02:00
|
|
|
"reject_on" { tok KW_reject_on }
|
|
|
|
|
"release" { tok KW_release }
|
2019-03-29 06:07:08 +01:00
|
|
|
"repeat" { tok KW_repeat }
|
2019-09-05 03:02:02 +02:00
|
|
|
"restrict" { tok KW_restrict }
|
2019-03-29 06:07:08 +01:00
|
|
|
"return" { tok KW_return }
|
2019-09-05 03:02:02 +02:00
|
|
|
"rnmos" { tok KW_rnmos }
|
|
|
|
|
"rpmos" { tok KW_rpmos }
|
|
|
|
|
"rtran" { tok KW_rtran }
|
|
|
|
|
"rtranif0" { tok KW_rtranif0 }
|
|
|
|
|
"rtranif1" { tok KW_rtranif1 }
|
|
|
|
|
"s_always" { tok KW_s_always }
|
|
|
|
|
"s_eventually" { tok KW_s_eventually }
|
|
|
|
|
"s_nexttime" { tok KW_s_nexttime }
|
|
|
|
|
"s_until" { tok KW_s_until }
|
|
|
|
|
"s_until_with" { tok KW_s_until_with }
|
|
|
|
|
"scalared" { tok KW_scalared }
|
|
|
|
|
"sequence" { tok KW_sequence }
|
2019-03-29 06:07:08 +01:00
|
|
|
"shortint" { tok KW_shortint }
|
|
|
|
|
"shortreal" { tok KW_shortreal }
|
2019-09-05 03:02:02 +02:00
|
|
|
"showcancelled" { tok KW_showcancelled }
|
2019-03-29 06:07:08 +01:00
|
|
|
"signed" { tok KW_signed }
|
2019-09-05 03:02:02 +02:00
|
|
|
"small" { tok KW_small }
|
|
|
|
|
"soft" { tok KW_soft }
|
|
|
|
|
"solve" { tok KW_solve }
|
|
|
|
|
"specify" { tok KW_specify }
|
|
|
|
|
"specparam" { tok KW_specparam }
|
2019-03-29 06:07:08 +01:00
|
|
|
"static" { tok KW_static }
|
2019-09-05 03:02:02 +02:00
|
|
|
"string" { tok KW_string }
|
|
|
|
|
"strong" { tok KW_strong }
|
|
|
|
|
"strong0" { tok KW_strong0 }
|
|
|
|
|
"strong1" { tok KW_strong1 }
|
2019-03-29 06:07:08 +01:00
|
|
|
"struct" { tok KW_struct }
|
2019-09-05 03:02:02 +02:00
|
|
|
"super" { tok KW_super }
|
2019-03-29 06:07:08 +01:00
|
|
|
"supply0" { tok KW_supply0 }
|
|
|
|
|
"supply1" { tok KW_supply1 }
|
2019-09-05 03:02:02 +02:00
|
|
|
"sync_accept_on" { tok KW_sync_accept_on }
|
|
|
|
|
"sync_reject_on" { tok KW_sync_reject_on }
|
|
|
|
|
"table" { tok KW_table }
|
|
|
|
|
"tagged" { tok KW_tagged }
|
2019-03-29 06:07:08 +01:00
|
|
|
"task" { tok KW_task }
|
2019-09-05 03:02:02 +02:00
|
|
|
"this" { tok KW_this }
|
2019-03-30 05:47:37 +01:00
|
|
|
"throughout" { tok KW_throughout }
|
2019-03-29 06:07:08 +01:00
|
|
|
"time" { tok KW_time }
|
2019-09-05 03:02:02 +02:00
|
|
|
"timeprecision" { tok KW_timeprecision }
|
|
|
|
|
"timeunit" { tok KW_timeunit }
|
|
|
|
|
"tran" { tok KW_tran }
|
|
|
|
|
"tranif0" { tok KW_tranif0 }
|
|
|
|
|
"tranif1" { tok KW_tranif1 }
|
2019-03-29 06:07:08 +01:00
|
|
|
"tri" { tok KW_tri }
|
|
|
|
|
"tri0" { tok KW_tri0 }
|
|
|
|
|
"tri1" { tok KW_tri1 }
|
|
|
|
|
"triand" { tok KW_triand }
|
|
|
|
|
"trior" { tok KW_trior }
|
|
|
|
|
"trireg" { tok KW_trireg }
|
2019-09-05 03:02:02 +02:00
|
|
|
"type" { tok KW_type }
|
2019-03-29 06:07:08 +01:00
|
|
|
"typedef" { tok KW_typedef }
|
2019-08-09 05:12:06 +02:00
|
|
|
"union" { tok KW_union }
|
2019-03-29 06:07:08 +01:00
|
|
|
"unique" { tok KW_unique }
|
2019-03-30 05:47:37 +01:00
|
|
|
"unique0" { tok KW_unique0 }
|
2019-03-29 06:07:08 +01:00
|
|
|
"unsigned" { tok KW_unsigned }
|
2019-09-05 03:02:02 +02:00
|
|
|
"until" { tok KW_until }
|
|
|
|
|
"until_with" { tok KW_until_with }
|
|
|
|
|
"untyped" { tok KW_untyped }
|
|
|
|
|
"use" { tok KW_use }
|
2019-03-29 06:07:08 +01:00
|
|
|
"uwire" { tok KW_uwire }
|
2019-09-05 03:02:02 +02:00
|
|
|
"var" { tok KW_var }
|
|
|
|
|
"vectored" { tok KW_vectored }
|
|
|
|
|
"virtual" { tok KW_virtual }
|
|
|
|
|
"void" { tok KW_void }
|
|
|
|
|
"wait" { tok KW_wait }
|
|
|
|
|
"wait_order" { tok KW_wait_order }
|
2019-03-29 06:07:08 +01:00
|
|
|
"wand" { tok KW_wand }
|
2019-09-05 03:02:02 +02:00
|
|
|
"weak" { tok KW_weak }
|
|
|
|
|
"weak0" { tok KW_weak0 }
|
|
|
|
|
"weak1" { tok KW_weak1 }
|
2019-03-29 06:07:08 +01:00
|
|
|
"while" { tok KW_while }
|
2019-09-05 03:02:02 +02:00
|
|
|
"wildcard" { tok KW_wildcard }
|
2019-03-29 06:07:08 +01:00
|
|
|
"wire" { tok KW_wire }
|
2019-09-05 03:02:02 +02:00
|
|
|
"with" { tok KW_with }
|
2019-03-30 05:47:37 +01:00
|
|
|
"within" { tok KW_within }
|
2019-03-29 06:07:08 +01:00
|
|
|
"wor" { tok KW_wor }
|
|
|
|
|
"xnor" { tok KW_xnor }
|
|
|
|
|
"xor" { tok KW_xor }
|
|
|
|
|
|
|
|
|
|

    @simpleIdentifier { tok Id_simple }
    @escapedIdentifier { tok Id_escaped }
    @systemIdentifier { tok Id_system }

    @number { tok Lit_number }
    @string { tok Lit_string }
    @time { tok Lit_time }
"(" { tok Sym_paren_l }
|
|
|
|
|
")" { tok Sym_paren_r }
|
|
|
|
|
"[" { tok Sym_brack_l }
|
|
|
|
|
"]" { tok Sym_brack_r }
|
|
|
|
|
"{" { tok Sym_brace_l }
|
|
|
|
|
"}" { tok Sym_brace_r }
|
|
|
|
|
"~" { tok Sym_tildy }
|
|
|
|
|
"!" { tok Sym_bang }
|
|
|
|
|
"@" { tok Sym_at }
|
|
|
|
|
"#" { tok Sym_pound }
|
|
|
|
|
"%" { tok Sym_percent }
|
|
|
|
|
"^" { tok Sym_hat }
|
|
|
|
|
"&" { tok Sym_amp }
|
|
|
|
|
"|" { tok Sym_bar }
|
|
|
|
|
"*" { tok Sym_aster }
|
|
|
|
|
"." { tok Sym_dot }
|
|
|
|
|
"," { tok Sym_comma }
|
|
|
|
|
":" { tok Sym_colon }
|
|
|
|
|
";" { tok Sym_semi }
|
|
|
|
|
"=" { tok Sym_eq }
|
|
|
|
|
"<" { tok Sym_lt }
|
|
|
|
|
">" { tok Sym_gt }
|
|
|
|
|
"+" { tok Sym_plus }
|
|
|
|
|
"-" { tok Sym_dash }
|
|
|
|
|
"?" { tok Sym_question }
|
|
|
|
|
"/" { tok Sym_slash }
|
|
|
|
|
"$" { tok Sym_dollar }
|
|
|
|
|
"'" { tok Sym_s_quote }
|
|
|
|
|
|
|
|
|
|
"~&" { tok Sym_tildy_amp }
|
|
|
|
|
"~|" { tok Sym_tildy_bar }
|
|
|
|
|
"~^" { tok Sym_tildy_hat }
|
|
|
|
|
"^~" { tok Sym_hat_tildy }
|
|
|
|
|
"==" { tok Sym_eq_eq }
|
|
|
|
|
"!=" { tok Sym_bang_eq }
|
|
|
|
|
"&&" { tok Sym_amp_amp }
|
|
|
|
|
"||" { tok Sym_bar_bar }
|
|
|
|
|
"**" { tok Sym_aster_aster }
|
|
|
|
|
"<=" { tok Sym_lt_eq }
|
|
|
|
|
">=" { tok Sym_gt_eq }
|
|
|
|
|
">>" { tok Sym_gt_gt }
|
|
|
|
|
"<<" { tok Sym_lt_lt }
|
|
|
|
|
"++" { tok Sym_plus_plus }
|
|
|
|
|
"--" { tok Sym_dash_dash }
|
|
|
|
|
"+=" { tok Sym_plus_eq }
|
|
|
|
|
"-=" { tok Sym_dash_eq }
|
|
|
|
|
"*=" { tok Sym_aster_eq }
|
|
|
|
|
"/=" { tok Sym_slash_eq }
|
|
|
|
|
"%=" { tok Sym_percent_eq }
|
|
|
|
|
"&=" { tok Sym_amp_eq }
|
|
|
|
|
"|=" { tok Sym_bar_eq }
|
|
|
|
|
"^=" { tok Sym_hat_eq }
|
|
|
|
|
"+:" { tok Sym_plus_colon }
|
|
|
|
|
"-:" { tok Sym_dash_colon }
|
|
|
|
|
"::" { tok Sym_colon_colon }
|
|
|
|
|
".*" { tok Sym_dot_aster }
|
|
|
|
|
"->" { tok Sym_dash_gt }
|
|
|
|
|
":=" { tok Sym_colon_eq }
|
|
|
|
|
":/" { tok Sym_colon_slash }
|
|
|
|
|
"##" { tok Sym_pound_pound }
|
|
|
|
|
"[*" { tok Sym_brack_l_aster }
|
|
|
|
|
"[=" { tok Sym_brack_l_eq }
|
|
|
|
|
"=>" { tok Sym_eq_gt }
|
|
|
|
|
"@*" { tok Sym_at_aster }
|
|
|
|
|
"(*" { tok Sym_paren_l_aster }
|
|
|
|
|
"*)" { tok Sym_aster_paren_r }
|
|
|
|
|
"*>" { tok Sym_aster_gt }
|
|
|
|
|
|
|
|
|
|
"===" { tok Sym_eq_eq_eq }
|
|
|
|
|
"!==" { tok Sym_bang_eq_eq }
|
|
|
|
|
"==?" { tok Sym_eq_eq_question }
|
|
|
|
|
"!=?" { tok Sym_bang_eq_question }
|
|
|
|
|
">>>" { tok Sym_gt_gt_gt }
|
|
|
|
|
"<<<" { tok Sym_lt_lt_lt }
|
|
|
|
|
"<<=" { tok Sym_lt_lt_eq }
|
|
|
|
|
">>=" { tok Sym_gt_gt_eq }
|
|
|
|
|
"|->" { tok Sym_bar_dash_gt }
|
|
|
|
|
"|=>" { tok Sym_bar_eq_gt }
|
|
|
|
|
"[->" { tok Sym_brack_l_dash_gt }
|
2019-03-30 05:47:37 +01:00
|
|
|
"#-#" { tok Sym_pound_dash_pound }
|
|
|
|
|
"#=#" { tok Sym_pound_eq_pound }
|
2019-03-29 06:07:08 +01:00
|
|
|
"@@(" { tok Sym_at_at_paren_l }
|
|
|
|
|
"(*)" { tok Sym_paren_l_aster_paren_r }
|
|
|
|
|
"->>" { tok Sym_dash_gt_gt }
|
|
|
|
|
"&&&" { tok Sym_amp_amp_amp }
|
|
|
|
|
|
|
|
|
|
"<<<=" { tok Sym_lt_lt_lt_eq }
|
|
|
|
|
">>>=" { tok Sym_gt_gt_gt_eq }
|
|
|
|
|
|
2019-03-29 10:19:11 +01:00
|
|
|

    @directive { handleDirective }
    @commentLine { removeUntil "\n" }
    @commentBlock { removeUntil "*/" }

    $white ;

    . { tok Unknown }

{

-- our actions don't return any data
type Action = AlexInput -> Int -> Alex ()

-- keeps track of the state of an if-else cascade level
data Cond
    = CurrentlyTrue
    | PreviouslyTrue
    | NeverTrue
    deriving (Eq, Show)

-- map from macro to definition, plus arguments
type Env = Map.Map String (String, [(String, Maybe String)])

-- our custom lexer state
data AlexUserState = LS
    { lsToks :: [Token] -- tokens read so far, *in reverse order* for efficiency
    , lsCurrFile :: FilePath -- currently active filename
    , lsEnv :: Env -- active macro definitions
    , lsCondStack :: [Cond] -- if-else cascade state
    , lsIncludePaths :: [FilePath] -- folders to search for includes
    } deriving (Eq, Show)

-- this initial user state does not contain the initial filename, environment,
-- or include paths; alex requires that this be defined; we override it before
-- we begin the actual lexing procedure
alexInitUserState :: AlexUserState
alexInitUserState = LS [] "" Map.empty [] []

-- public-facing lexer entrypoint
lexFile :: [String] -> Env -> FilePath -> IO ([Token], Env)
lexFile includePaths env path = do
    str <- readFile path
    let result = runAlex str $ setEnv >> alexMonadScan >> get
    return $ case result of
        Left msg -> error $ "Lexical Error: " ++ msg
        Right finalState ->
            if null $ lsCondStack finalState
                then (reverse $ lsToks finalState, lsEnv finalState)
                else error $ "unfinished conditional directives: " ++
                    (show $ length $ lsCondStack finalState)
    where
        setEnv = do
            -- standardize the file path format
            path' <- includeSearch path
            modify $ \s -> s
                { lsEnv = env
                , lsIncludePaths = includePaths
                , lsCurrFile = path'
                }

-- invoked by alexMonadScan
alexEOF :: Alex ()
alexEOF = return ()

-- raises an alexError with the current file position appended
lexicalError :: String -> Alex a
lexicalError msg = do
    (pn, _, _, _) <- alexGetInput
    pos <- toTokPos pn
    alexError $ msg ++ ", at " ++ show pos

-- get the current user state
get :: Alex AlexUserState
get = Alex $ \s -> Right (s, alex_ust s)

-- get the current user state and apply a function to it
gets :: (AlexUserState -> a) -> Alex a
gets f = get >>= return . f

-- apply a transformation to the current user state
modify :: (AlexUserState -> AlexUserState) -> Alex ()
modify f = Alex func
    where func s = Right (s { alex_ust = new }, ())
            where new = f (alex_ust s)

-- helpers specifically accessing the current file state
getCurrentFile :: Alex String
getCurrentFile = gets lsCurrFile
setCurrentFile :: String -> Alex ()
setCurrentFile x = modify $ \s -> s { lsCurrFile = x }

-- find the given file for inclusion
includeSearch :: FilePath -> Alex FilePath
includeSearch file = do
    base <- getCurrentFile
    includePaths <- gets lsIncludePaths
    let directories = dropFileName base : includePaths
    let result = unsafePerformIO $ findFile directories file
    case result of
        Just path -> return path
        Nothing -> lexicalError $ "Could not find file " ++ show file ++
            ", included from " ++ show base

-- read in the given file
loadFile :: FilePath -> Alex String
loadFile = return . unsafePerformIO . readFile
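
-- identifier characters: letters, digits, underscore, and dollar sign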
isIdentChar :: Char -> Bool
isIdentChar ch =
    ('a' <= ch && ch <= 'z') ||
    ('A' <= ch && ch <= 'Z') ||
    ('0' <= ch && ch <= '9') ||
    (ch == '_') || (ch == '$')
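
-- consume a run of identifier characters from the input, advancing the lexer
-- position past them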
takeString :: Alex String
takeString = do
    (pos, _, _, str) <- alexGetInput
    let (x, rest) = span isIdentChar str
    let lastChar = if null x then ' ' else last x
    alexSetInput (foldl alexMove pos x, lastChar, [], rest)
    return x
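
-- convert an alex position into a Position in the currently active file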
toTokPos :: AlexPosn -> Alex Position
toTokPos (AlexPn _ l c) = do
    file <- getCurrentFile
    return $ Position file l c

-- read the characters after the macro name until the first (un-escaped) newline
takeUntilNewline :: Alex String
takeUntilNewline = do
    (pos, _, _, str) <- alexGetInput
    case str of
        [] -> return ""
        '\n' : _ -> do
            return ""
        '/' : '/' : _ -> do
            remainder <- takeThrough '\n'
            case last $ init remainder of
                '\\' -> takeUntilNewline >>= return . (' ' :)
                _ -> return ""
        '\\' : '\n' : rest -> do
            let newPos = alexMove (alexMove pos '\\') '\n'
            alexSetInput (newPos, '\n', [], rest)
            takeUntilNewline >>= return . (' ' :)
        ch : rest -> do
            let newPos = alexMove pos ch
            alexSetInput (newPos, ch, [], rest)
            takeUntilNewline >>= return . (ch :)

-- select characters up to and including the given character
takeThrough :: Char -> Alex String
takeThrough goal = do
    (_, _, _, str) <- alexGetInput
    if null str
        then lexicalError $
            "unexpected end of input, looking for " ++ (show goal)
        else do
            ch <- takeChar
            if ch == goal
                then return [ch]
                else do
                    rest <- takeThrough goal
                    return $ ch : rest

-- pop one character from the input stream
takeChar :: Alex Char
takeChar = do
    (pos, _, _, str) <- alexGetInput
    (ch, chs) <-
        if null str
            then lexicalError "unexpected end of input"
            else return (head str, tail str)
    let newPos = alexMove pos ch
    alexSetInput (newPos, ch, [], chs)
    return ch

-- drop spaces in the input until a non-space is reached or EOF
dropSpaces :: Alex ()
dropSpaces = do
    (pos, _, _, str) <- alexGetInput
    case str of
        ' ' : rest -> do
            alexSetInput (alexMove pos ' ', ' ', [], rest)
            dropSpaces
        [] -> return ()
        _ -> return ()
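
-- spaces, tabs, and newlines count as whitespace here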
isWhitespaceChar :: Char -> Bool
isWhitespaceChar ch = elem ch [' ', '\t', '\n']

-- drop all leading whitespace in the input
dropWhitespace :: Alex ()
dropWhitespace = do
    (pos, _, _, str) <- alexGetInput
    case str of
        ch : chs ->
            if isWhitespaceChar ch
                then do
                    alexSetInput (alexMove pos ch, ch, [], chs)
                    dropWhitespace
                else return ()
        [] -> return ()

-- removes and returns a quoted string such as <foo.bar> or "foo.bar"
takeQuotedString :: Alex String
takeQuotedString = do
    dropSpaces
    ch <- takeChar
    end <-
        case ch of
            '"' -> return '"'
            '<' -> return '>'
            _ -> lexicalError $ "bad beginning of include arg: " ++ (show ch)
    rest <- takeThrough end
    let res = ch : rest
    if end == '>'
        then lexicalError $ "library includes are not supported: " ++ res
        else return res
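
-- look at the next character without consuming it; yields a newline at EOF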
peekChar :: Alex Char
peekChar = do
    (_, _, _, str) <- alexGetInput
    return $ if null str
        then '\n'
        else head str
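
-- read a macro body from a `define, along with any formal arguments and their
-- default values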
takeMacroDefinition :: Alex (String, [(String, Maybe String)])
takeMacroDefinition = do
    leadCh <- peekChar
    if leadCh /= '('
        then do
            body <- takeUntilNewline
            return (body, [])
        else do
            args <- takeMacroArguments
            body <- takeUntilNewline
            argsWithDefaults <- mapM splitArg args
            if null args
                then lexicalError "macros cannot have 0 args"
                else return (body, argsWithDefaults)
    where
        splitArg :: String -> Alex (String, Maybe String)
        splitArg [] = lexicalError "macro defn. empty argument"
        splitArg str = do
            let (name, rest) = span isIdentChar str
            if null name || not (all isIdentChar name) then
                lexicalError $ "invalid macro arg name: " ++ show name
            else if null rest then
                return (name, Nothing)
            else do
                let trimmed = dropWhile isWhitespaceChar rest
                let leadCh = head trimmed
                if leadCh /= '='
                    then lexicalError $ "bad char after arg name: " ++ (show leadCh)
                    else return (name, Just $ tail trimmed)

-- commas and right parens are forbidden outside matched pairs of: (), [], {},
-- "", except to delimit arguments or end the list of arguments; see 22.5.1
takeMacroArguments :: Alex [String]
takeMacroArguments = do
    dropWhitespace
    leadCh <- takeChar
    if leadCh == '('
        then argLoop
        else lexicalError $ "expected beginning of macro arguments, but found "
            ++ show leadCh
    where
        argLoop :: Alex [String]
        argLoop = do
            dropWhitespace
            (arg, isEnd) <- loop "" []
            let arg' = dropWhileEnd isWhitespaceChar arg
            if isEnd
                then return [arg']
                else do
                    rest <- argLoop
                    return $ arg' : rest
        loop :: String -> [Char] -> Alex (String, Bool)
        loop curr stack = do
            ch <- takeChar
            case (stack, ch) of
                ( s,'\\') -> do
                    ch2 <- takeChar
                    loop (curr ++ [ch, ch2]) s
                ([ ], ',') -> return (curr, False)
                ([ ], ')') -> return (curr, True)

                ('"' : s, '"') -> loop (curr ++ [ch]) s
                ( s, '"') -> loop (curr ++ [ch]) ('"' : s)
                ('[' : s, ']') -> loop (curr ++ [ch]) s
                ( s, '[') -> loop (curr ++ [ch]) ('[' : s)
                ('(' : s, ')') -> loop (curr ++ [ch]) s
                ( s, '(') -> loop (curr ++ [ch]) ('(' : s)
                ('{' : s, '}') -> loop (curr ++ [ch]) s
                ( s, '{') -> loop (curr ++ [ch]) ('{' : s)

                ( s,'\n') -> loop (curr ++ [' ']) s
                ( s, _ ) -> loop (curr ++ [ch ]) s
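
-- split a string at the first un-escaped double quote; the quote itself ends
-- the first piece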
findUnescapedQuote :: String -> (String, String)
findUnescapedQuote [] = ([], [])
findUnescapedQuote ('`' : '\\' : '`' : '"' : rest) = ('\\' : '"' : start, end)
    where (start, end) = findUnescapedQuote rest
findUnescapedQuote ('\\' : '"' : rest) = ('\\' : '"' : start, end)
    where (start, end) = findUnescapedQuote rest
findUnescapedQuote ('"' : rest) = ("\"", rest)
findUnescapedQuote (ch : rest) = (ch : start, end)
    where (start, end) = findUnescapedQuote rest

-- substitute in the arguments for a macro expansion
substituteArgs :: String -> [String] -> [String] -> String
substituteArgs "" _ _ = ""
substituteArgs ('`' : '`' : body) names args =
    substituteArgs body names args
substituteArgs ('"' : body) names args =
    '"' : start ++ substituteArgs rest names args
    where (start, rest) = findUnescapedQuote body
substituteArgs ('\\' : '"' : body) names args =
    '\\' : '"' : substituteArgs body names args
substituteArgs ('`' : '"' : body) names args =
    '"' : substituteArgs (init start) names args
    ++ '"' : substituteArgs rest names args
    where (start, rest) = findUnescapedQuote body
substituteArgs body names args =
    case span isIdentChar body of
        ([], _) -> head body : substituteArgs (tail body) names args
        (ident, rest) ->
            case elemIndex ident names of
                Nothing -> ident ++ substituteArgs rest names args
                Just idx -> (args !! idx) ++ substituteArgs rest names args
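
-- merge the actual arguments of a macro invocation with the defaults of its
-- formal arguments, reporting an error if the counts cannot be reconciled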
defaultMacroArgs :: [Maybe String] -> [String] -> Alex [String]
defaultMacroArgs [] [] = return []
defaultMacroArgs [] _ = lexicalError "too many macro arguments given"
defaultMacroArgs defaults [] = do
    if all isJust defaults
        then return $ map fromJust defaults
        else lexicalError "too few macro arguments given"
defaultMacroArgs (f : fs) (a : as) = do
    let arg = if a == "" && isJust f
            then fromJust f
            else a
    args <- defaultMacroArgs fs as
    return $ arg : args

-- directives that must always be processed even if the current code block is
-- being excluded; we have to process conditions so we can match them up with
-- their ending tag, even if they're being skipped
unskippableDirectives :: [String]
unskippableDirectives = ["else", "elsif", "endif", "ifdef", "ifndef"]
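
-- process a directive or macro invocation found in the source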
handleDirective :: Action
handleDirective (posOrig, _, _, strOrig) len = do
    let thisTokenStr = take len strOrig
    let directive = tail $ thisTokenStr
    let newPos = foldl alexMove posOrig thisTokenStr
    alexSetInput (newPos, last thisTokenStr, [], drop len strOrig)

    env <- gets lsEnv
    tempInput <- alexGetInput
    let dropUntilNewline = removeUntil "\n" tempInput 0

    condStack <- gets lsCondStack
    if any (/= CurrentlyTrue) condStack
        && not (elem directive unskippableDirectives)
        then alexMonadScan
        else case directive of
"default_nettype" -> dropUntilNewline
|
|
|
|
|
"timescale" -> dropUntilNewline
|
|
|
|
|
|
2019-03-29 22:02:49 +01:00
|
|
|
"__FILE__" -> do
|
|
|
|
|
tokPos <- toTokPos posOrig
|
|
|
|
|
currFile <- gets lsCurrFile
|
|
|
|
|
let tokStr = show currFile
|
|
|
|
|
modify $ push $ Token Lit_string tokStr tokPos
|
|
|
|
|
alexMonadScan
|
|
|
|
|
"__LINE__" -> do
|
|
|
|
|
tokPos <- toTokPos posOrig
|
|
|
|
|
let Position _ currLine _ = tokPos
|
|
|
|
|
let tokStr = show currLine
|
|
|
|
|
modify $ push $ Token Lit_number tokStr tokPos
|
|
|
|
|
alexMonadScan
|
|
|
|
|
|
2019-03-29 18:59:51 +01:00
|
|
|
"include" -> do
|
|
|
|
|
quotedFilename <- takeQuotedString
|
|
|
|
|
inputFollow <- alexGetInput
|
|
|
|
|
fileFollow <- getCurrentFile
|
|
|
|
|
-- process the included file
|
|
|
|
|
let filename = init $ tail quotedFilename
|
|
|
|
|
path <- includeSearch filename
|
|
|
|
|
content <- loadFile path
|
|
|
|
|
let inputIncluded = (alexStartPos, ' ', [], content)
|
|
|
|
|
setCurrentFile path
|
|
|
|
|
alexSetInput inputIncluded
|
|
|
|
|
alexMonadScan
|
|
|
|
|
-- resume processing the original file
|
|
|
|
|
setCurrentFile fileFollow
|
|
|
|
|
alexSetInput inputFollow
|
|
|
|
|
alexMonadScan
|
|
|
|
|
|
2019-03-29 10:19:11 +01:00
|
|
|
"ifdef" -> do
|
2019-03-29 18:59:51 +01:00
|
|
|
dropSpaces
|
2019-03-29 10:19:11 +01:00
|
|
|
name <- takeString
|
|
|
|
|
let newCond = if Map.member name env
|
|
|
|
|
then CurrentlyTrue
|
|
|
|
|
else NeverTrue
|
|
|
|
|
modify $ \s -> s { lsCondStack = newCond : condStack }
|
|
|
|
|
alexMonadScan
|
|
|
|
|
"ifndef" -> do
|
2019-03-29 18:59:51 +01:00
|
|
|
dropSpaces
|
2019-03-29 10:19:11 +01:00
|
|
|
name <- takeString
|
|
|
|
|
let newCond = if Map.notMember name env
|
|
|
|
|
then CurrentlyTrue
|
|
|
|
|
else NeverTrue
|
|
|
|
|
modify $ \s -> s { lsCondStack = newCond : condStack }
|
|
|
|
|
alexMonadScan
|
|
|
|
|
"else" -> do
|
|
|
|
|
let newCond = if head condStack == NeverTrue
|
|
|
|
|
then CurrentlyTrue
|
|
|
|
|
else NeverTrue
|
|
|
|
|
modify $ \s -> s { lsCondStack = newCond : tail condStack }
|
|
|
|
|
alexMonadScan
|
|
|
|
|
"elsif" -> do
|
2019-03-29 18:59:51 +01:00
|
|
|
dropSpaces
|
2019-03-29 10:19:11 +01:00
|
|
|
name <- takeString
|
|
|
|
|
let currCond = head condStack
|
|
|
|
|
let newCond =
|
|
|
|
|
if currCond /= NeverTrue then
|
|
|
|
|
PreviouslyTrue
|
|
|
|
|
else if Map.member name env then
|
|
|
|
|
CurrentlyTrue
|
|
|
|
|
else
|
|
|
|
|
NeverTrue
|
|
|
|
|
modify $ \s -> s { lsCondStack = newCond : tail condStack }
|
|
|
|
|
alexMonadScan
|
|
|
|
|
"endif" -> do
|
|
|
|
|
modify $ \s -> s { lsCondStack = tail condStack }
|
|
|
|
|
alexMonadScan
|
|
|
|
|
|
|
|
|
|
"define" -> do
|
2019-03-29 18:59:51 +01:00
|
|
|
dropSpaces
|
2019-03-29 10:19:11 +01:00
|
|
|
name <- takeString
|
2019-03-29 22:02:49 +01:00
|
|
|
defn <- takeMacroDefinition
|
2019-03-29 10:19:11 +01:00
|
|
|
modify $ \s -> s { lsEnv = Map.insert name defn env }
|
|
|
|
|
alexMonadScan
|
|
|
|
|
"undef" -> do
|
2019-03-29 18:59:51 +01:00
|
|
|
dropSpaces
|
2019-03-29 10:19:11 +01:00
|
|
|
name <- takeString
|
|
|
|
|
modify $ \s -> s { lsEnv = Map.delete name env }
|
|
|
|
|
alexMonadScan
|
|
|
|
|
"undefineall" -> do
|
|
|
|
|
modify $ \s -> s { lsEnv = Map.empty }
|
|
|
|
|
alexMonadScan
|
|
|
|
|
|
|
|
|
|

            _ -> do
                case Map.lookup directive env of
                    Nothing -> lexicalError $ "Undefined macro: " ++ directive
                    Just (body, formalArgs) -> do
                        (AlexPn _ l c, _, _, _) <- alexGetInput
                        replacement <- if null formalArgs
                            then return body
                            else do
                                actualArgs <- takeMacroArguments
                                defaultedArgs <- defaultMacroArgs (map snd formalArgs) actualArgs
                                return $ substituteArgs body (map fst formalArgs) defaultedArgs
                        -- save our current state
                        currInput <- alexGetInput
                        currToks <- gets lsToks
                        modify $ \s -> s { lsToks = [] }
                        -- lex the macro expansion, preserving the file and line
                        alexSetInput (AlexPn 0 l 0, ' ' , [], replacement)
                        alexMonadScan
                        -- re-tag and save tokens from the macro expansion
                        newToks <- gets lsToks
                        currFile <- getCurrentFile
                        let loc = "macro expansion of " ++ directive ++ " at " ++ currFile
                        let pos = Position loc l (c - length directive - 1)
                        let reTag (Token a b _) = Token a b pos
                        modify $ \s -> s { lsToks = (map reTag newToks) ++ currToks }
                        -- continue lexing after the macro
                        alexSetInput currInput
                        alexMonadScan

-- remove characters from the input until the pattern is reached
removeUntil :: String -> Action
removeUntil pattern _ _ = loop
    where
        patternLen = length pattern
        wantNewline = pattern == "\n"
        loop = do
            (pos, _, _, str) <- alexGetInput
            let found = (null str && wantNewline)
                    || pattern == take patternLen str
            let nextPos = alexMove pos (head str)
            let afterPos = if wantNewline
                    then alexMove pos '\n'
                    else foldl alexMove pos pattern
            let (newPos, newStr) = if found
                    then (afterPos, drop patternLen str)
                    else (nextPos, drop 1 str)
            if not found && null str
                then lexicalError $ "Reached EOF while looking for: " ++
                    show pattern
                else do
                    alexSetInput (newPos, ' ', [], newStr)
                    if found
                        then alexMonadScan
                        else loop
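
-- record a token in the lexer state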
push :: Token -> AlexUserState -> AlexUserState
push t s = s { lsToks = t : (lsToks s) }
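
-- standard token action: record the token unless the enclosing conditional
-- directives mark this region as excluded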
tok :: TokenName -> Action
tok tokId (pos, _, _, input) len = do
    let tokStr = take len input
    tokPos <- toTokPos pos
    condStack <- gets lsCondStack
    () <- if any (/= CurrentlyTrue) condStack
        then modify id
        else modify (push $ Token tokId tokStr tokPos)
    alexMonadScan
}