diff --git a/cxxheaderparser/lexer.py b/cxxheaderparser/lexer.py
index 17caf92..28839d9 100644
--- a/cxxheaderparser/lexer.py
+++ b/cxxheaderparser/lexer.py
@@ -73,6 +73,7 @@ class PlyLexer:
         "alignas",
         "alignof",
         "asm",
+        "__asm",
         "auto",
         "bool",
         "break",
@@ -99,6 +100,7 @@ class PlyLexer:
         "enum",
         "explicit",
         "export",
+        "__extension__",
         "extern",
         "false",
         "final",
@@ -124,6 +126,7 @@ class PlyLexer:
         "public",
         "register",
         "reinterpret_cast",
+        "__restrict",
         "requires",
         "return",
         "short",
diff --git a/cxxheaderparser/parser.py b/cxxheaderparser/parser.py
index 7cea0ea..76a04d1 100644
--- a/cxxheaderparser/parser.py
+++ b/cxxheaderparser/parser.py
@@ -302,6 +302,7 @@ def parse(self) -> None:
         ] = {
             "__attribute__": self._consume_gcc_attribute,
             "__declspec": self._consume_declspec,
+            "__extension__": lambda _1, _2: None,
             "alignas": self._consume_attribute_specifier_seq,
             "extern": self._parse_extern,
             "friend": self._parse_friend_decl,
@@ -879,6 +880,12 @@ def _parse_requires_segment(
         # Let the caller decide
         return tok
 
+    def _consume_asm(
+        self, tok: LexToken, doxygen: typing.Optional[str] = None
+    ) -> None:
+        tok = self._next_token_must_be("(")
+        self._consume_balanced_tokens(tok)
+
     #
     # Attributes
     #
@@ -1394,8 +1401,15 @@ def _discard_ctor_initializer(self) -> None:
 
     def _parse_bitfield(self) -> int:
         # is a integral constant expression... for now, just do integers
-        tok = self._next_token_must_be("INT_CONST_DEC")
-        return int(tok.value)
+        const_expr = ''
+        while True:
+            tok = self.lex.token_if_not("=", ";")
+            if tok:
+                const_expr += tok.value
+            else:
+                break
+        # SECURITY(review): eval() of header tokens can run arbitrary code
+        return int(eval(const_expr))
 
     def _parse_field(
         self,
@@ -1791,6 +1805,9 @@ def _parse_parameter(
 
         dtype = self._parse_cv_ptr(parsed_type)
 
+        # optional __restrict (GCC/MSVC extension); consumed and discarded
+        self.lex.token_if("__restrict")
+
         # optional parameter pack
         if self.lex.token_if("ELLIPSIS"):
             param_pack = True
@@ -2679,6 +2696,15 @@ def _parse_declarations(
             # Unset the doxygen, location
             doxygen = None
 
+            tok = self.lex.token_if("__asm")
+            if tok:
+                self._consume_asm(tok)
+
+            tok = self.lex.token_if_in_set(self._attribute_start_tokens)
+            while tok:
+                self._consume_attribute(tok)
+                tok = self.lex.token_if_in_set(self._attribute_start_tokens)
+
             # Check for multiple declarations
             tok = self._next_token_must_be(",", ";")
             location = tok.location