OmniSciDB
c1a53651b2
|
Public Member Functions | |
def | __init__ |
def | line |
def | tokens |
def | tokenize |
def | is_at_end |
def | current_token |
def | add_token |
def | lookahead |
def | advance |
def | peek |
def | can_token_be_double_char |
def | consume_double_char |
def | consume_single_char |
def | consume_whitespace |
def | consume_string |
def | consume_number |
def | consume_identifier |
def | is_token_identifier |
def | is_token_string |
def | is_digit |
def | is_alpha |
def | is_token_whitespace |
def | raise_tokenize_error |
Public Attributes | |
start | |
curr | |
Private Attributes | |
_line | |
_tokens | |
Definition at line 460 of file generate_TableFunctionsFactory_init.py.
def generate_TableFunctionsFactory_init.Tokenize.__init__ | ( | self, | |
line | |||
) |
Definition at line 461 of file generate_TableFunctionsFactory_init.py.
def generate_TableFunctionsFactory_init.Tokenize.add_token | ( | self, | |
type | |||
) |
Definition at line 499 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.curr, generate_TableFunctionsFactory_init.Tokenize.line(), generate_TableFunctionsFactory_init.UdtfNode.line, generate_TableFunctionsFactory_init.Parser.line, foreign_storage::Interval< T >.start, JoinColumnIterator.start, ai.heavy.jdbc.HeavyAIEscapeParser.Pair.start, JoinColumnTyped::Slice.start, JoinColumnTuple::Slice.start, generate_TableFunctionsFactory_init.Tokenize.start, and import_export::ImportStatus.start.
Referenced by generate_TableFunctionsFactory_init.Tokenize.consume_double_char(), generate_TableFunctionsFactory_init.Tokenize.consume_identifier(), generate_TableFunctionsFactory_init.Tokenize.consume_number(), generate_TableFunctionsFactory_init.Tokenize.consume_single_char(), and generate_TableFunctionsFactory_init.Tokenize.consume_string().
def generate_TableFunctionsFactory_init.Tokenize.advance | ( | self | ) |
Definition at line 508 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.curr.
Referenced by generate_TableFunctionsFactory_init.Parser.consume(), generate_TableFunctionsFactory_init.Tokenize.consume_double_char(), generate_TableFunctionsFactory_init.Tokenize.consume_identifier(), generate_TableFunctionsFactory_init.Tokenize.consume_number(), generate_TableFunctionsFactory_init.Tokenize.consume_single_char(), generate_TableFunctionsFactory_init.Tokenize.consume_string(), generate_TableFunctionsFactory_init.Tokenize.consume_whitespace(), and generate_TableFunctionsFactory_init.Parser.expect().
def generate_TableFunctionsFactory_init.Tokenize.can_token_be_double_char | ( | self | ) |
Definition at line 514 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.peek().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.consume_double_char | ( | self | ) |
Definition at line 518 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.add_token(), generate_TableFunctionsFactory_init.Tokenize.advance(), anonymous_namespace{RelAlgDag.cpp}::RANodeIterator.advance(), generate_TableFunctionsFactory_init.Tokenize.lookahead(), and generate_TableFunctionsFactory_init.Tokenize.raise_tokenize_error().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.consume_identifier | ( | self | ) |
IDENTIFIER: [A-Za-z_][A-Za-z0-9_]*
Definition at line 585 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.add_token(), generate_TableFunctionsFactory_init.Tokenize.advance(), anonymous_namespace{RelAlgDag.cpp}::RANodeIterator.advance(), and generate_TableFunctionsFactory_init.Tokenize.lookahead().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.consume_number | ( | self | ) |
NUMBER: [0-9]+
Definition at line 572 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.add_token(), generate_TableFunctionsFactory_init.Tokenize.advance(), anonymous_namespace{RelAlgDag.cpp}::RANodeIterator.advance(), and generate_TableFunctionsFactory_init.Tokenize.lookahead().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.consume_single_char | ( | self | ) |
Definition at line 527 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.add_token(), generate_TableFunctionsFactory_init.Tokenize.advance(), anonymous_namespace{RelAlgDag.cpp}::RANodeIterator.advance(), generate_TableFunctionsFactory_init.Tokenize.peek(), and generate_TableFunctionsFactory_init.Tokenize.raise_tokenize_error().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.consume_string | ( | self | ) |
STRING: \".*?\"
Definition at line 558 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.add_token(), generate_TableFunctionsFactory_init.Tokenize.advance(), anonymous_namespace{RelAlgDag.cpp}::RANodeIterator.advance(), generate_TableFunctionsFactory_init.Tokenize.lookahead(), and generate_TableFunctionsFactory_init.Tokenize.peek().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.consume_whitespace | ( | self | ) |
Definition at line 555 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.advance(), and anonymous_namespace{RelAlgDag.cpp}::RANodeIterator.advance().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.current_token | ( | self | ) |
Definition at line 496 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.curr, generate_TableFunctionsFactory_init.Tokenize.line(), generate_TableFunctionsFactory_init.UdtfNode.line, generate_TableFunctionsFactory_init.Parser.line, foreign_storage::Interval< T >.start, JoinColumnIterator.start, ai.heavy.jdbc.HeavyAIEscapeParser.Pair.start, JoinColumnTyped::Slice.start, JoinColumnTuple::Slice.start, generate_TableFunctionsFactory_init.Tokenize.start, and import_export::ImportStatus.start.
Referenced by generate_TableFunctionsFactory_init.Parser.consume(), generate_TableFunctionsFactory_init.Parser.expect(), generate_TableFunctionsFactory_init.Parser.match(), and generate_TableFunctionsFactory_init.Parser.raise_parser_error().
def generate_TableFunctionsFactory_init.Tokenize.is_alpha | ( | self | ) |
Definition at line 607 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.peek().
def generate_TableFunctionsFactory_init.Tokenize.is_at_end | ( | self | ) |
Definition at line 493 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.curr, generate_TableFunctionsFactory_init.Tokenize.line(), generate_TableFunctionsFactory_init.UdtfNode.line, and generate_TableFunctionsFactory_init.Parser.line.
Referenced by generate_TableFunctionsFactory_init.Parser.parse_annotation(), generate_TableFunctionsFactory_init.Parser.parse_arg(), generate_TableFunctionsFactory_init.Parser.parse_args(), generate_TableFunctionsFactory_init.Parser.parse_templates(), generate_TableFunctionsFactory_init.Parser.parse_type(), generate_TableFunctionsFactory_init.Parser.parse_udtf(), and generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.is_digit | ( | self | ) |
Definition at line 604 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.peek().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.is_token_identifier | ( | self | ) |
Definition at line 598 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.peek().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.is_token_string | ( | self | ) |
Definition at line 601 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.peek().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.is_token_whitespace | ( | self | ) |
Definition at line 610 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.peek().
Referenced by generate_TableFunctionsFactory_init.Tokenize.tokenize().
def generate_TableFunctionsFactory_init.Tokenize.line | ( | self | ) |
Definition at line 469 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize._line.
Referenced by generate_TableFunctionsFactory_init.Tokenize.add_token(), generate_TableFunctionsFactory_init.Tokenize.current_token(), generate_TableFunctionsFactory_init.Tokenize.is_at_end(), generate_TableFunctionsFactory_init.Tokenize.lookahead(), generate_TableFunctionsFactory_init.Tokenize.peek(), and generate_TableFunctionsFactory_init.Tokenize.raise_tokenize_error().
def generate_TableFunctionsFactory_init.Tokenize.lookahead | ( | self | ) |
Definition at line 503 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.curr, generate_TableFunctionsFactory_init.Tokenize.line(), generate_TableFunctionsFactory_init.UdtfNode.line, and generate_TableFunctionsFactory_init.Parser.line.
Referenced by generate_TableFunctionsFactory_init.Tokenize.consume_double_char(), generate_TableFunctionsFactory_init.Tokenize.consume_identifier(), generate_TableFunctionsFactory_init.Tokenize.consume_number(), generate_TableFunctionsFactory_init.Tokenize.consume_string(), and generate_TableFunctionsFactory_init.Parser.parse_arg().
def generate_TableFunctionsFactory_init.Tokenize.peek | ( | self | ) |
Definition at line 511 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.curr, generate_TableFunctionsFactory_init.Tokenize.line(), generate_TableFunctionsFactory_init.UdtfNode.line, and generate_TableFunctionsFactory_init.Parser.line.
Referenced by generate_TableFunctionsFactory_init.Tokenize.can_token_be_double_char(), generate_TableFunctionsFactory_init.Tokenize.consume_single_char(), generate_TableFunctionsFactory_init.Tokenize.consume_string(), generate_TableFunctionsFactory_init.Tokenize.is_alpha(), generate_TableFunctionsFactory_init.Tokenize.is_digit(), generate_TableFunctionsFactory_init.Tokenize.is_token_identifier(), generate_TableFunctionsFactory_init.Tokenize.is_token_string(), generate_TableFunctionsFactory_init.Tokenize.is_token_whitespace(), and generate_TableFunctionsFactory_init.Tokenize.raise_tokenize_error().
def generate_TableFunctionsFactory_init.Tokenize.raise_tokenize_error | ( | self | ) |
Definition at line 613 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.curr, generate_TableFunctionsFactory_init.Tokenize.line(), generate_TableFunctionsFactory_init.UdtfNode.line, generate_TableFunctionsFactory_init.Parser.line, and generate_TableFunctionsFactory_init.Tokenize.peek().
Referenced by generate_TableFunctionsFactory_init.Tokenize.consume_double_char(), and generate_TableFunctionsFactory_init.Tokenize.consume_single_char().
def generate_TableFunctionsFactory_init.Tokenize.tokenize | ( | self | ) |
Definition at line 476 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize.can_token_be_double_char(), generate_TableFunctionsFactory_init.Tokenize.consume_double_char(), generate_TableFunctionsFactory_init.Tokenize.consume_identifier(), generate_TableFunctionsFactory_init.Tokenize.consume_number(), generate_TableFunctionsFactory_init.Tokenize.consume_single_char(), generate_TableFunctionsFactory_init.Tokenize.consume_string(), generate_TableFunctionsFactory_init.Tokenize.consume_whitespace(), generate_TableFunctionsFactory_init.Tokenize.curr, generate_TableFunctionsFactory_init.Tokenize.is_at_end(), generate_TableFunctionsFactory_init.Tokenize.is_digit(), generate_TableFunctionsFactory_init.Tokenize.is_token_identifier(), generate_TableFunctionsFactory_init.Tokenize.is_token_string(), generate_TableFunctionsFactory_init.Tokenize.is_token_whitespace(), foreign_storage::Interval< T >.start, JoinColumnIterator.start, ai.heavy.jdbc.HeavyAIEscapeParser.Pair.start, JoinColumnTyped::Slice.start, JoinColumnTuple::Slice.start, generate_TableFunctionsFactory_init.Tokenize.start, and import_export::ImportStatus.start.
def generate_TableFunctionsFactory_init.Tokenize.tokens | ( | self | ) |
Definition at line 473 of file generate_TableFunctionsFactory_init.py.
References generate_TableFunctionsFactory_init.Tokenize._tokens.
Referenced by generate_TableFunctionsFactory_init.Parser.raise_parser_error().
generate_TableFunctionsFactory_init.Tokenize._line |
private
Definition at line 462 of file generate_TableFunctionsFactory_init.py.
Referenced by generate_TableFunctionsFactory_init.Tokenize.line().
generate_TableFunctionsFactory_init.Tokenize._tokens |
private
Definition at line 463 of file generate_TableFunctionsFactory_init.py.
Referenced by generate_TableFunctionsFactory_init.Parser.consume(), generate_TableFunctionsFactory_init.Parser.current_token(), generate_TableFunctionsFactory_init.Parser.expect(), generate_TableFunctionsFactory_init.Parser.is_at_end(), generate_TableFunctionsFactory_init.Parser.lookahead(), generate_TableFunctionsFactory_init.Tokenize.tokens(), and generate_TableFunctionsFactory_init.Parser.tokens().
generate_TableFunctionsFactory_init.Tokenize.curr |
Definition at line 465 of file generate_TableFunctionsFactory_init.py.
Referenced by generate_TableFunctionsFactory_init.Tokenize.add_token(), generate_TableFunctionsFactory_init.Tokenize.advance(), generate_TableFunctionsFactory_init.Tokenize.current_token(), generate_TableFunctionsFactory_init.Tokenize.is_at_end(), generate_TableFunctionsFactory_init.Tokenize.lookahead(), generate_TableFunctionsFactory_init.Tokenize.peek(), generate_TableFunctionsFactory_init.Tokenize.raise_tokenize_error(), and generate_TableFunctionsFactory_init.Tokenize.tokenize().
generate_TableFunctionsFactory_init.Tokenize.start |
Definition at line 464 of file generate_TableFunctionsFactory_init.py.
Referenced by generate_TableFunctionsFactory_init.Tokenize.add_token(), generate_TableFunctionsFactory_init.Tokenize.current_token(), and generate_TableFunctionsFactory_init.Tokenize.tokenize().