Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/sqlparse/lexer.py: 20%
35 statements
coverage.py v6.4.4, created at 2023-07-17 14:22 -0600
#
# Copyright (C) 2009-2020 the sqlparse authors and contributors
# <see AUTHORS file>
#
# This module is part of python-sqlparse and is released under
# the BSD License: https://opensource.org/licenses/BSD-3-Clause

"""SQL Lexer"""

# This code is based on the SqlLexer in pygments.
# http://pygments.org/
# It's separated from the rest of pygments to increase performance
# and to allow some customizations.

from io import TextIOBase

from sqlparse import tokens
from sqlparse.keywords import SQL_REGEX
from sqlparse.utils import consume


class Lexer:
    """Lexer

    Empty class, kept only for backwards compatibility.
    """

    @staticmethod
    def get_tokens(text, encoding=None):
29 """
30 Return an iterable of (tokentype, value) pairs generated from
31 `text`. If `unfiltered` is set to `True`, the filtering mechanism
32 is bypassed even if filters are defined.
34 Also preprocess the text, i.e. expand tabs and strip it if
35 wanted and applies registered filters.
37 Split ``text`` into (tokentype, text) pairs.
39 ``stack`` is the initial stack (default: ``['root']``)
40 """
        if isinstance(text, TextIOBase):
            text = text.read()
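
        # Decode bytes input; str passes through unchanged.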
        if isinstance(text, str):
            pass
        elif isinstance(text, bytes):
            if encoding:
                text = text.decode(encoding)
            else:
                try:
                    text = text.decode('utf-8')
                except UnicodeDecodeError:
                    text = text.decode('unicode-escape')
        else:
            raise TypeError("Expected text or file-like object, got {!r}"
                            .format(type(text)))
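
        # Scan position by position: the first pattern in SQL_REGEX that
        # matches at the current offset wins, and the matched span is
        # skipped by advancing the underlying enumerate iterator.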
        iterable = enumerate(text)
        for pos, char in iterable:
            for rexmatch, action in SQL_REGEX:
                m = rexmatch(text, pos)

                if not m:
                    continue
                elif isinstance(action, tokens._TokenType):
                    yield action, m.group()
                elif callable(action):
                    yield action(m.group())

                consume(iterable, m.end() - pos - 1)
                break
            else:
                yield tokens.Error, char
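
# A minimal sketch of calling the lexer directly (bytes are decoded as
# described in the docstring above; the token types shown are
# illustrative):
#
#     for ttype, value in Lexer().get_tokens(b"select 1"):
#         print(ttype, repr(value))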


def tokenize(sql, encoding=None):
    """Tokenize sql.

    Tokenize *sql* using the :class:`Lexer` and return a stream of
    ``(token type, value)`` pairs.
    """
    return Lexer().get_tokens(sql, encoding)
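
Example usage of the module-level helper (a sketch; the exact token types come from the patterns in sqlparse.keywords.SQL_REGEX):

    from sqlparse.lexer import tokenize

    for ttype, value in tokenize("select * from foo"):
        print(ttype, repr(value))
    # e.g. Token.Keyword.DML 'select'
    #      Token.Text.Whitespace ' '
    #      Token.Wildcard '*'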