You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo · Sign in
Socket

sql-metadata

Package Overview
Dependencies
Maintainers
1
Versions
42
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

sql-metadata - pypi Package Compare versions

Comparing version
2.13.0
to
2.14.0
+2
-1
PKG-INFO
Metadata-Version: 2.1
Name: sql_metadata
Version: 2.13.0
Version: 2.14.0
Summary: Uses tokenized query returned by python-sqlparse and generates query metadata

@@ -17,2 +17,3 @@ Home-page: https://github.com/macbre/sql-metadata

Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Requires-Dist: sqlparse (>=0.4.1,<0.6.0)

@@ -19,0 +20,0 @@ Project-URL: Repository, https://github.com/macbre/sql-metadata

[tool.poetry]
name = "sql_metadata"
version = "2.13.0"
version = "2.14.0"
license="MIT"

@@ -22,4 +22,4 @@ description = "Uses tokenized query returned by python-sqlparse and generates query metadata"

coverage = {extras = ["toml"], version = "^6.5"}
pylint = "^3.2.6"
pytest = "^8.3.2"
pylint = "^3.2.7"
pytest = "^8.3.3"
pytest-cov = "^5.0.0"

@@ -26,0 +26,0 @@ coveralls = "^3.3.1"

@@ -222,3 +222,3 @@ # pylint: disable=C0302

elif token.is_column_name_inside_insert_clause:
column = str(token.value).strip("`")
column = str(token.value)
self._add_to_columns_subsection(

@@ -373,6 +373,4 @@ keyword=token.last_keyword_normalized, column=column

continue
table_name = str(token.value.strip("`"))
token.token_type = TokenType.TABLE
tables.append(table_name)
tables.append(str(token.value))

@@ -814,3 +812,4 @@ self._tables = tables - with_names

@staticmethod
def _resolve_nested_query(
# pylint:disable=too-many-return-statements
def _resolve_nested_query( # noqa: C901
subquery_alias: str,

@@ -851,2 +850,5 @@ nested_queries_names: List[str],

return column_name
for table in subparser.tables:
if f"{table}.*" in subparser.columns:
return column_name
raise exc # pragma: no cover

@@ -1016,2 +1018,4 @@ resolved_column = subparser.columns[column_index]

"""
if token.is_keyword:
return False
return str(token) == "." or (

@@ -1030,3 +1034,3 @@ index + 1 < self.tokens_length

value, is_complex = self._combine_tokens(index=index, value=value)
index = index - 2
index = index - 1
token.value = value

@@ -1038,5 +1042,8 @@

"""
if index > 1 and str(self.non_empty_tokens[index - 1]) == ".":
prev_value = self.non_empty_tokens[index - 2].value.strip("`").strip('"')
value = f"{prev_value}.{value}"
if index > 1:
prev_value = self.non_empty_tokens[index - 1]
if not self._is_token_part_of_complex_identifier(prev_value, index - 1):
return value, False
prev_value = str(prev_value).strip("`")
value = f"{prev_value}{value}"
return value, True

@@ -1043,0 +1050,0 @@ return value, False