From a98fb9a791af81d73e117d5a75212526a257bbcd Mon Sep 17 00:00:00 2001
From: Adams Tower
Date: Wed, 10 Apr 2024 15:11:02 -0400
Subject: [PATCH] Remove changes from 3.1 that look wrong

Some look like an incorrect merge; some are leftover debugging code.
---
 superset/commands/annotation_layer/create.py |  2 +-
 superset/commands/annotation_layer/update.py |  4 +-
 superset/commands/dataset/duplicate.py       |  5 +-
 superset/commands/sql_lab/export.py          |  5 +-
 superset/connectors/sqla/models.py           |  5 +-
 superset/connectors/sqla/utils.py            |  2 +-
 superset/daos/dataset.py                     |  3 +-
 superset/datasets/api.py                     |  4 -
 superset/db_engine_specs/base.py             | 12 +--
 superset/db_engine_specs/bigquery.py         |  2 +-
 superset/explore/form_data/api.py            |  5 --
 superset/models/core.py                      | 12 +--
 superset/models/helpers.py                   |  2 +-
 superset/sql_lab.py                          |  5 +-
 superset/sql_parse.py                        | 91 +++----------------
 superset/sql_validators/presto_db.py         |  4 +-
 superset/sqllab/query_render.py              |  6 +-
 tests/integration_tests/charts/api_tests.py  |  2 +-
 tests/unit_tests/sql_parse_tests.py          | 58 ++++++++++---
 19 files changed, 97 insertions(+), 132 deletions(-)

diff --git a/superset/commands/annotation_layer/create.py b/superset/commands/annotation_layer/create.py
index b8f61dd9e0a60..6b87ad570363a 100644
--- a/superset/commands/annotation_layer/create.py
+++ b/superset/commands/annotation_layer/create.py
@@ -36,7 +36,7 @@ class CreateAnnotationLayerCommand(BaseCommand):
     def __init__(self, data: dict[str, Any]):
         self._properties = data.copy()
 
-    def run(self) -> None:
+    def run(self) -> Model:
         self.validate()
         try:
             return AnnotationLayerDAO.create(attributes=self._properties)
diff --git a/superset/commands/annotation_layer/update.py b/superset/commands/annotation_layer/update.py
index 0662e8e82e705..d15440882b155 100644
--- a/superset/commands/annotation_layer/update.py
+++ b/superset/commands/annotation_layer/update.py
@@ -40,9 +40,9 @@ def __init__(self, model_id: int, data: dict[str, Any]):
         self._properties = data.copy()
         self._model: Optional[AnnotationLayer] = None
 
-    def run(self) -> None:
+    def run(self) -> Model:
         self.validate()
-        assert self._models
+        assert self._model
 
         try:
             annotation_layer = AnnotationLayerDAO.update(self._model, self._properties)
diff --git a/superset/commands/dataset/duplicate.py b/superset/commands/dataset/duplicate.py
index 0ae47c35bca4d..850290422e1c5 100644
--- a/superset/commands/dataset/duplicate.py
+++ b/superset/commands/dataset/duplicate.py
@@ -70,7 +70,10 @@ def run(self) -> Model:
         table.normalize_columns = self._base_model.normalize_columns
         table.always_filter_main_dttm = self._base_model.always_filter_main_dttm
         table.is_sqllab_view = True
-        table.sql = ParsedQuery(self._base_model.sql).stripped()
+        table.sql = ParsedQuery(
+            self._base_model.sql,
+            engine=database.db_engine_spec.engine,
+        ).stripped()
         db.session.add(table)
         cols = []
         for config_ in self._base_model.columns:
diff --git a/superset/commands/sql_lab/export.py b/superset/commands/sql_lab/export.py
index 1b9b0e03442fa..aa6050f27f9ae 100644
--- a/superset/commands/sql_lab/export.py
+++ b/superset/commands/sql_lab/export.py
@@ -115,7 +115,10 @@ def run(
             limit = None
         else:
             sql = self._query.executed_sql
-            limit = ParsedQuery(sql).limit
+            limit = ParsedQuery(
+                sql,
+                engine=self._query.database.db_engine_spec.engine,
+            ).limit
             if limit is not None and self._query.limiting_factor in {
                 LimitingFactor.QUERY,
                 LimitingFactor.DROPDOWN,
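Note: the recurring change in this patch threads the database's engine name into
ParsedQuery so SQL is parsed with the right dialect instead of a generic one. A
minimal sketch of the call pattern used throughout, assuming only what the hunks
show (a Database object exposing db_engine_spec; the query string is illustrative):

    from superset.sql_parse import ParsedQuery

    def stripped_sql(database, sql: str) -> str:
        # db_engine_spec.engine is the dialect name, e.g. "postgresql" or "trino"
        parsed = ParsedQuery(sql, engine=database.db_engine_spec.engine)
        return parsed.stripped()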
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index d8b1967683499..e26e21ba5b095 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -1461,7 +1461,10 @@ def get_from_clause(
             return self.get_sqla_table(), None
 
         from_sql = self.get_rendered_sql(template_processor)
-        parsed_query = ParsedQuery(from_sql)
+        parsed_query = ParsedQuery(
+            from_sql,
+            engine=self.db_engine_spec.engine,
+        )
         if not (
             parsed_query.is_unknown()
             or self.db_engine_spec.is_readonly_query(parsed_query)
diff --git a/superset/connectors/sqla/utils.py b/superset/connectors/sqla/utils.py
index 66594084c82d5..688be53515040 100644
--- a/superset/connectors/sqla/utils.py
+++ b/superset/connectors/sqla/utils.py
@@ -111,7 +111,7 @@ def get_virtual_table_metadata(dataset: SqlaTable) -> list[ResultSetColumnType]:
     sql = dataset.get_template_processor().process_template(
         dataset.sql, **dataset.template_params_dict
     )
-    parsed_query = ParsedQuery(sql)
+    parsed_query = ParsedQuery(sql, engine=db_engine_spec.engine)
     if not db_engine_spec.is_readonly_query(parsed_query):
         raise SupersetSecurityException(
             SupersetError(
diff --git a/superset/daos/dataset.py b/superset/daos/dataset.py
index 30f10e013e6fe..0a4425dbd7a66 100644
--- a/superset/daos/dataset.py
+++ b/superset/daos/dataset.py
@@ -75,8 +75,7 @@ def validate_table_exists(
             database.get_table(table_name, schema=schema)
             return True
         except SQLAlchemyError as ex:  # pragma: no cover
-            # logger.warning("Got an error %s validating table: %s", str(ex), table_name)
-            logger.exception("Got an error %s validating table: %s", str(ex), table_name)
+            logger.warning("Got an error %s validating table: %s", str(ex), table_name)
             return False
 
     @staticmethod
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index b48d7ef468dcd..bc4a42e58ee7e 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -277,10 +277,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     list_outer_default_load = True
     show_outer_default_load = True
 
-    def response_400(self, message=None):
-        logger.error(f"Error from datasets api: {message}")
-        return super().response_400(message=message)
-
     @expose("/", methods=("POST",))
     @protect()
     @safe
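Note: logger.exception logs at ERROR level and appends the active traceback, which
was only useful while debugging; logger.warning restores the quieter original
behavior. A quick self-contained illustration with the standard logging module:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    try:
        raise ValueError("boom")
    except ValueError as ex:
        logger.warning("Got an error %s validating table: %s", ex, "t1")  # message only
        logger.exception("same failure")  # ERROR level plus the full traceback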
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index ce67cb448cd34..66293ccf52bbc 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -900,7 +900,7 @@ def apply_limit_to_sql(
             return database.compile_sqla_query(qry)
 
         if cls.limit_method == LimitMethod.FORCE_LIMIT:
-            parsed_query = sql_parse.ParsedQuery(sql)
+            parsed_query = sql_parse.ParsedQuery(sql, engine=cls.engine)
             sql = parsed_query.set_or_update_query_limit(limit, force=force)
 
         return sql
@@ -981,7 +981,7 @@ def get_limit_from_sql(cls, sql: str) -> int | None:
         :param sql: SQL query
         :return: Value of limit clause in query
         """
-        parsed_query = sql_parse.ParsedQuery(sql)
+        parsed_query = sql_parse.ParsedQuery(sql, engine=cls.engine)
         return parsed_query.limit
 
     @classmethod
@@ -993,7 +993,7 @@ def set_or_update_query_limit(cls, sql: str, limit: int) -> str:
         :param limit: New limit to insert/replace into query
         :return: Query with new limit
         """
-        parsed_query = sql_parse.ParsedQuery(sql)
+        parsed_query = sql_parse.ParsedQuery(sql, engine=cls.engine)
         return parsed_query.set_or_update_query_limit(limit)
 
     @classmethod
@@ -1490,7 +1490,7 @@ def process_statement(cls, statement: str, database: Database) -> str:
         :param database: Database instance
         :return: Dictionary with different costs
         """
-        parsed_query = ParsedQuery(statement)
+        parsed_query = ParsedQuery(statement, engine=cls.engine)
         sql = parsed_query.stripped()
         sql_query_mutator = current_app.config["SQL_QUERY_MUTATOR"]
         mutate_after_split = current_app.config["MUTATE_AFTER_SPLIT"]
@@ -1525,7 +1525,7 @@ def estimate_query_cost(
                 "Database does not support cost estimation"
             )
 
-        parsed_query = sql_parse.ParsedQuery(sql)
+        parsed_query = sql_parse.ParsedQuery(sql, engine=cls.engine)
         statements = parsed_query.get_statements()
 
         costs = []
@@ -1586,7 +1586,7 @@ def execute(  # pylint: disable=unused-argument
         :return:
         """
         if not cls.allows_sql_comments:
-            query = sql_parse.strip_comments_from_sql(query)
+            query = sql_parse.strip_comments_from_sql(query, engine=cls.engine)
 
         if cls.arraysize:
             cursor.arraysize = cls.arraysize
diff --git a/superset/db_engine_specs/bigquery.py b/superset/db_engine_specs/bigquery.py
index 8e7ed0bf7d061..a8d834276e60c 100644
--- a/superset/db_engine_specs/bigquery.py
+++ b/superset/db_engine_specs/bigquery.py
@@ -435,7 +435,7 @@ def estimate_query_cost(
         if not cls.get_allow_cost_estimate(extra):
             raise SupersetException("Database does not support cost estimation")
 
-        parsed_query = sql_parse.ParsedQuery(sql)
+        parsed_query = sql_parse.ParsedQuery(sql, engine=cls.engine)
         statements = parsed_query.get_statements()
         costs = []
         for statement in statements:
diff --git a/superset/explore/form_data/api.py b/superset/explore/form_data/api.py
index 4e770df7a2037..6c882d92a6fe6 100644
--- a/superset/explore/form_data/api.py
+++ b/superset/explore/form_data/api.py
@@ -106,11 +106,6 @@ def post(self) -> Response:
             return self.response(201, key=key)
         except ValidationError as ex:
             return self.response(400, message=ex.messages)
-        # except (
-        #     ChartAccessDeniedError,
-        #     DatasetAccessDeniedError,
-        #     TemporaryCacheAccessDeniedError,
-        # ) as ex:
         except TemporaryCacheAccessDeniedError as ex:
             return self.response(403, message=str(ex))
         except TemporaryCacheResourceNotFoundError as ex:
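Note: both cost estimators split the submitted SQL blob into individual statements
and cost each one separately. A minimal sketch of that flow, reusing
get_statements() from the hunks above (the engine value is illustrative):

    from superset.sql_parse import ParsedQuery

    parsed = ParsedQuery("SELECT 1; SELECT 2;", engine="trino")
    for statement in parsed.get_statements():
        # each statement would be estimated on its own, e.g. via EXPLAIN
        print(statement)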
Masked URL: %s", str(masked_url)) if self.impersonate_user: diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 1dc5a57da5466..4ff206882ec75 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -1094,7 +1094,7 @@ def get_from_clause( """ from_sql = self.get_rendered_sql(template_processor) - parsed_query = ParsedQuery(from_sql) + parsed_query = ParsedQuery(from_sql, engine=self.db_engine_spec.engine) if not ( parsed_query.is_unknown() or self.db_engine_spec.is_readonly_query(parsed_query) diff --git a/superset/sql_lab.py b/superset/sql_lab.py index 0c81bc4c2d8fd..059e436962539 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -208,7 +208,7 @@ def execute_sql_statement( # pylint: disable=too-many-arguments, too-many-local database: Database = query.database db_engine_spec = database.db_engine_spec - parsed_query = ParsedQuery(sql_statement) + parsed_query = ParsedQuery(sql_statement, engine=db_engine_spec.engine) if is_feature_enabled("RLS_IN_SQLLAB"): # There are two ways to insert RLS: either replacing the table with a subquery # that has the RLS, or appending the RLS to the ``WHERE`` clause. The former is @@ -228,7 +228,8 @@ def execute_sql_statement( # pylint: disable=too-many-arguments, too-many-local database.id, query.schema, ) - ) + ), + engine=db_engine_spec.engine, ) sql = parsed_query.stripped() diff --git a/superset/sql_parse.py b/superset/sql_parse.py index 7fe320c432057..d2e20f9cbaed8 100644 --- a/superset/sql_parse.py +++ b/superset/sql_parse.py @@ -20,7 +20,8 @@ import logging import re -from collections.abc import Iterator +import urllib.parse +from collections.abc import Iterable, Iterator from dataclasses import dataclass from typing import Any, cast, TYPE_CHECKING @@ -66,7 +67,7 @@ try: from sqloxide import parse_sql as sqloxide_parse -except: # pylint: disable=bare-except +except (ImportError, ModuleNotFoundError): sqloxide_parse = None if TYPE_CHECKING: @@ -227,7 +228,11 @@ def strip_comments_from_sql(statement: str, engine: str | None = None) -> str: :param statement: A string with the SQL statement :return: SQL statement without comments """ - return ParsedQuery(statement).strip_comments() if "--" in statement else statement + return ( + ParsedQuery(statement, engine=engine).strip_comments() + if "--" in statement + else statement + ) @dataclass(eq=True, frozen=True) @@ -246,7 +251,7 @@ def __str__(self) -> str: """ return ".".join( - parse.quote(part, safe="").replace(".", "%2E") + urllib.parse.quote(part, safe="").replace(".", "%2E") for part in [self.catalog, self.schema, self.table] if part ) @@ -266,6 +271,7 @@ def __init__( sql_statement = sqlparse.format(sql_statement, strip_comments=True) self.sql: str = sql_statement + self._dialect = SQLGLOT_DIALECTS.get(engine) if engine else None self._tables: set[Table] = set() self._alias_names: set[str] = set() self._limit: int | None = None @@ -278,12 +284,7 @@ def __init__( @property def tables(self) -> set[Table]: if not self._tables: - for statement in self._parsed: - self._extract_from_token(statement) - - self._tables = { - table for table in self._tables if str(table) not in self._alias_names - } + self._tables = self._extract_tables_from_sql() return self._tables def _extract_tables_from_sql(self) -> set[Table]: @@ -572,28 +573,6 @@ def get_table(tlist: TokenList) -> Table | None: def _is_identifier(token: Token) -> bool: return isinstance(token, (IdentifierList, Identifier)) - def _process_tokenlist(self, token_list: TokenList) -> None: - """ - Add 
diff --git a/superset/sql_parse.py b/superset/sql_parse.py
index 7fe320c432057..d2e20f9cbaed8 100644
--- a/superset/sql_parse.py
+++ b/superset/sql_parse.py
@@ -20,7 +20,8 @@
 import logging
 import re
-from collections.abc import Iterator
+import urllib.parse
+from collections.abc import Iterable, Iterator
 from dataclasses import dataclass
 from typing import Any, cast, TYPE_CHECKING
@@ -66,7 +67,7 @@
 
 try:
     from sqloxide import parse_sql as sqloxide_parse
-except:  # pylint: disable=bare-except
+except (ImportError, ModuleNotFoundError):
     sqloxide_parse = None
 
 if TYPE_CHECKING:
@@ -227,7 +228,11 @@ def strip_comments_from_sql(statement: str, engine: str | None = None) -> str:
     :param statement: A string with the SQL statement
     :return: SQL statement without comments
     """
-    return ParsedQuery(statement).strip_comments() if "--" in statement else statement
+    return (
+        ParsedQuery(statement, engine=engine).strip_comments()
+        if "--" in statement
+        else statement
+    )
 
 
 @dataclass(eq=True, frozen=True)
@@ -246,7 +251,7 @@ def __str__(self) -> str:
         """
         return ".".join(
-            parse.quote(part, safe="").replace(".", "%2E")
+            urllib.parse.quote(part, safe="").replace(".", "%2E")
             for part in [self.catalog, self.schema, self.table]
             if part
         )
@@ -266,6 +271,7 @@ def __init__(
             sql_statement = sqlparse.format(sql_statement, strip_comments=True)
 
         self.sql: str = sql_statement
+        self._dialect = SQLGLOT_DIALECTS.get(engine) if engine else None
         self._tables: set[Table] = set()
         self._alias_names: set[str] = set()
         self._limit: int | None = None
@@ -278,12 +284,7 @@ def __init__(
     @property
     def tables(self) -> set[Table]:
         if not self._tables:
-            for statement in self._parsed:
-                self._extract_from_token(statement)
-
-            self._tables = {
-                table for table in self._tables if str(table) not in self._alias_names
-            }
+            self._tables = self._extract_tables_from_sql()
         return self._tables
 
     def _extract_tables_from_sql(self) -> set[Table]:
@@ -572,28 +573,6 @@ def get_table(tlist: TokenList) -> Table | None:
     def _is_identifier(token: Token) -> bool:
         return isinstance(token, (IdentifierList, Identifier))
 
-    def _process_tokenlist(self, token_list: TokenList) -> None:
-        """
-        Add table names to table set
-
-        :param token_list: TokenList to be processed
-        """
-        # exclude subselects
-        if "(" not in str(token_list):
-            table = self.get_table(token_list)
-            if table and not table.table.startswith(CTE_PREFIX):
-                self._tables.add(table)
-            return
-
-        # store aliases
-        if token_list.has_alias():
-            self._alias_names.add(token_list.get_alias())
-
-        # some aliases are not parsed properly
-        if token_list.tokens[0].ttype == Name:
-            self._alias_names.add(token_list.tokens[0].value)
-        self._extract_from_token(token_list)
-
     def as_create_table(
         self,
         table_name: str,
@@ -620,50 +599,6 @@ def as_create_table(
         exec_sql += f"CREATE {method} {full_table_name} AS \n{sql}"
         return exec_sql
 
-    def _extract_from_token(self, token: Token) -> None:
-        """
-        <Identifier> store a list of subtokens and <IdentifierList> store lists of
-        subtoken list.
-
-        It extracts <IdentifierList> and <Identifier> from :param token: and loops
-        through all subtokens recursively. It finds table_name_preceding_token and
-        passes <IdentifierList> and <Identifier> to self._process_tokenlist to populate
-        self._tables.
-
-        :param token: instance of Token or child class, e.g. TokenList, to be processed
-        """
-        if not hasattr(token, "tokens"):
-            return
-
-        table_name_preceding_token = False
-
-        for item in token.tokens:
-            if item.is_group and (
-                not self._is_identifier(item) or isinstance(item.tokens[0], Parenthesis)
-            ):
-                self._extract_from_token(item)
-
-            if item.ttype in Keyword and (
-                item.normalized in PRECEDES_TABLE_NAME
-                or item.normalized.endswith(" JOIN")
-            ):
-                table_name_preceding_token = True
-                continue
-
-            if item.ttype in Keyword:
-                table_name_preceding_token = False
-                continue
-
-            if table_name_preceding_token:
-                if isinstance(item, Identifier):
-                    self._process_tokenlist(item)
-                elif isinstance(item, IdentifierList):
-                    for token2 in item.get_identifiers():
-                        if isinstance(token2, TokenList):
-                            self._process_tokenlist(token2)
-            elif isinstance(item, IdentifierList):
-                if any(not self._is_identifier(token2) for token2 in item.tokens):
-                    self._extract_from_token(item)
-
     def set_or_update_query_limit(self, new_limit: int, force: bool = False) -> str:
         """Returns the query with the specified limit.
@@ -1060,7 +995,7 @@ def insert_rls_in_predicate(
 
 
 # mapping between sqloxide and SQLAlchemy dialects
-SQLOXITE_DIALECTS = {
+SQLOXIDE_DIALECTS = {
     "ansi": {"trino", "trinonative", "presto"},
     "hive": {"hive", "databricks"},
     "ms": {"mssql"},
@@ -1093,7 +1028,7 @@ def extract_table_references(
     tree = None
 
     if sqloxide_parse:
-        for dialect, sqla_dialects in SQLOXITE_DIALECTS.items():
+        for dialect, sqla_dialects in SQLOXIDE_DIALECTS.items():
             if sqla_dialect in sqla_dialects:
                 break
         sql_text = RE_JINJA_BLOCK.sub(" ", sql_text)
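Note: Table.__str__ percent-encodes each name part so a literal dot inside an
identifier cannot be confused with the catalog.schema.table separators;
urllib.parse.quote leaves "." untouched by default, hence the explicit replace.
A quick check of that behavior:

    import urllib.parse

    parts = ["cat", "sch.ema", "tbl"]
    print(".".join(urllib.parse.quote(p, safe="").replace(".", "%2E") for p in parts))
    # prints: cat.sch%2Eema.tbl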
""" - parsed_query = ParsedQuery(sql) + parsed_query = ParsedQuery(sql, engine=db_engine_spec.engine) statements = parsed_query.get_statements() logger.info("Validating %i statement(s)", len(statements)) diff --git a/superset/sqllab/query_render.py b/superset/sqllab/query_render.py index c729d3a691ff1..caf9a3cb2b206 100644 --- a/superset/sqllab/query_render.py +++ b/superset/sqllab/query_render.py @@ -58,7 +58,11 @@ def render(self, execution_context: SqlJsonExecutionContext) -> str: database=query_model.database, query=query_model ) - parsed_query = ParsedQuery(query_model.sql, strip_comments=True) + parsed_query = ParsedQuery( + query_model.sql, + strip_comments=True, + engine=query_model.database.db_engine_spec.engine, + ) rendered_query = sql_template_processor.process_template( parsed_query.stripped(), **execution_context.template_params ) diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index 2ce3a6a56fbe1..9dec56fedd4c7 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -654,7 +654,7 @@ def test_chart_get_list_no_username(self): self.login(username="admin") uri = f"api/v1/chart/{chart_id}" rv = self.put_assert_metric(uri, chart_data, "put") - assert rv.status_code == 200 + self.assertEqual(rv.status_code, 200) model = db.session.query(Slice).get(chart_id) response = self.get_assert_metric("api/v1/chart/", "get_list") diff --git a/tests/unit_tests/sql_parse_tests.py b/tests/unit_tests/sql_parse_tests.py index 214a968540a46..d3d432bb981d9 100644 --- a/tests/unit_tests/sql_parse_tests.py +++ b/tests/unit_tests/sql_parse_tests.py @@ -45,11 +45,11 @@ ) -def extract_tables(query: str) -> set[Table]: +def extract_tables(query: str, engine: Optional[str] = None) -> set[Table]: """ Helper function to extract tables referenced in a query. """ - return ParsedQuery(query).tables + return ParsedQuery(query, engine=engine).tables def test_table() -> None: @@ -101,8 +101,13 @@ def test_extract_tables() -> None: Table("left_table") } - # reverse select - assert extract_tables("FROM t1 SELECT field") == {Table("t1")} + assert extract_tables( + "SELECT FROM (SELECT FROM forbidden_table) AS forbidden_table;" + ) == {Table("forbidden_table")} + + assert extract_tables( + "select * from (select * from forbidden_table) forbidden_table" + ) == {Table("forbidden_table")} def test_extract_tables_subselect() -> None: @@ -303,7 +308,7 @@ def test_extract_tables_show_tables_from() -> None: """ Test ``SHOW TABLES FROM``. 
""" - assert extract_tables("SHOW TABLES FROM s1 like '%order%'") == set() + assert extract_tables("SHOW TABLES FROM s1 like '%order%'", "mysql") == set() def test_extract_tables_show_columns_from() -> None: @@ -344,7 +349,7 @@ def test_extract_tables_where_subquery() -> None: """ SELECT name FROM t1 -WHERE regionkey EXISTS (SELECT regionkey FROM t2) +WHERE EXISTS (SELECT 1 FROM t2 WHERE t1.regionkey = t2.regionkey); """ ) == {Table("t1"), Table("t2")} @@ -559,6 +564,18 @@ def test_extract_tables_reusing_aliases() -> None: == {Table("src")} ) + # weird query with circular dependency + assert ( + extract_tables( + """ +with src as ( select key from q2 where key = '5'), +q2 as ( select key from src where key = '5') +select * from (select key from src) a +""" + ) + == set() + ) + def test_extract_tables_multistatement() -> None: """ @@ -572,6 +589,10 @@ def test_extract_tables_multistatement() -> None: Table("t1"), Table("t2"), } + assert extract_tables( + "ADD JAR file:///hive.jar; SELECT * FROM t1;", + engine="hive", + ) == {Table("t1")} def test_extract_tables_complex() -> None: @@ -698,7 +719,8 @@ def test_extract_tables_nested_select() -> None: select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(TABLE_NAME) from INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA like "%bi%"),0x7e))); -""" +""", + "mysql", ) == {Table("COLUMNS", "INFORMATION_SCHEMA")} ) @@ -709,7 +731,8 @@ def test_extract_tables_nested_select() -> None: select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(COLUMN_NAME) from INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME="bi_achievement_daily"),0x7e))); -""" +""", + "mysql", ) == {Table("COLUMNS", "INFORMATION_SCHEMA")} ) @@ -1339,6 +1362,14 @@ def test_sqlparse_issue_652(): "(SELECT table_name FROM /**/ information_schema.tables WHERE table_name LIKE '%user%' LIMIT 1)", True, ), + ( + "SELECT FROM (SELECT FROM forbidden_table) AS forbidden_table;", + True, + ), + ( + "SELECT * FROM (SELECT * FROM forbidden_table) forbidden_table", + True, + ), ], ) def test_has_table_query(sql: str, expected: bool) -> None: @@ -1820,16 +1851,17 @@ def test_extract_table_references(mocker: MockerFixture) -> None: # test falling back to sqlparse logger = mocker.patch("superset.sql_parse.logger") sql = "SELECT * FROM table UNION ALL SELECT * FROM other_table" - assert extract_table_references( - sql, - "trino", - ) == {Table(table="other_table", schema=None, catalog=None)} + assert extract_table_references(sql, "trino") == { + Table(table="table", schema=None, catalog=None), + Table(table="other_table", schema=None, catalog=None), + } logger.warning.assert_called_once() logger = mocker.patch("superset.migrations.shared.utils.logger") sql = "SELECT * FROM table UNION ALL SELECT * FROM other_table" assert extract_table_references(sql, "trino", show_warning=False) == { - Table(table="other_table", schema=None, catalog=None) + Table(table="table", schema=None, catalog=None), + Table(table="other_table", schema=None, catalog=None), } logger.warning.assert_not_called()