New Research: Supply Chain Attack on Axios Pulls Malicious Dependency from npm. Details
Socket
Book a Demo | Sign in
Socket

sqlalchemy-bigquery

Package Overview
Dependencies
Maintainers
4
Versions
36
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

sqlalchemy-bigquery - pypi Package Compare versions

Comparing version
1.11.0.dev2
to
1.10.0
+0
-1
AUTHORS

@@ -22,3 +22,2 @@ # This is the list of sqlalchemy-bigquery's significant contributors.

Romain Rigaux
Sharoon Thomas (sharoonthomas)
Sumedh Sakdeo

@@ -25,0 +24,0 @@ Tim Swast (tswast)

+11
-13
Metadata-Version: 2.1
Name: sqlalchemy-bigquery
Version: 1.11.0.dev2
Version: 1.10.0
Summary: SQLAlchemy dialect for BigQuery

@@ -29,5 +29,5 @@ Home-page: https://github.com/googleapis/python-bigquery-sqlalchemy

Requires-Dist: google-auth<3.0.0dev,>=1.25.0
Requires-Dist: google-cloud-bigquery<4.0.0dev,>=3.3.6
Requires-Dist: google-cloud-bigquery<4.0.0dev,>=2.25.2
Requires-Dist: packaging
Requires-Dist: sqlalchemy<2.1,>=1.4.16
Requires-Dist: sqlalchemy<2.0.0dev,>=1.2.0
Provides-Extra: geography

@@ -47,11 +47,11 @@ Requires-Dist: GeoAlchemy2; extra == "geography"

Provides-Extra: all
Requires-Dist: alembic; extra == "all"
Requires-Dist: google-cloud-bigquery-storage<3.0.0dev,>=2.0.0; extra == "all"
Requires-Dist: shapely; extra == "all"
Requires-Dist: pyarrow>=3.0.0; extra == "all"
Requires-Dist: grpcio<2.0dev,>=1.49.1; python_version >= "3.11" and extra == "all"
Requires-Dist: packaging; extra == "all"
Requires-Dist: pytz; extra == "all"
Requires-Dist: google-cloud-bigquery-storage<3.0.0dev,>=2.0.0; extra == "all"
Requires-Dist: grpcio<2.0dev,>=1.49.1; python_version >= "3.11" and extra == "all"
Requires-Dist: GeoAlchemy2; extra == "all"
Requires-Dist: grpcio<2.0dev,>=1.47.0; extra == "all"
Requires-Dist: alembic; extra == "all"
Requires-Dist: pyarrow>=3.0.0; extra == "all"
Requires-Dist: shapely; extra == "all"

@@ -95,4 +95,3 @@ SQLAlchemy Dialect for BigQuery

.. note::
This library is a prerelease to gauge compatiblity with SQLAlchemy
versions >= 1.4.16 and < 2.1
This library is only compatible with SQLAlchemy versions < 2.0.0

@@ -170,5 +169,4 @@ Installation

table = Table('dataset.table', MetaData(bind=engine), autoload=True)
print(select([func.count('*')], from_obj=table().scalar())
print(select([func.count('*')], from_obj=table).scalar())
Project

@@ -345,3 +343,3 @@ ^^^^^^^

'bigquery://some-project/some-dataset?user_supplied_client=True',
connect_args={'client': custom_bq_client},
connect_args={'client': custom_bq_client},
)

@@ -348,0 +346,0 @@

@@ -38,4 +38,3 @@ SQLAlchemy Dialect for BigQuery

.. note::
This library is a prerelease to gauge compatiblity with SQLAlchemy
versions >= 1.4.16 and < 2.1
This library is only compatible with SQLAlchemy versions < 2.0.0

@@ -113,5 +112,4 @@ Installation

table = Table('dataset.table', MetaData(bind=engine), autoload=True)
print(select([func.count('*')], from_obj=table().scalar())
print(select([func.count('*')], from_obj=table).scalar())
Project

@@ -288,3 +286,3 @@ ^^^^^^^

'bigquery://some-project/some-dataset?user_supplied_client=True',
connect_args={'client': custom_bq_client},
connect_args={'client': custom_bq_client},
)

@@ -291,0 +289,0 @@

@@ -102,5 +102,5 @@ #!/usr/bin/env python

"google-auth>=1.25.0,<3.0.0dev", # Work around pip wack.
"google-cloud-bigquery>=3.3.6,<4.0.0dev",
"google-cloud-bigquery>=2.25.2,<4.0.0dev",
"packaging",
"sqlalchemy>=1.4.16,<2.1",
"sqlalchemy>=1.2.0,<2.0.0dev",
],

@@ -107,0 +107,0 @@ extras_require=extras,

Metadata-Version: 2.1
Name: sqlalchemy-bigquery
Version: 1.11.0.dev2
Version: 1.10.0
Summary: SQLAlchemy dialect for BigQuery

@@ -29,5 +29,5 @@ Home-page: https://github.com/googleapis/python-bigquery-sqlalchemy

Requires-Dist: google-auth<3.0.0dev,>=1.25.0
Requires-Dist: google-cloud-bigquery<4.0.0dev,>=3.3.6
Requires-Dist: google-cloud-bigquery<4.0.0dev,>=2.25.2
Requires-Dist: packaging
Requires-Dist: sqlalchemy<2.1,>=1.4.16
Requires-Dist: sqlalchemy<2.0.0dev,>=1.2.0
Provides-Extra: geography

@@ -47,11 +47,11 @@ Requires-Dist: GeoAlchemy2; extra == "geography"

Provides-Extra: all
Requires-Dist: alembic; extra == "all"
Requires-Dist: google-cloud-bigquery-storage<3.0.0dev,>=2.0.0; extra == "all"
Requires-Dist: shapely; extra == "all"
Requires-Dist: pyarrow>=3.0.0; extra == "all"
Requires-Dist: grpcio<2.0dev,>=1.49.1; python_version >= "3.11" and extra == "all"
Requires-Dist: packaging; extra == "all"
Requires-Dist: pytz; extra == "all"
Requires-Dist: google-cloud-bigquery-storage<3.0.0dev,>=2.0.0; extra == "all"
Requires-Dist: grpcio<2.0dev,>=1.49.1; python_version >= "3.11" and extra == "all"
Requires-Dist: GeoAlchemy2; extra == "all"
Requires-Dist: grpcio<2.0dev,>=1.47.0; extra == "all"
Requires-Dist: alembic; extra == "all"
Requires-Dist: pyarrow>=3.0.0; extra == "all"
Requires-Dist: shapely; extra == "all"

@@ -95,4 +95,3 @@ SQLAlchemy Dialect for BigQuery

.. note::
This library is a prerelease to gauge compatiblity with SQLAlchemy
versions >= 1.4.16 and < 2.1
This library is only compatible with SQLAlchemy versions < 2.0.0

@@ -170,5 +169,4 @@ Installation

table = Table('dataset.table', MetaData(bind=engine), autoload=True)
print(select([func.count('*')], from_obj=table().scalar())
print(select([func.count('*')], from_obj=table).scalar())
Project

@@ -345,3 +343,3 @@ ^^^^^^^

'bigquery://some-project/some-dataset?user_supplied_client=True',
connect_args={'client': custom_bq_client},
connect_args={'client': custom_bq_client},
)

@@ -348,0 +346,0 @@

google-api-core!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0,<3.0.0dev,>=1.31.5
google-auth<3.0.0dev,>=1.25.0
google-cloud-bigquery<4.0.0dev,>=3.3.6
google-cloud-bigquery<4.0.0dev,>=2.25.2
packaging
sqlalchemy<2.1,>=1.4.16
sqlalchemy<2.0.0dev,>=1.2.0

@@ -11,10 +11,10 @@ [alembic]

[all]
alembic
google-cloud-bigquery-storage<3.0.0dev,>=2.0.0
shapely
pyarrow>=3.0.0
packaging
pytz
google-cloud-bigquery-storage<3.0.0dev,>=2.0.0
GeoAlchemy2
grpcio<2.0dev,>=1.47.0
alembic
pyarrow>=3.0.0
shapely

@@ -21,0 +21,0 @@ [all:python_version >= "3.11"]

@@ -20,2 +20,3 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

import packaging.version
import sqlalchemy.sql.default_comparator

@@ -27,6 +28,11 @@ import sqlalchemy.sql.sqltypes

import sqlalchemy.sql.coercions
import sqlalchemy.sql.roles
sqlalchemy_1_4_or_more = packaging.version.parse(
sqlalchemy.__version__
) >= packaging.version.parse("1.4")
if sqlalchemy_1_4_or_more:
import sqlalchemy.sql.coercions
import sqlalchemy.sql.roles
def _get_subtype_col_spec(type_):

@@ -102,4 +108,2 @@ global _get_subtype_col_spec

return self[name]
else:
raise AttributeError(name)

@@ -109,12 +113,28 @@ comparator_factory = Comparator

def _field_index(self, name, operator):
return sqlalchemy.sql.coercions.expect(
sqlalchemy.sql.roles.BinaryElementRole,
name,
expr=self.expr,
operator=operator,
bindparam_type=sqlalchemy.types.String(),
)
# In the implementations of _field_index below, we're stealing from
# the JSON type implementation, but the code to steal changed in
# 1.4. :/
if sqlalchemy_1_4_or_more:
def _field_index(self, name, operator):
return sqlalchemy.sql.coercions.expect(
sqlalchemy.sql.roles.BinaryElementRole,
name,
expr=self.expr,
operator=operator,
bindparam_type=sqlalchemy.types.String(),
)
else:
def _field_index(self, name, operator):
return sqlalchemy.sql.default_comparator._check_literal(
self.expr,
operator,
name,
bindparam_type=sqlalchemy.types.String(),
)
def struct_getitem_op(a, b):

@@ -121,0 +141,0 @@ raise NotImplementedError()

@@ -166,3 +166,3 @@ # Copyright (c) 2017 The sqlalchemy-bigquery Authors

)
def __distribute_types_to_expanded_placeholders(self, m): # pragma: NO COVER
def __distribute_types_to_expanded_placeholders(self, m):
# If we have an in parameter, it sometimes gets expaned to 0 or more

@@ -178,4 +178,2 @@ # parameters and we need to move the type marker to each

# `%(foo)s` gets expaneded to `%(foo_0)s, `%(foo_1)s, ...`.
# Coverage: despite our best efforts, never recognized this segment of code as being tested.
placeholders, type_ = m.groups()

@@ -226,3 +224,3 @@ if placeholders:

# print(
# select(func.unnest(foo.c.objects).alias('foo_objects').column)
# select([func.unnest(foo.c.objects).alias('foo_objects').column])
# .compile(engine))

@@ -364,3 +362,7 @@ #

__expanding_text = "POSTCOMPILE"
__expanding_text = (
"EXPANDING"
if __sqlalchemy_version_info < packaging.version.parse("1.4")
else "POSTCOMPILE"
)

@@ -393,2 +395,5 @@ # https://github.com/sqlalchemy/sqlalchemy/commit/f79df12bd6d99b8f6f09d4bf07722638c4b4c159

def visit_empty_set_expr(self, element_types):
return ""
def visit_not_in_op_binary(self, binary, operator, **kw):

@@ -427,4 +432,4 @@ return (

def visit_not_contains_op_binary(self, binary, operator, **kw):
return super(BigQueryCompiler, self).visit_not_contains_op_binary(
def visit_notcontains_op_binary(self, binary, operator, **kw):
return super(BigQueryCompiler, self).visit_notcontains_op_binary(
self._maybe_reescape(binary), operator, **kw

@@ -438,4 +443,4 @@ )

def visit_not_startswith_op_binary(self, binary, operator, **kw):
return super(BigQueryCompiler, self).visit_not_startswith_op_binary(
def visit_notstartswith_op_binary(self, binary, operator, **kw):
return super(BigQueryCompiler, self).visit_notstartswith_op_binary(
self._maybe_reescape(binary), operator, **kw

@@ -449,4 +454,4 @@ )

def visit_not_endswith_op_binary(self, binary, operator, **kw):
return super(BigQueryCompiler, self).visit_not_endswith_op_binary(
def visit_notendswith_op_binary(self, binary, operator, **kw):
return super(BigQueryCompiler, self).visit_notendswith_op_binary(
self._maybe_reescape(binary), operator, **kw

@@ -516,4 +521,3 @@ )

#
# Coverage: despite our best efforts, never recognized this segment of code as being tested.
if getattr(bindparam, "expand_op", None) is not None: # pragma: NO COVER
if getattr(bindparam, "expand_op", None) is not None:
assert bindparam.expand_op.__name__.endswith("in_op") # in in

@@ -652,11 +656,11 @@ bindparam = bindparam._clone(maintain_key=True)

# BigQuery has no support for foreign keys.
def visit_foreign_key_constraint(self, constraint, **kw):
def visit_foreign_key_constraint(self, constraint):
return None
# BigQuery has no support for primary keys.
def visit_primary_key_constraint(self, constraint, **kw):
def visit_primary_key_constraint(self, constraint):
return None
# BigQuery has no support for unique constraints.
def visit_unique_constraint(self, constraint, **kw):
def visit_unique_constraint(self, constraint):
return None

@@ -769,3 +773,3 @@

def visit_set_table_comment(self, create, **kw):
def visit_set_table_comment(self, create):
table_name = self.preparer.format_table(create.element)

@@ -777,3 +781,3 @@ description = self.sql_compiler.render_literal_value(

def visit_drop_table_comment(self, drop, **kw):
def visit_drop_table_comment(self, drop):
table_name = self.preparer.format_table(drop.element)

@@ -1041,12 +1045,4 @@ return f"ALTER TABLE {table_name} SET OPTIONS(description=null)"

def dbapi(cls):
"""
Use `import_dbapi()` instead.
Maintained for backward compatibility.
"""
return dbapi
@classmethod
def import_dbapi(cls):
return dbapi
@staticmethod

@@ -1222,17 +1218,3 @@ def _build_formatted_table_id(table):

def has_table(self, connection, table_name, schema=None, **kw):
"""Checks whether a table exists in BigQuery.
Args:
connection (google.cloud.bigquery.client.Client): The client
object used to interact with BigQuery.
table_name (str): The name of the table to check for.
schema (str, optional): The name of the schema to which the table
belongs. Defaults to the default schema.
**kw (dict): Any extra keyword arguments will be ignored.
Returns:
bool: True if the table exists, False otherwise.
"""
def has_table(self, connection, table_name, schema=None):
try:

@@ -1291,2 +1273,6 @@ self._get_table(connection, table_name, schema)

def _check_unicode_returns(self, connection, additional_tests=None):
# requests gives back Unicode strings
return True
def get_view_definition(self, connection, view_name, schema=None, **kw):

@@ -1311,9 +1297,3 @@ if isinstance(connection, Engine):

if isinstance(arg, sqlalchemy.sql.expression.ColumnElement):
if not (
isinstance(arg.type, sqlalchemy.sql.sqltypes.ARRAY)
or (
hasattr(arg.type, "impl")
and isinstance(arg.type.impl, sqlalchemy.sql.sqltypes.ARRAY)
)
):
if not isinstance(arg.type, sqlalchemy.sql.sqltypes.ARRAY):
raise TypeError("The argument to unnest must have an ARRAY type.")

@@ -1320,0 +1300,0 @@ self.type = arg.type.item_type

@@ -140,7 +140,2 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

@property
def array_type(self):
"""Target database must support array_type"""
return supported()
@property
def implicit_default_schema(self):

@@ -147,0 +142,0 @@ """target system has a strong concept of 'default' schema that can

@@ -20,2 +20,2 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

__version__ = "1.11.0.dev2"
__version__ = "1.10.0"

@@ -21,3 +21,2 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

import datetime
import decimal
import mock

@@ -32,18 +31,12 @@ import packaging.version

import sqlalchemy.sql.sqltypes
from sqlalchemy.testing import util, config
from sqlalchemy.testing import util
from sqlalchemy.testing.assertions import eq_
from sqlalchemy.testing.suite import select, exists
from sqlalchemy.testing.suite import config, select, exists
from sqlalchemy.testing.suite import * # noqa
from sqlalchemy.testing.suite import Integer, Table, Column, String, bindparam, testing
from sqlalchemy.testing.suite import (
ComponentReflectionTest as _ComponentReflectionTest,
CTETest as _CTETest,
ExistsTest as _ExistsTest,
FetchLimitOffsetTest as _FetchLimitOffsetTest,
DifficultParametersTest as _DifficultParametersTest,
DistinctOnTest,
HasIndexTest,
IdentityAutoincrementTest,
InsertBehaviorTest as _InsertBehaviorTest,
LongNameBlowoutTest,
PostCompileParamsTest,
QuotedNameArgumentTest,

@@ -54,179 +47,27 @@ SimpleUpdateDeleteTest as _SimpleUpdateDeleteTest,

from sqlalchemy.testing.suite.test_types import (
ArrayTest,
)
from sqlalchemy.testing.suite.test_reflection import (
BizarroCharacterFKResolutionTest,
ComponentReflectionTest,
HasTableTest,
)
if packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"):
from sqlalchemy.testing.suite import LimitOffsetTest as _LimitOffsetTest
if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"):
import uuid
from sqlalchemy.sql import type_coerce
from sqlalchemy.testing.suite import (
TrueDivTest as _TrueDivTest,
IntegerTest as _IntegerTest,
NumericTest as _NumericTest,
StringTest as _StringTest,
UuidTest as _UuidTest,
)
class LimitOffsetTest(_LimitOffsetTest):
@pytest.mark.skip("BigQuery doesn't allow an offset without a limit.")
def test_simple_offset(self):
pass
class DifficultParametersTest(_DifficultParametersTest):
"""There are some parameters that don't work with bigquery that were removed from this test"""
test_bound_offset = test_simple_offset
tough_parameters = testing.combinations(
("boring",),
("per cent",),
("per % cent",),
("%percent",),
("col:ons",),
("_starts_with_underscore",),
("more :: %colons%",),
("_name",),
("___name",),
("42numbers",),
("percent%signs",),
("has spaces",),
("1param",),
("1col:on",),
argnames="paramname",
)
class TimestampMicrosecondsTest(_TimestampMicrosecondsTest):
data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC)
@tough_parameters
@config.requirements.unusual_column_name_characters
def test_round_trip_same_named_column(self, paramname, connection, metadata):
name = paramname
def test_literal(self):
# The base tests doesn't set up the literal properly, because
# it doesn't pass its datatype to `literal`.
t = Table(
"t",
metadata,
Column("id", Integer, primary_key=True),
Column(name, String(50), nullable=False),
)
def literal(value):
assert value == self.data
return sqlalchemy.sql.elements.literal(value, self.datatype)
# table is created
t.create(connection)
with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal):
super(TimestampMicrosecondsTest, self).test_literal()
# automatic param generated by insert
connection.execute(t.insert().values({"id": 1, name: "some name"}))
# automatic param generated by criteria, plus selecting the column
stmt = select(t.c[name]).where(t.c[name] == "some name")
eq_(connection.scalar(stmt), "some name")
# use the name in a param explicitly
stmt = select(t.c[name]).where(t.c[name] == bindparam(name))
row = connection.execute(stmt, {name: "some name"}).first()
# name works as the key from cursor.description
eq_(row._mapping[name], "some name")
# use expanding IN
stmt = select(t.c[name]).where(
t.c[name].in_(["some name", "some other_name"])
)
row = connection.execute(stmt).first()
@testing.fixture
def multirow_fixture(self, metadata, connection):
mytable = Table(
"mytable",
metadata,
Column("myid", Integer),
Column("name", String(50)),
Column("desc", String(50)),
)
mytable.create(connection)
connection.execute(
mytable.insert(),
[
{"myid": 1, "name": "a", "desc": "a_desc"},
{"myid": 2, "name": "b", "desc": "b_desc"},
{"myid": 3, "name": "c", "desc": "c_desc"},
{"myid": 4, "name": "d", "desc": "d_desc"},
],
)
yield mytable
@tough_parameters
def test_standalone_bindparam_escape(
self, paramname, connection, multirow_fixture
):
tbl1 = multirow_fixture
stmt = select(tbl1.c.myid).where(
tbl1.c.name == bindparam(paramname, value="x")
)
res = connection.scalar(stmt, {paramname: "c"})
eq_(res, 3)
@tough_parameters
def test_standalone_bindparam_escape_expanding(
self, paramname, connection, multirow_fixture
):
tbl1 = multirow_fixture
stmt = (
select(tbl1.c.myid)
.where(tbl1.c.name.in_(bindparam(paramname, value=["a", "b"])))
.order_by(tbl1.c.myid)
)
res = connection.scalars(stmt, {paramname: ["d", "a"]}).all()
eq_(res, [1, 4])
# BQ has no autoinc and client-side defaults can't work for select
del _IntegerTest.test_huge_int_auto_accommodation
class NumericTest(_NumericTest):
"""Added a where clause for BQ compatibility."""
@testing.fixture
def do_numeric_test(self, metadata, connection):
def run(type_, input_, output, filter_=None, check_scale=False):
t = Table("t", metadata, Column("x", type_))
t.create(connection)
connection.execute(t.insert(), [{"x": x} for x in input_])
result = {row[0] for row in connection.execute(t.select())}
output = set(output)
if filter_:
result = {filter_(x) for x in result}
output = {filter_(x) for x in output}
eq_(result, output)
if check_scale:
eq_([str(x) for x in result], [str(x) for x in output])
where_expr = True
connection.execute(t.delete().where(where_expr))
if type_.asdecimal:
test_value = decimal.Decimal("2.9")
add_value = decimal.Decimal("37.12")
else:
test_value = 2.9
add_value = 37.12
connection.execute(t.insert(), {"x": test_value})
assert_we_are_a_number = connection.scalar(
select(type_coerce(t.c.x + add_value, type_))
)
eq_(
round(assert_we_are_a_number, 3),
round(test_value + add_value, 3),
)
return run
class TimestampMicrosecondsTest(_TimestampMicrosecondsTest):
"""BQ has no support for BQ util.text_type"""
data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC)
def test_select_direct(self, connection):

@@ -247,240 +88,41 @@ # This func added because this test was failing when passed the

def test_round_trip_executemany(self, connection):
unicode_table = self.tables.unicode_table
connection.execute(
unicode_table.insert(),
[{"id": i, "unicode_data": self.data} for i in range(3)],
)
rows = connection.execute(select(unicode_table.c.unicode_data)).fetchall()
eq_(rows, [(self.data,) for i in range(3)])
for row in rows:
assert isinstance(row[0], str)
sqlalchemy.testing.suite.test_types._UnicodeFixture.test_round_trip_executemany = (
test_round_trip_executemany
else:
from sqlalchemy.testing.suite import (
FetchLimitOffsetTest as _FetchLimitOffsetTest,
RowCountTest as _RowCountTest,
)
class TrueDivTest(_TrueDivTest):
@pytest.mark.skip("BQ rounds based on datatype")
def test_floordiv_integer(self):
class FetchLimitOffsetTest(_FetchLimitOffsetTest):
@pytest.mark.skip("BigQuery doesn't allow an offset without a limit.")
def test_simple_offset(self):
pass
@pytest.mark.skip("BQ rounds based on datatype")
def test_floordiv_integer_bound(self):
pass
test_bound_offset = test_simple_offset
test_expr_offset = test_simple_offset_zero = test_simple_offset
class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest):
"""The base tests fail if operations return rows for some reason."""
# The original test is missing an order by.
def test_update(self):
t = self.tables.plain_pk
connection = config.db.connect()
# In SQLAlchemy 2.0, the datatype changed to dict in the following function.
r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new"))
assert not r.is_insert
# Also, note that sqlalchemy union is a union distinct, not a
# union all. This test caught that were were getting that wrong.
def test_limit_render_multiple_times(self, connection):
table = self.tables.some_table
stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery()
eq_(
connection.execute(t.select().order_by(t.c.id)).fetchall(),
[(1, "d1"), (2, "d2_new"), (3, "d3")],
)
u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select()
def test_delete(self):
t = self.tables.plain_pk
connection = config.db.connect()
r = connection.execute(t.delete().where(t.c.id == 2))
assert not r.is_insert
eq_(
connection.execute(t.select().order_by(t.c.id)).fetchall(),
[(1, "d1"), (3, "d3")],
self._assert_result(
connection,
u,
[(1,)],
)
class StringTest(_StringTest):
"""Added a where clause for BQ compatibility"""
def test_dont_truncate_rightside(
self, metadata, connection, expr=None, expected=None
):
t = Table(
"t",
metadata,
Column("x", String(2)),
Column("id", Integer, primary_key=True),
)
t.create(connection)
connection.connection.commit()
connection.execute(
t.insert(),
[{"x": "AB", "id": 1}, {"x": "BC", "id": 2}, {"x": "AC", "id": 3}],
)
combinations = [("%B%", ["AB", "BC"]), ("A%C", ["AC"]), ("A%C%Z", [])]
for args in combinations:
eq_(
connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(),
args[1],
)
class UuidTest(_UuidTest):
"""BQ needs to pass in UUID as a string"""
@classmethod
def define_tables(cls, metadata):
Table(
"uuid_table",
metadata,
Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
Column("uuid_data", String), # Use native UUID for primary data
Column(
"uuid_text_data", String, nullable=True
), # Optional text representation
Column("uuid_data_nonnative", String),
Column("uuid_text_data_nonnative", String),
)
def test_uuid_round_trip(self, connection):
data = str(uuid.uuid4())
uuid_table = self.tables.uuid_table
connection.execute(
uuid_table.insert(),
{"id": 1, "uuid_data": data, "uuid_data_nonnative": data},
)
row = connection.execute(
select(uuid_table.c.uuid_data, uuid_table.c.uuid_data_nonnative).where(
uuid_table.c.uuid_data == data,
uuid_table.c.uuid_data_nonnative == data,
)
).first()
eq_(row, (data, data))
def test_uuid_text_round_trip(self, connection):
data = str(uuid.uuid4())
uuid_table = self.tables.uuid_table
connection.execute(
uuid_table.insert(),
{
"id": 1,
"uuid_text_data": data,
"uuid_text_data_nonnative": data,
},
)
row = connection.execute(
select(
uuid_table.c.uuid_text_data,
uuid_table.c.uuid_text_data_nonnative,
).where(
uuid_table.c.uuid_text_data == data,
uuid_table.c.uuid_text_data_nonnative == data,
)
).first()
eq_((row[0].lower(), row[1].lower()), (data, data))
def test_literal_uuid(self, literal_round_trip):
data = str(uuid.uuid4())
literal_round_trip(String(), [data], [data])
def test_literal_text(self, literal_round_trip):
data = str(uuid.uuid4())
literal_round_trip(
String(),
[data],
[data],
filter_=lambda x: x.lower(),
)
def test_literal_nonnative_uuid(self, literal_round_trip):
data = str(uuid.uuid4())
literal_round_trip(String(), [data], [data])
def test_literal_nonnative_text(self, literal_round_trip):
data = str(uuid.uuid4())
literal_round_trip(
String(),
[data],
[data],
filter_=lambda x: x.lower(),
)
@testing.requires.insert_returning
def test_uuid_returning(self, connection):
data = str(uuid.uuid4())
str_data = str(data)
uuid_table = self.tables.uuid_table
result = connection.execute(
uuid_table.insert().returning(
uuid_table.c.uuid_data,
uuid_table.c.uuid_text_data,
uuid_table.c.uuid_data_nonnative,
uuid_table.c.uuid_text_data_nonnative,
),
{
"id": 1,
"uuid_data": data,
"uuid_text_data": str_data,
"uuid_data_nonnative": data,
"uuid_text_data_nonnative": str_data,
},
)
row = result.first()
eq_(row, (data, str_data, data, str_data))
else:
from sqlalchemy.testing.suite import (
RowCountTest as _RowCountTest,
)
del DifficultParametersTest # exercises column names illegal in BQ
del DistinctOnTest # expects unquoted table names.
del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for.
del IdentityAutoincrementTest # BQ doesn't do autoincrement
class RowCountTest(_RowCountTest):
""""""
# This test makes makes assertions about generated sql and trips
# over the backquotes that we add everywhere. XXX Why do we do that?
del PostCompileParamsTest
@classmethod
def insert_data(cls, connection):
cls.data = data = [
("Angela", "A"),
("Andrew", "A"),
("Anand", "A"),
("Bob", "B"),
("Bobette", "B"),
("Buffy", "B"),
("Charlie", "C"),
("Cynthia", "C"),
("Chris", "C"),
]
employees_table = cls.tables.employees
connection.execute(
employees_table.insert(),
[
{"employee_id": i, "name": n, "department": d}
for i, (n, d) in enumerate(data)
],
)
class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest):
"""The base tests fail if operations return rows for some reason."""
def test_update(self):
t = self.tables.plain_pk
r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new")
assert not r.is_insert
eq_(
config.db.execute(t.select().order_by(t.c.id)).fetchall(),
[(1, "d1"), (2, "d2_new"), (3, "d3")],
)
def test_delete(self):
t = self.tables.plain_pk
r = config.db.execute(t.delete().where(t.c.id == 2))
assert not r.is_insert
eq_(
config.db.execute(t.select().order_by(t.c.id)).fetchall(),
[(1, "d1"), (3, "d3")],
)
class TimestampMicrosecondsTest(_TimestampMicrosecondsTest):

@@ -536,10 +178,36 @@ data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC)

class RowCountTest(_RowCountTest):
@classmethod
def insert_data(cls, connection):
cls.data = data = [
("Angela", "A"),
("Andrew", "A"),
("Anand", "A"),
("Bob", "B"),
("Bobette", "B"),
("Buffy", "B"),
("Charlie", "C"),
("Cynthia", "C"),
("Chris", "C"),
]
class CTETest(_CTETest):
@pytest.mark.skip("Can't use CTEs with insert")
def test_insert_from_select_round_trip(self):
pass
employees_table = cls.tables.employees
connection.execute(
employees_table.insert(),
[
{"employee_id": i, "name": n, "department": d}
for i, (n, d) in enumerate(data)
],
)
@pytest.mark.skip("Recusive CTEs aren't supported.")
def test_select_recursive_round_trip(self):
# Quotes aren't allowed in BigQuery table names.
del QuotedNameArgumentTest
class InsertBehaviorTest(_InsertBehaviorTest):
@pytest.mark.skip(
"BQ has no autoinc and client-side defaults can't work for select."
)
def test_insert_from_select_autoinc(cls):
pass

@@ -560,3 +228,3 @@

connection.execute(
select(stuff.c.id).where(
select([stuff.c.id]).where(
and_(

@@ -575,3 +243,3 @@ stuff.c.id == 1,

connection.execute(
select(stuff.c.id).where(exists().where(stuff.c.data == "no data"))
select([stuff.c.id]).where(exists().where(stuff.c.data == "no data"))
).fetchall(),

@@ -582,63 +250,50 @@ [],

class FetchLimitOffsetTest(_FetchLimitOffsetTest):
@pytest.mark.skip("BigQuery doesn't allow an offset without a limit.")
def test_simple_offset(self):
pass
# This test requires features (indexes, primary keys, etc., that BigQuery doesn't have.
del LongNameBlowoutTest
test_bound_offset = test_simple_offset
test_expr_offset = test_simple_offset_zero = test_simple_offset
test_limit_offset_nobinds = test_simple_offset # TODO figure out
# how to prevent this from failing
# The original test is missing an order by.
# Also, note that sqlalchemy union is a union distinct, not a
# union all. This test caught that were were getting that wrong.
def test_limit_render_multiple_times(self, connection):
table = self.tables.some_table
stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery()
class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest):
"""The base tests fail if operations return rows for some reason."""
u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select()
def test_update(self):
t = self.tables.plain_pk
r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new")
assert not r.is_insert
# assert not r.returns_rows
self._assert_result(
connection,
u,
[(1,)],
eq_(
config.db.execute(t.select().order_by(t.c.id)).fetchall(),
[(1, "d1"), (2, "d2_new"), (3, "d3")],
)
def test_delete(self):
t = self.tables.plain_pk
r = config.db.execute(t.delete().where(t.c.id == 2))
assert not r.is_insert
# assert not r.returns_rows
eq_(
config.db.execute(t.select().order_by(t.c.id)).fetchall(),
[(1, "d1"), (3, "d3")],
)
class InsertBehaviorTest(_InsertBehaviorTest):
@pytest.mark.skip(
"BQ has no autoinc and client-side defaults can't work for select."
)
def test_insert_from_select_autoinc(cls):
class CTETest(_CTETest):
@pytest.mark.skip("Can't use CTEs with insert")
def test_insert_from_select_round_trip(self):
pass
@pytest.mark.skip(
"BQ has no autoinc and client-side defaults can't work for select."
)
def test_no_results_for_non_returning_insert(cls):
@pytest.mark.skip("Recusive CTEs aren't supported.")
def test_select_recursive_round_trip(self):
pass
del ComponentReflectionTest # Multiple tests re: CHECK CONSTRAINTS, etc which
# BQ does not support
# class ComponentReflectionTest(_ComponentReflectionTest):
# @pytest.mark.skip("Big query types don't track precision, length, etc.")
# def course_grained_types():
# pass
class ComponentReflectionTest(_ComponentReflectionTest):
@pytest.mark.skip("Big query types don't track precision, length, etc.")
def course_grained_types():
pass
# test_numeric_reflection = test_varchar_reflection = course_grained_types
test_numeric_reflection = test_varchar_reflection = course_grained_types
# @pytest.mark.skip("BQ doesn't have indexes (in the way these tests expect).")
# def test_get_indexes(self):
# pass
del ArrayTest # only appears to apply to postgresql
del BizarroCharacterFKResolutionTest
del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching
del DistinctOnTest # expects unquoted table names.
del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for.
del IdentityAutoincrementTest # BQ doesn't do autoincrement
del LongNameBlowoutTest # Requires features (indexes, primary keys, etc., that BigQuery doesn't have.
del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this?
del QuotedNameArgumentTest # Quotes aren't allowed in BigQuery table names.
@pytest.mark.skip("BQ doesn't have indexes (in the way these tests expect).")
def test_get_indexes(self):
pass

@@ -57,3 +57,3 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

assert list(conn.execute(sqlalchemy.select(table))) == [
assert list(conn.execute(sqlalchemy.select([table]))) == [
(

@@ -66,12 +66,12 @@ {

]
assert list(conn.execute(sqlalchemy.select(table.c.person.NAME))) == [("bob",)]
assert list(conn.execute(sqlalchemy.select(table.c.person.children[0]))) == [
assert list(conn.execute(sqlalchemy.select([table.c.person.NAME]))) == [("bob",)]
assert list(conn.execute(sqlalchemy.select([table.c.person.children[0]]))) == [
({"name": "billy", "bdate": datetime.date(2020, 1, 1)},)
]
assert list(conn.execute(sqlalchemy.select(table.c.person.children[0].bdate))) == [
(datetime.date(2020, 1, 1),)
]
assert list(
conn.execute(sqlalchemy.select([table.c.person.children[0].bdate]))
) == [(datetime.date(2020, 1, 1),)]
assert list(
conn.execute(
sqlalchemy.select(table).where(table.c.person.children[0].NAME == "billy")
sqlalchemy.select([table]).where(table.c.person.children[0].NAME == "billy")
)

@@ -89,3 +89,3 @@ ) == [

conn.execute(
sqlalchemy.select(table).where(
sqlalchemy.select([table]).where(
table.c.person.children[0].NAME == "sally"

@@ -105,13 +105,10 @@ )

table_name = f"{bigquery_dataset}.test_comples_literals_pr_67"
with engine.connect() as conn:
conn.execute(
sqlalchemy.text(
f"""
create table {table_name} as (
select 'a' as id,
struct(1 as x__count, 2 as y__count, 3 as z__count) as dimensions
)
"""
engine.execute(
f"""
create table {table_name} as (
select 'a' as id,
struct(1 as x__count, 2 as y__count, 3 as z__count) as dimensions
)
)
"""
)

@@ -121,3 +118,5 @@ table = sqlalchemy.Table(table_name, metadata, autoload_with=engine)

got = str(
sqlalchemy.select((table.c.dimensions.x__count + 5).label("c")).compile(engine)
sqlalchemy.select([(table.c.dimensions.x__count + 5).label("c")]).compile(
engine
)
)

@@ -158,7 +157,5 @@ want = (

mock_table.insert(),
[
dict(mock_id="x"),
dict(mock_id="y"),
dict(mock_id="z"),
],
dict(mock_id="x"),
dict(mock_id="y"),
dict(mock_id="z"),
)

@@ -165,0 +162,0 @@ conn.execute(

@@ -77,3 +77,3 @@ # Copyright (c) 2021 The PyBigQuery Authors

assert sorted(
(r.name, r.geog.desc[:4]) for r in conn.execute(select(lake_table))
(r.name, r.geog.desc[:4]) for r in conn.execute(select([lake_table]))
) == [("Garde", "0103"), ("Majeur", "0103"), ("Orta", "0103")]

@@ -86,5 +86,3 @@

[[result]] = conn.execute(
select(lake_table.c.name).where(
func.ST_Contains(lake_table.c.geog, "POINT(4 1)")
)
select([lake_table.c.name], func.ST_Contains(lake_table.c.geog, "POINT(4 1)"))
)

@@ -96,3 +94,3 @@ assert result == "Orta"

for r in conn.execute(
select(lake_table.c.name, lake_table.c.geog.ST_AREA().label("area"))
select([lake_table.c.name, lake_table.c.geog.ST_AREA().label("area")])
)

@@ -103,5 +101,3 @@ ) == [("Garde", 49452374328), ("Majeur", 12364036567), ("Orta", 111253664228)]

[[geog]] = conn.execute(
select(lake_table.c.geog).where(lake_table.c.name == "Garde")
)
[[geog]] = conn.execute(select([lake_table.c.geog], lake_table.c.name == "Garde"))
conn.execute(lake_table.insert().values(name="test", geog=geog))

@@ -112,5 +108,3 @@ assert (

conn.execute(
select(lake_table.c.geog.st_area()).where(
lake_table.c.name == "test"
)
select([lake_table.c.geog.st_area()], lake_table.c.name == "test")
)

@@ -136,5 +130,3 @@ )[0][0]

conn.execute(
select(lake_table.c.geog.st_area()).where(
lake_table.c.name == "test2"
)
select([lake_table.c.geog.st_area()], lake_table.c.name == "test2")
)

@@ -141,0 +133,0 @@ )[0][0]

@@ -160,3 +160,3 @@ # Copyright (c) 2017 The sqlalchemy-bigquery Authors

def table(engine, bigquery_dataset):
return Table(f"{bigquery_dataset}.sample", MetaData(), autoload_with=engine)
return Table(f"{bigquery_dataset}.sample", MetaData(bind=engine), autoload=True)

@@ -166,3 +166,3 @@

def table_using_test_dataset(engine_using_test_dataset):
return Table("sample", MetaData(), autoload_with=engine_using_test_dataset)
return Table("sample", MetaData(bind=engine_using_test_dataset), autoload=True)

@@ -172,3 +172,5 @@

def table_one_row(engine, bigquery_dataset):
return Table(f"{bigquery_dataset}.sample_one_row", MetaData(), autoload_with=engine)
return Table(
f"{bigquery_dataset}.sample_one_row", MetaData(bind=engine), autoload=True
)

@@ -178,3 +180,3 @@

def table_dml(engine, bigquery_empty_table):
return Table(bigquery_empty_table, MetaData(), autoload_with=engine)
return Table(bigquery_empty_table, MetaData(bind=engine), autoload=True)

@@ -221,3 +223,3 @@

query = (
select(col1, col2, col3)
select([col1, col2, col3])
.where(col1 < "2017-01-01 00:00:00")

@@ -233,30 +235,21 @@ .group_by(col1)

def test_engine_with_dataset(engine_using_test_dataset, bigquery_dataset):
with engine_using_test_dataset.connect() as conn:
rows = conn.execute(sqlalchemy.text("SELECT * FROM sample_one_row")).fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS
rows = engine_using_test_dataset.execute("SELECT * FROM sample_one_row").fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS
table_one_row = Table(
"sample_one_row", MetaData(), autoload_with=engine_using_test_dataset
)
rows = conn.execute(
table_one_row.select().set_label_style(
sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL
)
).fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED
table_one_row = Table(
"sample_one_row", MetaData(bind=engine_using_test_dataset), autoload=True
)
rows = table_one_row.select(use_labels=True).execute().fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED
table_one_row = Table(
f"{bigquery_dataset}.sample_one_row",
MetaData(),
autoload_with=engine_using_test_dataset,
)
rows = conn.execute(
table_one_row.select().set_label_style(
sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL
)
).fetchall()
# verify that we are pulling from the specifically-named dataset,
# instead of pulling from the default dataset of the engine (which
# does not have this table at all)
assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED
table_one_row = Table(
f"{bigquery_dataset}.sample_one_row",
MetaData(bind=engine_using_test_dataset),
autoload=True,
)
rows = table_one_row.select(use_labels=True).execute().fetchall()
# verify that we are pulling from the specifically-named dataset,
# instead of pulling from the default dataset of the engine (which
# does not have this table at all)
assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED

@@ -267,10 +260,9 @@

):
with engine_with_location.connect() as conn:
rows = conn.execute(
sqlalchemy.text(f"SELECT * FROM {bigquery_regional_dataset}.sample_one_row")
).fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS
rows = engine_with_location.execute(
f"SELECT * FROM {bigquery_regional_dataset}.sample_one_row"
).fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS
def test_reflect_select(table, engine_using_test_dataset, table_using_test_dataset):
def test_reflect_select(table, table_using_test_dataset):
for table in [table, table_using_test_dataset]:

@@ -296,37 +288,26 @@ assert table.comment == "A sample table containing most data types."

with engine_using_test_dataset.connect() as conn:
rows = conn.execute(
table.select().set_label_style(
sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL
)
).fetchall()
assert len(rows) == 1000
# Force unique column labels using `use_labels` below to deal
# with BQ sometimes complaining about duplicate column names
# when a destination table is specified, even though no
# destination table is specified. When this test was written,
# `use_labels` was forced by the dialect.
rows = table.select(use_labels=True).execute().fetchall()
assert len(rows) == 1000
def test_content_from_raw_queries(engine, bigquery_dataset):
with engine.connect() as conn:
rows = conn.execute(
sqlalchemy.text(f"SELECT * FROM {bigquery_dataset}.sample_one_row")
).fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS
rows = engine.execute(f"SELECT * FROM {bigquery_dataset}.sample_one_row").fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS
def test_record_content_from_raw_queries(engine, bigquery_dataset):
with engine.connect() as conn:
rows = conn.execute(
sqlalchemy.text(
f"SELECT record.name FROM {bigquery_dataset}.sample_one_row"
)
).fetchall()
assert rows[0][0] == "John Doe"
rows = engine.execute(
f"SELECT record.name FROM {bigquery_dataset}.sample_one_row"
).fetchall()
assert rows[0][0] == "John Doe"
def test_content_from_reflect(engine, table_one_row):
with engine.connect() as conn:
rows = conn.execute(
table_one_row.select().set_label_style(
sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL
)
).fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED
rows = table_one_row.select(use_labels=True).execute().fetchall()
assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED

@@ -336,8 +317,6 @@

unicode_str = "白人看不懂"
with engine.connect() as conn:
returned_str = conn.execute(
sqlalchemy.select(expression.bindparam("好", unicode_str)).select_from(
table_one_row
)
).scalar()
returned_str = sqlalchemy.select(
[expression.bindparam("好", unicode_str)],
from_obj=table_one_row,
).scalar()
assert returned_str == unicode_str

@@ -348,7 +327,6 @@

one_row = Table(
"bigquery-public-data.samples.natality", MetaData(), autoload_with=engine
"bigquery-public-data.samples.natality", MetaData(bind=engine), autoload=True
)
with engine.connect() as conn:
row = conn.execute(one_row.select().limit(1)).first()
assert len(row) >= 1
row = one_row.select().limit(1).execute().first()
assert len(row) >= 1

@@ -360,8 +338,10 @@

f"{bigquery_dataset}.table_does_not_exist",
MetaData(),
autoload_with=engine,
MetaData(bind=engine),
autoload=True,
)
assert (
sqlalchemy.inspect(engine).has_table(f"{bigquery_dataset}.table_does_not_exist")
Table(
f"{bigquery_dataset}.table_does_not_exist", MetaData(bind=engine)
).exists()
is False

@@ -375,4 +355,4 @@ )

"dataset_does_not_exist.table_does_not_exist",
MetaData(),
autoload_with=engine,
MetaData(bind=engine),
autoload=True,
)

@@ -382,3 +362,3 @@

def test_tables_list(engine, engine_using_test_dataset, bigquery_dataset):
tables = sqlalchemy.inspect(engine).get_table_names()
tables = engine.table_names()
assert f"{bigquery_dataset}.sample" in tables

@@ -388,3 +368,3 @@ assert f"{bigquery_dataset}.sample_one_row" in tables

tables = sqlalchemy.inspect(engine_using_test_dataset).get_table_names()
tables = engine_using_test_dataset.table_names()
assert "sample" in tables

@@ -416,3 +396,3 @@ assert "sample_one_row" in tables

sqlalchemy.func.sum(
sqlalchemy.case((sqlalchemy.literal(True), col.label("inner"))).label(
sqlalchemy.case([[sqlalchemy.literal(True), col.label("inner")]]).label(
"outer"

@@ -423,3 +403,3 @@ )

sqlalchemy.func.sum(
sqlalchemy.case((sqlalchemy.literal(True), col.label("inner"))).label(
sqlalchemy.case([[sqlalchemy.literal(True), col.label("inner")]]).label(
"middle"

@@ -451,3 +431,3 @@ )

func.sum(
case((table.c.boolean == sqlalchemy.literal(True), 1), else_=0)
case([(table.c.boolean == sqlalchemy.literal(True), 1)], else_=0)
),

@@ -485,12 +465,11 @@ )

q = query(table)
with engine.connect() as conn:
result = conn.execute(q).fetchall()
assert len(result) > 0
result = engine.execute(q).fetchall()
assert len(result) > 0
q = query(table_using_test_dataset)
with engine_using_test_dataset.connect() as conn:
result = conn.execute(q).fetchall()
assert len(result) > 0
result = engine_using_test_dataset.execute(q).fetchall()
assert len(result) > 0
def test_compiled_query_literal_binds(

@@ -501,5 +480,4 @@ engine, engine_using_test_dataset, table, table_using_test_dataset, query

compiled = q.compile(engine, compile_kwargs={"literal_binds": True})
with engine.connect() as conn:
result = conn.execute(compiled).fetchall()
assert len(result) > 0
result = engine.execute(compiled).fetchall()
assert len(result) > 0

@@ -510,5 +488,4 @@ q = query(table_using_test_dataset)

)
with engine_using_test_dataset.connect() as conn:
result = conn.execute(compiled).fetchall()
assert len(result) > 0
result = engine_using_test_dataset.execute(compiled).fetchall()
assert len(result) > 0

@@ -542,42 +519,27 @@

table = Table(
"bigquery-public-data.noaa_gsod.gsod*", MetaData(), autoload_with=engine
"bigquery-public-data.noaa_gsod.gsod*", MetaData(bind=engine), autoload=True
)
with engine.connect() as conn:
rows = conn.execute(table.select().limit(1)).first()
assert len(rows) > 0
rows = table.select().limit(1).execute().first()
assert len(rows) > 0
def test_dml(engine, session, table_dml):
"""
Test DML operations on a table with no data. This table is created
in the `bigquery_empty_table` fixture.
Modern versions of sqlalchemy does not really require setting the
label style. This has been maintained to retain this test.
"""
# test insert
with engine.connect() as conn:
conn.execute(table_dml.insert().values(ONE_ROW_CONTENTS_DML))
result = conn.execute(
table_dml.select().set_label_style(sqlalchemy.LABEL_STYLE_DEFAULT)
).fetchall()
assert len(result) == 1
engine.execute(table_dml.insert(ONE_ROW_CONTENTS_DML))
result = table_dml.select(use_labels=True).execute().fetchall()
assert len(result) == 1
# test update
session.query(table_dml).filter(table_dml.c.string == "test").update(
{"string": "updated_row"}, synchronize_session=False
)
updated_result = conn.execute(
table_dml.select().set_label_style(sqlalchemy.LABEL_STYLE_DEFAULT)
).fetchone()
assert updated_result._mapping[table_dml.c.string] == "updated_row"
# test update
session.query(table_dml).filter(table_dml.c.string == "test").update(
{"string": "updated_row"}, synchronize_session=False
)
updated_result = table_dml.select(use_labels=True).execute().fetchone()
assert updated_result[table_dml.c.string] == "updated_row"
# test delete
session.query(table_dml).filter(table_dml.c.string == "updated_row").delete(
synchronize_session=False
)
result = conn.execute(
table_dml.select().set_label_style(sqlalchemy.LABEL_STYLE_DEFAULT)
).fetchall()
assert len(result) == 0
# test delete
session.query(table_dml).filter(table_dml.c.string == "updated_row").delete(
synchronize_session=False
)
result = table_dml.select(use_labels=True).execute().fetchall()
assert len(result) == 0

@@ -741,30 +703,12 @@

def test_has_table(engine, engine_using_test_dataset, bigquery_dataset):
assert sqlalchemy.inspect(engine).has_table("sample", bigquery_dataset) is True
assert sqlalchemy.inspect(engine).has_table(f"{bigquery_dataset}.sample") is True
assert (
sqlalchemy.inspect(engine).has_table(f"{bigquery_dataset}.nonexistent_table")
is False
)
assert (
sqlalchemy.inspect(engine).has_table("nonexistent_table", "nonexistent_dataset")
is False
)
assert engine.has_table("sample", bigquery_dataset) is True
assert engine.has_table(f"{bigquery_dataset}.sample") is True
assert engine.has_table(f"{bigquery_dataset}.nonexistent_table") is False
assert engine.has_table("nonexistent_table", "nonexistent_dataset") is False
assert sqlalchemy.inspect(engine_using_test_dataset).has_table("sample") is True
assert (
sqlalchemy.inspect(engine_using_test_dataset).has_table(
"sample", bigquery_dataset
)
is True
)
assert (
sqlalchemy.inspect(engine_using_test_dataset).has_table(
f"{bigquery_dataset}.sample"
)
is True
)
assert engine_using_test_dataset.has_table("sample") is True
assert engine_using_test_dataset.has_table("sample", bigquery_dataset) is True
assert engine_using_test_dataset.has_table(f"{bigquery_dataset}.sample") is True
assert (
sqlalchemy.inspect(engine_using_test_dataset).has_table("sample_alt") is False
)
assert engine_using_test_dataset.has_table("sample_alt") is False

@@ -816,3 +760,3 @@

conn.execute(
sqlalchemy.select(sqlalchemy.literal(-1).in_(list(range(99999))))
sqlalchemy.select([sqlalchemy.literal(-1).in_(list(range(99999)))])
)

@@ -847,3 +791,3 @@ ) == [(False,)]

)
query = select(func.unnest(table.c.objects).alias("foo_objects").column)
query = select([func.unnest(table.c.objects).alias("foo_objects").column])
compiled = str(query.compile(engine))

@@ -883,4 +827,6 @@ assert " ".join(compiled.strip().split()) == (

query = select(
selectable.c.foo,
func.unnest(selectable.c.bars).column_valued("unnest_bars"),
[
selectable.c.foo,
func.unnest(selectable.c.bars).column_valued("unnest_bars"),
]
).select_from(selectable)

@@ -887,0 +833,0 @@ compiled = str(query.compile(engine))

@@ -33,10 +33,14 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

sqlalchemy_version = packaging.version.parse(sqlalchemy.__version__)
sqlalchemy_before_2_0 = pytest.mark.skipif(
sqlalchemy_version >= packaging.version.parse("2.0"),
reason="requires sqlalchemy 1.3 or lower",
sqlalchemy_1_3_or_higher = pytest.mark.skipif(
sqlalchemy_version < packaging.version.parse("1.3"),
reason="requires sqlalchemy 1.3 or higher",
)
sqlalchemy_2_0_or_higher = pytest.mark.skipif(
sqlalchemy_version < packaging.version.parse("2.0"),
sqlalchemy_1_4_or_higher = pytest.mark.skipif(
sqlalchemy_version < packaging.version.parse("1.4"),
reason="requires sqlalchemy 1.4 or higher",
)
sqlalchemy_before_1_4 = pytest.mark.skipif(
sqlalchemy_version >= packaging.version.parse("1.4"),
reason="requires sqlalchemy 1.3 or lower",
)

@@ -43,0 +47,0 @@

@@ -87,3 +87,3 @@ # Copyright (c) 2017 The sqlalchemy-bigquery Authors

sql = f"SELECT {sql} AS `anon_1` \nFROM `t`"
assert str(sqlalchemy.select(expr).compile(faux_conn.engine)) == sql
assert str(sqlalchemy.select([expr]).compile(faux_conn.engine)) == sql

@@ -121,3 +121,3 @@

want = f"SELECT `t`.`person` \nFROM `t`, `t` \nWHERE {sql}"
got = str(sqlalchemy.select(_col()).where(expr).compile(faux_conn.engine))
got = str(sqlalchemy.select([_col()]).where(expr).compile(faux_conn.engine))
assert got == want

@@ -124,0 +124,0 @@

@@ -24,6 +24,3 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

from .conftest import setup_table
from .conftest import (
sqlalchemy_2_0_or_higher,
sqlalchemy_before_2_0,
)
from .conftest import sqlalchemy_1_4_or_higher, sqlalchemy_before_1_4

@@ -65,2 +62,3 @@

@sqlalchemy_1_4_or_higher
def test_no_alias_for_known_tables(faux_conn, metadata):

@@ -87,2 +85,3 @@ # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/353

@sqlalchemy_1_4_or_higher
def test_no_alias_for_known_tables_cte(faux_conn, metadata):

@@ -149,6 +148,6 @@ # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368

@sqlalchemy_before_2_0
def test_no_implicit_join_asterix_for_inner_unnest_before_2_0(faux_conn, metadata):
@sqlalchemy_before_1_4
def test_no_implicit_join_asterix_for_inner_unnest_before_1_4(faux_conn, metadata):
# See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368
q = prepare_implicit_join_base_query(faux_conn, metadata, True, False)
q = prepare_implicit_join_base_query(faux_conn, metadata, True, True)
expected_initial_sql = (

@@ -161,4 +160,3 @@ "SELECT `table1`.`foo`, `table2`.`bar` \n"

q = q.subquery()
q = sqlalchemy.select("*").select_from(q)
q = sqlalchemy.select(["*"]).select_from(q)

@@ -168,3 +166,3 @@ expected_outer_sql = (

"FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n"
"FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`"
"FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`)"
)

@@ -175,3 +173,3 @@ found_outer_sql = q.compile(faux_conn).string

@sqlalchemy_2_0_or_higher
@sqlalchemy_1_4_or_higher
def test_no_implicit_join_asterix_for_inner_unnest(faux_conn, metadata):

@@ -182,3 +180,3 @@ # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368

"SELECT `table1`.`foo`, `table2`.`bar` \n"
"FROM unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`, `table2`"
"FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`"
)

@@ -194,3 +192,3 @@ found_initial_sql = q.compile(faux_conn).string

"FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n"
"FROM unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`, `table2`) AS `anon_1`"
"FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`"
)

@@ -201,6 +199,6 @@ found_outer_sql = q.compile(faux_conn).string

@sqlalchemy_before_2_0
def test_no_implicit_join_for_inner_unnest_before_2_0(faux_conn, metadata):
@sqlalchemy_before_1_4
def test_no_implicit_join_for_inner_unnest_before_1_4(faux_conn, metadata):
# See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368
q = prepare_implicit_join_base_query(faux_conn, metadata, True, False)
q = prepare_implicit_join_base_query(faux_conn, metadata, True, True)
expected_initial_sql = (

@@ -213,9 +211,8 @@ "SELECT `table1`.`foo`, `table2`.`bar` \n"

q = q.subquery()
q = sqlalchemy.select(q.c.foo).select_from(q)
q = sqlalchemy.select([q.c.foo]).select_from(q)
expected_outer_sql = (
"SELECT `anon_1`.`foo` \n"
"SELECT `foo` \n"
"FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n"
"FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`"
"FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`)"
)

@@ -226,3 +223,3 @@ found_outer_sql = q.compile(faux_conn).string

@sqlalchemy_2_0_or_higher
@sqlalchemy_1_4_or_higher
def test_no_implicit_join_for_inner_unnest(faux_conn, metadata):

@@ -233,3 +230,3 @@ # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368

"SELECT `table1`.`foo`, `table2`.`bar` \n"
"FROM unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`, `table2`"
"FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`"
)

@@ -245,3 +242,3 @@ found_initial_sql = q.compile(faux_conn).string

"FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n"
"FROM unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`, `table2`) AS `anon_1`"
"FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`"
)

@@ -252,2 +249,3 @@ found_outer_sql = q.compile(faux_conn).string

@sqlalchemy_1_4_or_higher
def test_no_implicit_join_asterix_for_inner_unnest_no_table2_column(

@@ -277,2 +275,3 @@ faux_conn, metadata

@sqlalchemy_1_4_or_higher
def test_no_implicit_join_for_inner_unnest_no_table2_column(faux_conn, metadata):

@@ -279,0 +278,0 @@ # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368

@@ -30,3 +30,3 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

from .conftest import setup_table
from .conftest import setup_table, sqlalchemy_1_3_or_higher

@@ -56,4 +56,4 @@

table = some_table(faux_conn)
s1 = select(table).where(table.c.id == 2).distinct()
s2 = select(table).where(table.c.id == 3).distinct()
s1 = select([table]).where(table.c.id == 2).distinct()
s2 = select([table]).where(table.c.id == 3).distinct()

@@ -67,3 +67,3 @@ u1 = union(s1, s2).limit(2)

s1 = (
select(table)
select([table])
.where(table.c.id == 2)

@@ -76,3 +76,3 @@ .limit(1)

s2 = (
select(table)
select([table])
.where(table.c.id == 3)

@@ -100,3 +100,3 @@ .limit(1)

faux_conn.scalar(
select(t.c.data).where(t.c.data == literal_column("'some % value'"))
select([t.c.data]).where(t.c.data == literal_column("'some % value'"))
),

@@ -108,3 +108,5 @@ "some % value",

faux_conn.scalar(
select(t.c.data).where(t.c.data == literal_column("'some %% other value'"))
select([t.c.data]).where(
t.c.data == literal_column("'some %% other value'")
)
),

@@ -115,2 +117,3 @@ "some %% other value",

@sqlalchemy_1_3_or_higher
def test_empty_set_against_integer(faux_conn):

@@ -120,3 +123,3 @@ table = some_table(faux_conn)

stmt = (
select(table.c.id)
select([table.c.id])
.where(table.c.x.in_(sqlalchemy.bindparam("q", expanding=True)))

@@ -129,13 +132,18 @@ .order_by(table.c.id)

@sqlalchemy_1_3_or_higher
def test_null_in_empty_set_is_false(faux_conn):
stmt = select(
sqlalchemy.case(
(
sqlalchemy.null().in_(
sqlalchemy.bindparam("foo", value=(), expanding=True)
),
sqlalchemy.true(),
),
else_=sqlalchemy.false(),
)
[
sqlalchemy.case(
[
(
sqlalchemy.null().in_(
sqlalchemy.bindparam("foo", value=(), expanding=True)
),
sqlalchemy.true(),
)
],
else_=sqlalchemy.false(),
)
]
)

@@ -174,3 +182,3 @@ in_(faux_conn.execute(stmt).fetchone()[0], (False, 0))

expr = getattr(table.c.data, meth)(arg)
rows = {value for value, in faux_conn.execute(select(table.c.id).where(expr))}
rows = {value for value, in faux_conn.execute(select([table.c.id]).where(expr))}
eq_(rows, expected)

@@ -180,3 +188,3 @@

expr = sqlalchemy.not_(expr)
rows = {value for value, in faux_conn.execute(select(table.c.id).where(expr))}
rows = {value for value, in faux_conn.execute(select([table.c.id]).where(expr))}
eq_(rows, all - expected)

@@ -202,3 +210,5 @@

expr = (table.c.x + table.c.y).label("lx")
stmt = select(sqlalchemy.func.count(table.c.id), expr).group_by(expr).order_by(expr)
stmt = (
select([sqlalchemy.func.count(table.c.id), expr]).group_by(expr).order_by(expr)
)
assert_result(faux_conn, stmt, [(1, 3), (1, 5), (1, 7)])

@@ -205,0 +215,0 @@

@@ -79,3 +79,3 @@ # Copyright (c) 2021 The PyBigQuery Authors

try:
conn.execute(select(lake_table))
conn.execute(select([lake_table]))
except Exception:

@@ -93,4 +93,4 @@ pass # sqlite had no special functions :)

conn.execute(
select(lake_table.c.name).where(
func.ST_Contains(lake_table.c.geog, "POINT(4 1)")
select(
[lake_table.c.name], func.ST_Contains(lake_table.c.geog, "POINT(4 1)")
)

@@ -109,3 +109,3 @@ )

conn.execute(
select(lake_table.c.name, lake_table.c.geog.ST_Area().label("area"))
select([lake_table.c.name, lake_table.c.geog.ST_Area().label("area")])
)

@@ -177,3 +177,3 @@ except Exception:

try:
faux_conn.execute(select(func.ST_GeogFromText("point(0 0)")))
faux_conn.execute(select([func.ST_GeogFromText("point(0 0)")]))
except Exception:

@@ -180,0 +180,0 @@ pass # sqlite had no special functions :)

@@ -23,2 +23,3 @@ # Copyright (c) 2021 The sqlalchemy-bigquery Authors

import packaging.version
import pytest

@@ -30,3 +31,9 @@ import sqlalchemy

from .conftest import setup_table
from .conftest import (
setup_table,
sqlalchemy_version,
sqlalchemy_1_3_or_higher,
sqlalchemy_1_4_or_higher,
sqlalchemy_before_1_4,
)

@@ -36,3 +43,3 @@

table = setup_table(faux_conn, "t", sqlalchemy.Column("id", sqlalchemy.Integer))
result = faux_conn.execute(sqlalchemy.select(table.c.id))
result = faux_conn.execute(sqlalchemy.select([table.c.id]))
assert result.keys() == ["id"] # Look! Just the column name!

@@ -153,3 +160,3 @@

assert list(map(list, faux_conn.execute(sqlalchemy.select(table)))) == [[val]] * 2
assert list(map(list, faux_conn.execute(sqlalchemy.select([table])))) == [[val]] * 2
assert faux_conn.test_data["execute"][-1][0] == "SELECT `t`.`foo` \nFROM `t`"

@@ -161,7 +168,3 @@

list,
faux_conn.execute(
sqlalchemy.select(table.c.foo).set_label_style(
sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL
)
),
faux_conn.execute(sqlalchemy.select([table.c.foo], use_labels=True)),
)

@@ -188,3 +191,3 @@ )

row = list(faux_conn.execute(sqlalchemy.select(table)))[0]
row = list(faux_conn.execute(sqlalchemy.select([table])))[0]
# We expect the raw string, because sqlite3, unlike BigQuery

@@ -197,3 +200,3 @@ # doesn't deserialize for us.

# Make sure label names are legal identifiers
faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(1).label("2foo")))
faux_conn.execute(sqlalchemy.select([sqlalchemy.literal(1).label("2foo")]))
assert (

@@ -212,3 +215,3 @@ faux_conn.test_data["execute"][-1][0] == "SELECT %(param_1:INT64)s AS `_2foo`"

)
faux_conn.execute(sqlalchemy.select(table))
faux_conn.execute(sqlalchemy.select([table]))
assert faux_conn.test_data["execute"][-1][0] == ("SELECT `t`.`foo` \nFROM `t`")

@@ -225,8 +228,22 @@

)
faux_conn.execute(sqlalchemy.select(table))
faux_conn.execute(sqlalchemy.select([table]))
assert faux_conn.test_data["execute"][-1][0] == ("SELECT `t`.foo \nFROM `t`")
@sqlalchemy_before_1_4
def test_select_in_lit_13(faux_conn):
[[isin]] = faux_conn.execute(
sqlalchemy.select([sqlalchemy.literal(1).in_([1, 2, 3])])
)
assert isin
assert faux_conn.test_data["execute"][-1] == (
"SELECT %(param_1:INT64)s IN "
"(%(param_2:INT64)s, %(param_3:INT64)s, %(param_4:INT64)s) AS `anon_1`",
{"param_1": 1, "param_2": 1, "param_3": 2, "param_4": 3},
)
@sqlalchemy_1_4_or_higher
def test_select_in_lit(faux_conn, last_query):
faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3])))
faux_conn.execute(sqlalchemy.select([sqlalchemy.literal(1).in_([1, 2, 3])]))
last_query(

@@ -239,43 +256,79 @@ "SELECT %(param_1:INT64)s IN UNNEST(%(param_2:INT64)s) AS `anon_1`",

def test_select_in_param(faux_conn, last_query):
faux_conn.execute(
[[isin]] = faux_conn.execute(
sqlalchemy.select(
sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))
[sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))]
),
dict(q=[1, 2, 3]),
)
if sqlalchemy_version >= packaging.version.parse("1.4"):
last_query(
"SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`",
{"param_1": 1, "q": [1, 2, 3]},
)
else:
assert isin
last_query(
"SELECT %(param_1:INT64)s IN UNNEST("
"[ %(q_1:INT64)s, %(q_2:INT64)s, %(q_3:INT64)s ]"
") AS `anon_1`",
{"param_1": 1, "q_1": 1, "q_2": 2, "q_3": 3},
)
last_query(
"SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`",
{"param_1": 1, "q": [1, 2, 3]},
)
def test_select_in_param1(faux_conn, last_query):
faux_conn.execute(
[[isin]] = faux_conn.execute(
sqlalchemy.select(
sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))
[sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))]
),
dict(q=[1]),
)
last_query(
"SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`",
{"param_1": 1, "q": [1]},
)
if sqlalchemy_version >= packaging.version.parse("1.4"):
last_query(
"SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`",
{"param_1": 1, "q": [1]},
)
else:
assert isin
last_query(
"SELECT %(param_1:INT64)s IN UNNEST(" "[ %(q_1:INT64)s ]" ") AS `anon_1`",
{"param_1": 1, "q_1": 1},
)
@sqlalchemy_1_3_or_higher
def test_select_in_param_empty(faux_conn, last_query):
faux_conn.execute(
[[isin]] = faux_conn.execute(
sqlalchemy.select(
sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))
[sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))]
),
dict(q=[]),
)
last_query(
"SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`",
{"param_1": 1, "q": []},
if sqlalchemy_version >= packaging.version.parse("1.4"):
last_query(
"SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`",
{"param_1": 1, "q": []},
)
else:
assert not isin
last_query(
"SELECT %(param_1:INT64)s IN UNNEST([ ]) AS `anon_1`", {"param_1": 1}
)
@sqlalchemy_before_1_4
def test_select_notin_lit13(faux_conn):
[[isnotin]] = faux_conn.execute(
sqlalchemy.select([sqlalchemy.literal(0).notin_([1, 2, 3])])
)
assert isnotin
assert faux_conn.test_data["execute"][-1] == (
"SELECT (%(param_1:INT64)s NOT IN "
"(%(param_2:INT64)s, %(param_3:INT64)s, %(param_4:INT64)s)) AS `anon_1`",
{"param_1": 0, "param_2": 1, "param_3": 2, "param_4": 3},
)
@sqlalchemy_1_4_or_higher
def test_select_notin_lit(faux_conn, last_query):
faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3])))
faux_conn.execute(sqlalchemy.select([sqlalchemy.literal(0).notin_([1, 2, 3])]))
last_query(

@@ -288,25 +341,41 @@ "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(param_2:INT64)s)) AS `anon_1`",

def test_select_notin_param(faux_conn, last_query):
faux_conn.execute(
[[isnotin]] = faux_conn.execute(
sqlalchemy.select(
sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True))
[sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True))]
),
dict(q=[1, 2, 3]),
)
last_query(
"SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`",
{"param_1": 1, "q": [1, 2, 3]},
)
if sqlalchemy_version >= packaging.version.parse("1.4"):
last_query(
"SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`",
{"param_1": 1, "q": [1, 2, 3]},
)
else:
assert not isnotin
last_query(
"SELECT (%(param_1:INT64)s NOT IN UNNEST("
"[ %(q_1:INT64)s, %(q_2:INT64)s, %(q_3:INT64)s ]"
")) AS `anon_1`",
{"param_1": 1, "q_1": 1, "q_2": 2, "q_3": 3},
)
@sqlalchemy_1_3_or_higher
def test_select_notin_param_empty(faux_conn, last_query):
faux_conn.execute(
[[isnotin]] = faux_conn.execute(
sqlalchemy.select(
sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True))
[sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True))]
),
dict(q=[]),
)
last_query(
"SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`",
{"param_1": 1, "q": []},
)
if sqlalchemy_version >= packaging.version.parse("1.4"):
last_query(
"SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`",
{"param_1": 1, "q": []},
)
else:
assert isnotin
last_query(
"SELECT (%(param_1:INT64)s NOT IN UNNEST([ ])) AS `anon_1`", {"param_1": 1}
)

@@ -321,3 +390,3 @@

)
q = sqlalchemy.select(table.c.val).where(table.c.val.in_([2]))
q = sqlalchemy.select([table.c.val]).where(table.c.val.in_([2]))

@@ -333,2 +402,3 @@ def nstr(q):

@sqlalchemy_1_4_or_higher
@pytest.mark.parametrize("alias", [True, False])

@@ -351,2 +421,3 @@ def test_unnest(faux_conn, alias):

@sqlalchemy_1_4_or_higher
@pytest.mark.parametrize("alias", [True, False])

@@ -370,2 +441,3 @@ def test_table_valued_alias_w_multiple_references_to_the_same_table(faux_conn, alias):

@sqlalchemy_1_4_or_higher
@pytest.mark.parametrize("alias", [True, False])

@@ -390,6 +462,10 @@ def test_unnest_w_no_table_references(faux_conn, alias):

)
got = str(sqlalchemy.select(t.c.a[0]).compile(faux_conn.engine))
got = str(sqlalchemy.select([t.c.a[0]]).compile(faux_conn.engine))
assert got == "SELECT `t`.`a`[OFFSET(%(a_1:INT64)s)] AS `anon_1` \nFROM `t`"
@pytest.mark.skipif(
packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"),
reason="regexp_match support requires version 1.4 or higher",
)
def test_visit_regexp_match_op_binary(faux_conn):

@@ -411,2 +487,6 @@ table = setup_table(

@pytest.mark.skipif(
packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"),
reason="regexp_match support requires version 1.4 or higher",
)
def test_visit_not_regexp_match_op_binary(faux_conn):

@@ -413,0 +493,0 @@ table = setup_table(

@@ -13,2 +13,3 @@ # Copyright 2021 The sqlalchemy-bigquery Authors

from google.cloud.bigquery.table import TableListItem
import packaging.version
import pytest

@@ -101,3 +102,3 @@ import sqlalchemy

mock_bigquery_client.list_tables.side_effect = tables_lists
table_names = sqlalchemy.inspect(engine_under_test).get_table_names()
table_names = engine_under_test.table_names()
mock_bigquery_client.list_datasets.assert_called_once()

@@ -231,3 +232,8 @@ assert mock_bigquery_client.list_tables.call_count == len(datasets_list)

assert isinstance(f.type, sqlalchemy.String)
assert isinstance(sqlalchemy.select(f).subquery().c.unnest.type, sqlalchemy.String)
if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse(
"1.4"
):
assert isinstance(
sqlalchemy.select([f]).subquery().c.unnest.type, sqlalchemy.String
)

@@ -234,0 +240,0 @@