
Commit

Merge branch 'ploomber:master' into 658-snippet-support-sqlcmd
AnirudhVIyer authored Jul 14, 2023
2 parents ed2ff7c + 6d60df9 commit 466178d
Showing 6 changed files with 209 additions and 57 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -16,6 +16,7 @@
 * [Fix] Added support for `profile` and `explore` commands with saved snippets (#658)
 * [Doc] Re-organized sections. Adds section showing how to share notebooks via Ploomber Cloud
 * [Fix] Adding `--with` back because of issues with sqlglot query parser (#684)
+* [Fix] Improving << parsing logic (#610)


 ## 0.7.9 (2023-06-19)
39 changes: 11 additions & 28 deletions src/sql/parse.py
@@ -37,7 +37,6 @@ def parse(cell, config):
     We're grandfathering the
     connection string and `<<` operator in.
     """
-
     result = {
         "connection": "",
         "sql": "",
@@ -53,37 +52,21 @@
     if len(pieces) == 1:
         return result
     cell = pieces[1]
-    # handle no space situation around =
-    if pieces[0].endswith("=<<"):
-        result["result_var"] = pieces[0][:-3]
-        result["return_result_var"] = True
-        cell = pieces[1]

-    pieces = cell.split(None, 2)
-    # handle flexible spacing around <<
-    if len(pieces) > 1 and pieces[1] == "<<":
-        if pieces[0].endswith("="):
-            result["result_var"] = pieces[0][:-1]
-            result["return_result_var"] = True
-        else:
-            result["result_var"] = pieces[0]
+    pointer = cell.find("<<")
+    if pointer != -1:
+        left = cell[:pointer].replace(" ", "").replace("\n", "")
+        right = cell[pointer + 2 :].strip(" ")

-        if len(pieces) == 2:
-            return result
-        cell = pieces[2]
-    # handle flexible spacing around =<<
-    elif len(pieces) > 1 and (
-        (pieces[1] == "=<<") or (pieces[1] == "=" and pieces[2].startswith("<<"))
-    ):
-        result["result_var"] = pieces[0]
-        result["return_result_var"] = True
-        if pieces[1] == "=<<":
-            cell = pieces[2]
+        if "=" in left:
+            result["result_var"] = left[:-1]
+            result["return_result_var"] = True
         else:
-            pieces = cell.split(None, 3)
-            cell = pieces[3]
+            result["result_var"] = left

-    result["sql"] = cell
+        result["sql"] = right
+    else:
+        result["sql"] = cell
     return result

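For readers who want to try the new behaviour outside the library, here is a minimal standalone sketch of the `<<` handling introduced in this diff. It assumes the connection string has already been stripped from the cell, and the helper name `parse_result_var` is made up for illustration; it is not jupysql's public API.

# Minimal sketch of the "<<" handling added above (illustration only;
# `parse_result_var` is a hypothetical name, not jupysql's API).
def parse_result_var(cell):
    result = {"sql": "", "result_var": None, "return_result_var": False}
    pointer = cell.find("<<")
    if pointer != -1:
        # Everything left of "<<" names the result variable; spaces and newlines
        # are dropped, so "res <<", "res<<", "res = <<" and "res =<<" all parse.
        left = cell[:pointer].replace(" ", "").replace("\n", "")
        right = cell[pointer + 2 :].strip(" ")
        if "=" in left:
            # a trailing "=" means the result should also be returned to the cell
            result["result_var"] = left[:-1]
            result["return_result_var"] = True
        else:
            result["result_var"] = left
        result["sql"] = right
    else:
        result["sql"] = cell
    return result


for cell in ["res << SELECT 1", "res<<SELECT 1", "res = << SELECT 1", "res =<< SELECT 1"]:
    # every variant yields result_var="res" and sql="SELECT 1";
    # the "=" variants additionally set return_result_var=True
    print(parse_result_var(cell))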
49 changes: 49 additions & 0 deletions src/tests/integration/conftest.py
@@ -4,6 +4,7 @@
 from sqlalchemy import MetaData, Table, create_engine
 from sql import _testing
 import uuid
+import duckdb


 def pytest_addoption(parser):
@@ -222,6 +223,54 @@ def ip_with_SQLite(ip_empty, setup_SQLite):
     ip_empty.run_cell("%sql -x " + alias)


+@pytest.fixture(scope="session")
+def setup_duckDB_native(test_table_name_dict, skip_on_live_mode):
+    engine = duckdb.connect(database=":memory:", read_only=False)
+    return engine
+
+
+def load_generic_testing_data_duckdb_native(ip, test_table_name_dict):
+    ip.run_cell("import pandas as pd")
+    ip.run_cell(
+        f"""{test_table_name_dict['taxi']} = pd.DataFrame({{'taxi_driver_name':
+        ["Eric Ken", "John Smith", "Kevin Kelly"] * 15}} )"""
+    )
+    ip.run_cell(
+        f"""{test_table_name_dict['plot_something']} = pd.DataFrame(
+        {{"x": range(0, 5), "y": range(5, 10)}} )"""
+    )
+    ip.run_cell(
+        f"""{test_table_name_dict['numbers']} = pd.DataFrame(
+        {{"numbers_elements": [1, 2, 3] * 20}} )"""
+    )
+    return ip
+
+
+def teardown_generic_testing_data_duckdb_native(ip, test_table_name_dict):
+    ip.run_cell(f"del {test_table_name_dict['taxi']}")
+    ip.run_cell(f"del {test_table_name_dict['plot_something']}")
+    ip.run_cell(f"del {test_table_name_dict['numbers']}")
+    return ip
+
+
+@pytest.fixture
+def ip_with_duckDB_native(ip_empty, setup_duckDB_native, test_table_name_dict):
+    configKey = "duckDB"
+    alias = _testing.DatabaseConfigHelper.get_database_config(configKey)["alias"]
+
+    engine = setup_duckDB_native
+    ip_empty.push({"conn": engine})
+
+    ip_empty.run_cell("%sql conn" + " --alias " + alias)
+    ip_empty = load_generic_testing_data_duckdb_native(ip_empty, test_table_name_dict)
+    yield ip_empty
+
+    ip_empty = teardown_generic_testing_data_duckdb_native(
+        ip_empty, test_table_name_dict
+    )
+    ip_empty.run_cell("%sql --close " + alias)
+
+
 @pytest.fixture(scope="session")
 def setup_duckDB(test_table_name_dict, skip_on_live_mode):
     engine = create_engine(_testing.DatabaseConfigHelper.get_database_url("duckDB"))
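The `ip_with_duckDB_native` fixture added above registers a raw `duckdb.connect()` connection with `%sql` and pushes pandas DataFrames into the notebook namespace. It relies on DuckDB's native client being able to query an in-scope DataFrame by its variable name; below is a minimal standalone sketch of just that behaviour (the `taxi` name and data mirror the fixture's test data, but the sketch itself is not part of the test suite).

# Standalone sketch: DuckDB's native Python client can query a pandas
# DataFrame that is in scope, referring to it by its variable name.
import duckdb
import pandas as pd

taxi = pd.DataFrame(
    {"taxi_driver_name": ["Eric Ken", "John Smith", "Kevin Kelly"] * 15}
)

conn = duckdb.connect(database=":memory:", read_only=False)
print(conn.execute("SELECT count(*) FROM taxi").fetchone())  # (45,)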
56 changes: 38 additions & 18 deletions src/tests/integration/test_duckDB.py
@@ -1,29 +1,49 @@
 import logging
+import pytest


-def test_auto_commit_mode_on(ip_with_duckDB, caplog):
-    with caplog.at_level(logging.DEBUG):
-        ip_with_duckDB.run_cell("%config SqlMagic.autocommit=True")
-        ip_with_duckDB.run_cell("%sql CREATE TABLE weather4 (city VARCHAR,);")
-    assert caplog.record_tuples == [
+@pytest.mark.parametrize(
+    "ip, exp",
+    [
+        (
+            "ip_with_duckDB",
+            "'duckdb.DuckDBPyConnection' object has no attribute "
+            "'set_isolation_level'\n",
+        ),
         (
-            "root",
-            logging.DEBUG,
-            "The database driver doesn't support such AUTOCOMMIT "
-            "execution option\nPerhaps you can try running a manual "
-            "COMMIT command\nMessage from the database driver\n\t"
-            "Exception: 'duckdb.DuckDBPyConnection' object has no attribute"
-            " 'set_isolation_level'\n",
-        )
-    ]
+            "ip_with_duckDB_native",
+            "'CustomSession' object has no attribute '_has_events'",
+        ),
+    ],
+)
+def test_auto_commit_mode_on(ip, exp, caplog, request):
+    ip = request.getfixturevalue(ip)
+    with caplog.at_level(logging.DEBUG):
+        ip.run_cell("%config SqlMagic.autocommit=True")
+        ip.run_cell("%sql CREATE TABLE weather4 (city VARCHAR,);")
+    assert caplog.record_tuples[0][0] == "root"
+    assert caplog.record_tuples[0][1] == logging.DEBUG
+    assert (
+        "The database driver doesn't support such AUTOCOMMIT"
+        in caplog.record_tuples[0][2]
+    )
+    assert exp in caplog.record_tuples[0][2]


-def test_auto_commit_mode_off(ip_with_duckDB, caplog):
+@pytest.mark.parametrize(
+    "ip",
+    [
+        ("ip_with_duckDB"),
+        ("ip_with_duckDB_native"),
+    ],
+)
+def test_auto_commit_mode_off(ip, caplog, request):
+    ip = request.getfixturevalue(ip)
     with caplog.at_level(logging.DEBUG):
-        ip_with_duckDB.run_cell("%config SqlMagic.autocommit=False")
-        ip_with_duckDB.run_cell("%sql CREATE TABLE weather (city VARCHAR,);")
+        ip.run_cell("%config SqlMagic.autocommit=False")
+        ip.run_cell("%sql CREATE TABLE weather (city VARCHAR,);")
     # Check there is no message gets printed
     assert caplog.record_tuples == []
     # Check the tables is created
-    tables_out = ip_with_duckDB.run_cell("%sql SHOW TABLES;").result
+    tables_out = ip.run_cell("%sql SHOW TABLES;").result
     assert any("weather" == table[0] for table in tables_out)
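The refactor above reuses a single test body for both the SQLAlchemy-backed fixture and the new native-DuckDB fixture by parametrizing over fixture names and resolving them at runtime with `request.getfixturevalue` (fixtures cannot be passed directly as parametrize values). A minimal sketch of that pytest pattern, using hypothetical fixtures:

# Minimal sketch of the "parametrize over fixture names" pattern used above.
# The fixtures conn_a/conn_b are hypothetical stand-ins.
import pytest


@pytest.fixture
def conn_a():
    return "connection-a"


@pytest.fixture
def conn_b():
    return "connection-b"


@pytest.mark.parametrize("conn", ["conn_a", "conn_b"])
def test_both_connections(conn, request):
    conn = request.getfixturevalue(conn)  # resolve the named fixture at runtime
    assert conn.startswith("connection-")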
(The remaining two changed files are not shown here.)
