BUG: data written with to_sql legacy mode (sqlite/mysql) not persistent GH6846 #6875

Merged (1 commit) on Apr 13, 2014
1 change: 1 addition & 0 deletions pandas/io/sql.py
@@ -785,6 +785,7 @@ def insert(self):
data.insert(0, self.maybe_asscalar(r[0]))
cur.execute(ins, tuple(data))
cur.close()
self.pd_sql.con.commit()

def _create_table_statement(self):
"Return a CREATE TABLE statement to suit the contents of a DataFrame."
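For context, a minimal standalone sketch (editorial, not part of the patch) of the symptom that the added self.pd_sql.con.commit() line fixes: Python's sqlite3 DBAPI does not autocommit INSERTs, so rows written through a cursor are rolled back when the connection is closed unless commit() is called. The legacy mysql path presumably hits the same issue, since MySQLdb connections also default to autocommit off. The file path and table name below are purely illustrative.

import os
import sqlite3
import tempfile

# illustrative temporary database file
path = os.path.join(tempfile.mkdtemp(), "demo.db")

conn = sqlite3.connect(path)
conn.execute("CREATE TABLE t (x INTEGER)")
conn.commit()                              # persist the table itself
conn.execute("INSERT INTO t VALUES (1)")
# conn.commit()                            # <- the step the legacy to_sql path was missing
conn.close()                               # closing without commit discards the INSERT

conn = sqlite3.connect(path)
print(conn.execute("SELECT COUNT(*) FROM t").fetchone())   # (0,) -- the row was lost
conn.close()

Uncommenting the commit() after the INSERT makes the second query return (1,), which is what the one-line change above guarantees for the legacy to_sql path.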
64 changes: 54 additions & 10 deletions pandas/io/tests/test_sql.py
@@ -364,7 +364,7 @@ def test_to_sql_append(self):
def test_to_sql_series(self):
s = Series(np.arange(5, dtype='int64'), name='series')
sql.to_sql(s, "test_series", self.conn, flavor='sqlite', index=False)
s2 = sql.read_sql("SELECT * FROM test_series", self.conn,
flavor='sqlite')
tm.assert_frame_equal(s.to_frame(), s2)

@@ -473,7 +473,7 @@ def connect(self):

def test_to_sql_index_label(self):
temp_frame = DataFrame({'col1': range(4)})

# no index name, defaults to 'pandas_index'
sql.to_sql(temp_frame, 'test_index_label', self.conn)
frame = sql.read_table('test_index_label', self.conn)
@@ -507,8 +507,52 @@ class TestSQLLegacyApi(_TestSQLApi):
"""
flavor = 'sqlite'

def connect(self):
return sqlite3.connect(':memory:')
def connect(self, database=":memory:"):
return sqlite3.connect(database)

def _load_test2_data(self):
columns = ['index', 'A', 'B']
data = [(
'2000-01-03 00:00:00', 2 ** 31 - 1, -1.987670),
('2000-01-04 00:00:00', -29, -0.0412318367011),
('2000-01-05 00:00:00', 20000, 0.731167677815),
('2000-01-06 00:00:00', -290867, 1.56762092543)]

self.test_frame2 = DataFrame(data, columns=columns)

def test_sql_open_close(self):
"""
Test if the IO in the database still works if the connection
is closed between the writing and reading (as in many real
situations).
"""
Review comment (Member): Can you put this here as 'normal' comments (with some #)?

Review comment (Member): To be clear: it is super that you put docstrings, certainly. But for some reason, when you run the test suite it displays the docstring if there is any, and you have a better overview when it uses just the names of the test functions, so that's the reason.

(A sketch of the suggested change follows the test body below.)

self._load_test2_data()

with tm.ensure_clean() as name:

conn = self.connect(name)

sql.to_sql(
self.test_frame2,
"test_frame2_legacy",
conn,
flavor="sqlite",
index=False,
)

conn.close()
conn = self.connect(name)

result = sql.read_sql(
"SELECT * FROM test_frame2_legacy;",
conn,
flavor="sqlite",
)

conn.close()

tm.assert_frame_equal(self.test_frame2, result)
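As a follow-up to the review comments above, a sketch (editorial, not part of the diff) of what the suggested change might look like: the same test with its docstring turned into # comments, so the test runner reports the function name rather than the docstring. It assumes the surrounding TestSQLLegacyApi class and the module-level sql and tm imports shown elsewhere in this file.

def test_sql_open_close(self):
    # Test if the IO in the database still works if the connection is
    # closed between the writing and reading (as in many real situations).
    self._load_test2_data()

    with tm.ensure_clean() as name:
        conn = self.connect(name)
        sql.to_sql(self.test_frame2, "test_frame2_legacy", conn,
                   flavor="sqlite", index=False)
        conn.close()

        conn = self.connect(name)
        result = sql.read_sql("SELECT * FROM test_frame2_legacy;", conn,
                              flavor="sqlite")
        conn.close()

    tm.assert_frame_equal(self.test_frame2, result)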


class _TestSQLAlchemy(PandasSQLTest):
@@ -601,7 +645,7 @@ def test_default_type_conversion(self):
self.assertTrue(issubclass(df.IntColWithNull.dtype.type, np.floating),
"IntColWithNull loaded with incorrect type")
# Bool column with NA values becomes object
self.assertTrue(issubclass(df.BoolColWithNull.dtype.type, np.object),
"BoolColWithNull loaded with incorrect type")

def test_default_date_load(self):
@@ -696,14 +740,14 @@ def test_default_type_conversion(self):
self.assertTrue(issubclass(df.IntColWithNull.dtype.type, np.floating),
"IntColWithNull loaded with incorrect type")
# Non-native Bool column with NA values stays as float
self.assertTrue(issubclass(df.BoolColWithNull.dtype.type, np.floating),
"BoolColWithNull loaded with incorrect type")

def test_default_date_load(self):
df = sql.read_table("types_test_data", self.conn)

# IMPORTANT - sqlite has no native date type, so shouldn't parse, but
self.assertFalse(issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")


@@ -865,21 +909,21 @@ def test_default_type_conversion(self):
"FloatCol loaded with incorrect type")
self.assertTrue(issubclass(df.IntCol.dtype.type, np.integer),
"IntCol loaded with incorrect type")
# MySQL has no real BOOL type (it's an alias for TINYINT)
self.assertTrue(issubclass(df.BoolCol.dtype.type, np.integer),
"BoolCol loaded with incorrect type")

# Int column with NA values stays as float
self.assertTrue(issubclass(df.IntColWithNull.dtype.type, np.floating),
"IntColWithNull loaded with incorrect type")
# Bool column with NA = int column with NA values => becomes float
self.assertTrue(issubclass(df.BoolColWithNull.dtype.type, np.floating),
"BoolColWithNull loaded with incorrect type")


class TestPostgreSQLAlchemy(_TestSQLAlchemy):
flavor = 'postgresql'

def connect(self):
return sqlalchemy.create_engine(
'postgresql+{driver}://postgres@localhost/pandas_nosetest'.format(driver=self.driver))