summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMicah Anderson <micah@riseup.net>2013-11-17 17:25:01 -0500
committerMicah Anderson <micah@riseup.net>2013-11-17 17:25:01 -0500
commit79d16d72cd530acbee682ebee44d5b1d2010c661 (patch)
treed04b84f51e5023cc7382ec91674e2967ffb533c7
initial import of debian package to build with autobuilderupstream
-rw-r--r--INSTALL.win32107
-rw-r--r--LICENSE8
-rw-r--r--MANIFEST.in1
-rw-r--r--README38
-rw-r--r--_sqlite.c2014
-rw-r--r--debian/README.Debian7
-rw-r--r--debian/changelog83
-rw-r--r--debian/compat1
-rw-r--r--debian/control42
-rw-r--r--debian/copyright64
-rw-r--r--debian/patches/dont-search-for-libraries-in-usr-lib.patch16
-rw-r--r--debian/patches/fix-space-before-keywords.patch16
-rw-r--r--debian/patches/fix-spelling.patch25
-rw-r--r--debian/patches/series3
-rw-r--r--debian/python-pysqlite1.1-dbg.lintian-overrides1
-rw-r--r--debian/python-pysqlite1.1.lintian-overrides1
-rwxr-xr-xdebian/rules19
-rw-r--r--debian/source/format1
-rw-r--r--debian/watch2
-rw-r--r--doc/rest/manual.txt388
-rw-r--r--encode.c245
-rw-r--r--examples/converters.py39
-rw-r--r--examples/dbapi_transactions.py107
-rw-r--r--examples/manual_transactions.py102
-rw-r--r--misc/multithreading_crash.py112
-rw-r--r--port/strsep.c73
-rw-r--r--port/strsep.h1
-rw-r--r--setup.py105
-rw-r--r--sqlite/__init__.py72
-rw-r--r--sqlite/main.py587
-rw-r--r--test/all_tests.py23
-rw-r--r--test/api_tests.py524
-rw-r--r--test/logging_tests.py80
-rw-r--r--test/lowlevel_tests.py162
-rw-r--r--test/pgresultset_tests.py202
-rw-r--r--test/testsupport.py14
-rw-r--r--test/transaction_tests.py106
-rw-r--r--test/type_tests.py342
-rw-r--r--test/userfunction_tests.py259
39 files changed, 5992 insertions, 0 deletions
diff --git a/INSTALL.win32 b/INSTALL.win32
new file mode 100644
index 0000000..01f6e70
--- /dev/null
+++ b/INSTALL.win32
@@ -0,0 +1,107 @@
+===========================
+Building PySQLite on Windows
+============================
+
+On Windows, Python's distutils defaults to the Visual C++ compiler from
+Microsoft. If you want to use other compilers for compiling Python extensions
+on Windows, look into chapter 3.1 "Using non-Microsoft compilers on Windows" in
+the "Installing Python Modules" of your Python documentation. It's available
+online at http://www.python.org/doc/current/inst/non-ms-compilers.html
+
+The following are build instructions for the GNU C compiler, Borland C++ and
+for Microsoft's Visual C++ environment.
+
+
+========================
+Using the GNU C compiler
+========================
+
+As you have read the Python documentation for non-Microsoft compilers by now,
+you have mingw or Cygwin installed and created the required import library for
+the Python DLL. Fine, let's continue.
+
+From http://www.hwaci.com/sw/sqlite/download.html get the sqlite_source.zip and
+sqlitedll.zip files. Unpack them all in the same directory.
+
+Create an import library for the GNU linker:
+
+$ dlltool --def sqlite.def --dllname sqlite.dll --output-lib libsqlite.a
+
+Unpack the PySQLite sources and open setup.py in your editor. Search for
+"win32". Change the include_dirs and library_dirs variable to point the place
+where you've unpacked the SQLite files and where you created the import
+library.
+
+Build PySQLite:
+
+$ python setup.py build --compiler=mingw32
+
+
+==========================
+Using the Borland compiler
+==========================
+
+As you have read the Python documentation for non-Microsoft compilers by now,
+you have installed the Borland C++ compiler and created the required import
+library for the Python DLL. Fine, let's continue.
+
+From http://www.hwaci.com/sw/sqlite/download.html get the sqlite_source.zip and
+sqlitedll.zip files. Unpack them all in the same directory.
+
+Create an import library for the Borland linker:
+
+$ implib -a sqlite.lib sqlite.dll
+
+Unpack the PySQLite sources and open setup.py in your editor. Search for
+"win32". Change the include_dirs and library_dirs variable to point the place
+where you've unpacked the SQLite files and where you created the import
+library.
+
+Build PySQLite:
+
+$ python setup.py build --compiler=bcpp
+
+
+==========================
+Using Microsoft Visual C++
+==========================
+
+From http://www.hwaci.com/sw/sqlite/download.html get the sqlite_source.zip and
+sqlitedll.zip files. Unpack them all in the same directory.
+
+Create an import library for the Microsoft linker:
+
+$ lib /def:sqlite.def
+
+Unpack the PySQLite sources and open setup.py in your editor. Search for
+"win32". Change the include_dirs and library_dirs variable to point the place
+where you've unpacked the SQLite files and where you created the import
+library.
+
+Build PySQLite:
+
+$ python setup.py build
+
+
+==================================
+What to do after building pySQLite
+==================================
+
+- Make sure the sqlite.dll can be found. Either copy it into your system
+ directory, somewhere else in your PATH or in the same directory as the .pyd
+ file from pySQLite.
+
+- Run the included test suite with these or similar commands:
+ $ copy ..\sqlite\sqlite.dll build\lib.win32-2.2
+ $ copy test\*.py build\lib.win32-2.2
+ $ cd build\lib.win32-2.2
+ $ python all_tests.py
+
+ All the tests should pass. If something goes wrong, report it to the pySQLite
+ developers using the Sourceforge bug tracker.
+
+- Install pySQLite:
+ $ python setup.py install
+
+ Again make sure the sqlite.dll can be found
+
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..a081ed9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,8 @@
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted, provided that
+the above copyright notice appear in all copies and that both that copyright
+notice and this permission notice appear in supporting documentation.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..8f49b92
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1 @@
+include port/strsep.h
diff --git a/README b/README
new file mode 100644
index 0000000..1e63c15
--- /dev/null
+++ b/README
@@ -0,0 +1,38 @@
+ _ _ _
+ _ __ _ _ ___ __ _| (_) |_ ___
+ | '_ \| | | / __|/ _` | | | __/ _ \
+ | |_) | |_| \__ \ (_| | | | || __/
+ | .__/ \__, |___/\__, |_|_|\__\___|
+ |_| |___/ |_|
+
+ A DB API v2.0 compatible interface to SQLite 3.0
+ Embedded Relational Database.
+ Copyright (c) 2001-2003
+ Michael Owens <mike@mikesclutter.com>
+ Gerhard Hring <gh@ghaering.de>
+
+Python versions supported: Python 2.1 or later
+SQLite version required: SQLite 3.0 or later
+
+Overview:
+
+This is an extension module for the SQLite embedded relational database. It
+tries to conform to the Python DB-API Spec v2 as far as possible. One problem
+is that SQLite returns everything as text. This is a result of SQLite's
+internal representation of data, however it still may be possible to return
+data in the type specified by the table definitions. I am still working on
+that, and will require some study of the SQLite source.
+
+Installation:
+
+Installation should be relatively simple. Following that prescribed by Python,
+it can be done in two steps:
+
+ python setup.py build
+ python setup.py install
+
+For building PySQLite on Windows, check out the file INSTALL.win32.
+
+Other:
+
+Check out the doc/rest and examples folders.
diff --git a/_sqlite.c b/_sqlite.c
new file mode 100644
index 0000000..9f98101
--- /dev/null
+++ b/_sqlite.c
@@ -0,0 +1,2014 @@
+/* _ _ _
+** _ __ _ _ ___ __ _| (_) |_ ___
+** | '_ \| | | / __|/ _` | | | __/ _ \
+** | |_) | |_| \__ \ (_| | | | || __/
+** | .__/ \__, |___/\__, |_|_|\__\___|
+** |_| |___/ |_|
+**
+** A DB API v2.0 compatible interface to SQLite
+** Embedded Relational Database.
+** Copyright (c) 2001-2003
+** Michael Owens <mike@mikesclutter.com>
+** Gerhard Hring <gh@ghaering.de>
+**
+** All Rights Reserved
+**
+** Permission to use, copy, modify, and distribute this software and its
+** documentation for any purpose and without fee is hereby granted, provided
+** that the above copyright notice appear in all copies and that both that
+** copyright notice and this permission notice appear in supporting
+** documentation,
+**
+** This program is distributed in the hope that it will be useful, but WITHOUT
+** ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+** FOR A PARTICULAR PURPOSE.
+*/
+
+#include "Python.h"
+#include "structmember.h"
+
+#include "sqlite3.h"
+
+#include "port/strsep.h"
+
+/* Compatibility macros
+ *
+ * From Python 2.2 to 2.3, the way to export the module init function
+ * has changed. These macros keep the code compatible to both ways.
+ */
+#if PY_VERSION_HEX >= 0x02030000
+# define PySQLite_DECLARE_MODINIT_FUNC(name) PyMODINIT_FUNC name(void)
+# define PySQLite_MODINIT_FUNC(name) PyMODINIT_FUNC name(void)
+#else
+# define PySQLite_DECLARE_MODINIT_FUNC(name) void name(void)
+# define PySQLite_MODINIT_FUNC(name) DL_EXPORT(void) name(void)
+#endif
+
+/*
+ * These are needed because there is no "official" way to specify
+ * WHERE to save the thread state. (At least not until Python 2.3)
+ */
+#ifdef WITH_THREAD
+# define MY_BEGIN_ALLOW_THREADS(st) \
+ { st = PyEval_SaveThread(); }
+# define MY_END_ALLOW_THREADS(st) \
+ { PyEval_RestoreThread(st); st = NULL; }
+#else
+# define MY_BEGIN_ALLOW_THREADS(st)
+# define MY_END_ALLOW_THREADS(st) { st = NULL; }
+#endif
+
+/* Workaround for mingw32 + Python 2.4: if we don't do this, mystrdup is used
+ * from msvcrt60.dll and the buffer is then free()-ed from msvcr71.dll, which
+ * results in a crash.
+ */
+#if defined(__MINGW32__)
+#define mystrdup _strdup
+#else
+#define mystrdup strdup
+#endif
+
+/*------------------------------------------------------------------------------
+** Object Declarations
+**------------------------------------------------------------------------------
+*/
+
+/** A connection object */
+typedef struct
+{
+ PyObject_HEAD
+ const char* database_name;
+ const char* sql;
+ sqlite3* p_db;
+ PyObject* converters;
+ PyObject* expected_types;
+ PyObject* command_logfile;
+ PyObject* busy_callback;
+ PyObject* busy_callback_param;
+ PyThreadState *tstate;
+} pysqlc;
+
+/** A result set object. */
+typedef struct
+{
+ PyObject_HEAD
+ pysqlc* con;
+ PyObject* p_row_list;
+ PyObject* p_col_def_list;
+ int row_count;
+} pysqlrs;
+
+/** Exception objects */
+
+static PyObject* _sqlite_Warning;
+static PyObject* _sqlite_Error;
+static PyObject* _sqlite_DatabaseError;
+static PyObject* _sqlite_InterfaceError;
+static PyObject* _sqlite_DataError;
+static PyObject* _sqlite_OperationalError;
+static PyObject* _sqlite_IntegrityError;
+static PyObject* _sqlite_InternalError;
+static PyObject* _sqlite_ProgrammingError;
+static PyObject* _sqlite_NotSupportedError;
+
+static int debug_callbacks = 1;
+
+#define PRINT_OR_CLEAR_ERROR if (debug_callbacks) PyErr_Print(); else PyErr_Clear();
+
+/* A tuple describing the minimum required SQLite version */
+static PyObject* required_sqlite_version;
+
+/*** Type codes */
+
+static PyObject* tc_INTEGER;
+static PyObject* tc_FLOAT;
+static PyObject* tc_TIMESTAMP;
+static PyObject* tc_DATE;
+static PyObject* tc_TIME;
+static PyObject* tc_INTERVAL;
+static PyObject* tc_STRING;
+static PyObject* tc_UNICODESTRING;
+static PyObject* tc_BINARY;
+
+/*------------------------------------------------------------------------------
+** Function Prototypes
+**------------------------------------------------------------------------------
+*/
+
+static int process_record(sqlite3_stmt* statement, void* p_data, int num_fields, char** p_fields, char** p_col_names);
+
+PySQLite_DECLARE_MODINIT_FUNC(init_sqlite);
+static int _seterror(sqlite3* db);
+static void _con_dealloc(pysqlc *self);
+static PyObject* sqlite_version_info(PyObject* self, PyObject* args);
+static PyObject* pysqlite_connect(PyObject *self, PyObject *args, PyObject *kwargs);
+static PyObject* sqlite_library_version(PyObject *self, PyObject *args);
+static PyObject* sqlite_enable_callback_debugging(PyObject *self, PyObject *args);
+static PyObject* pysqlite_encode(PyObject *self, PyObject *args);
+static PyObject* pysqlite_decode(PyObject *self, PyObject *args);
+
+/* Defined in encode.c */
+int sqlite_encode_binary(const unsigned char *in, int n, unsigned char *out);
+int sqlite_decode_binary(const unsigned char *in, unsigned char *out);
+
+/** Connection Object Methods */
+static PyObject* _con_get_attr(pysqlc *self, char *attr);
+static PyObject* _con_close(pysqlc *self, PyObject *args);
+static PyObject* _con_execute(pysqlc *self, PyObject *args);
+static PyObject* _con_register_converter(pysqlc* self, PyObject *args, PyObject* kwargs);
+static PyObject* _con_set_expected_types(pysqlc* self, PyObject *args, PyObject* kwargs);
+static PyObject* _con_create_function(pysqlc *self, PyObject *args, PyObject *kwargs);
+static PyObject* _con_create_aggregate(pysqlc *self, PyObject *args, PyObject *kwargs);
+static PyObject* _con_sqlite_exec(pysqlc *self, PyObject *args, PyObject *kwargs);
+static PyObject* _con_sqlite_last_insert_rowid(pysqlc *self, PyObject *args);
+static PyObject* _con_sqlite_changes(pysqlc *self, PyObject *args);
+static PyObject* _con_sqlite_busy_handler(pysqlc* self, PyObject *args, PyObject* kwargs);
+static PyObject* _con_sqlite_busy_timeout(pysqlc* self, PyObject *args, PyObject* kwargs);
+static PyObject* _con_set_command_logfile(pysqlc* self, PyObject *args, PyObject* kwargs);
+
+/** Result set Object Methods */
+static void _rs_dealloc(pysqlrs* self);
+static PyObject* _rs_get_attr(pysqlrs* self, char *attr);
+
+#ifdef _MSC_VER
+#define staticforward extern
+#endif
+
+staticforward PyMethodDef _con_methods[];
+staticforward struct memberlist _con_memberlist[];
+
+PyTypeObject pysqlc_Type =
+{
+ PyObject_HEAD_INIT(NULL)
+ 0,
+ "Connection",
+ sizeof(pysqlc),
+ 0,
+ (destructor) _con_dealloc,
+ 0,
+ (getattrfunc) _con_get_attr,
+ (setattrfunc) NULL,
+};
+
+PyTypeObject pysqlrs_Type =
+{
+ PyObject_HEAD_INIT(NULL)
+ 0,
+ "ResultSet",
+ sizeof(pysqlrs),
+ 0,
+ (destructor) _rs_dealloc,
+ 0,
+ (getattrfunc) _rs_get_attr,
+ (setattrfunc) NULL,
+};
+
+static void
+_con_dealloc(pysqlc* self)
+{
+ if(self)
+ {
+ if(self->p_db != 0)
+ {
+ /* Close the database */
+ sqlite3_close(self->p_db);
+ self->p_db = 0;
+ }
+
+ if(self->sql != NULL)
+ {
+ /* Free last SQL statement string */
+ free((void*)self->sql);
+ self->sql = NULL;
+ }
+
+ if(self->database_name != NULL)
+ {
+ /* Free database name string */
+ free((void*)self->database_name);
+ self->database_name = NULL;
+ }
+
+ Py_DECREF(self->converters);
+ Py_DECREF(self->expected_types);
+ Py_DECREF(self->command_logfile);
+ Py_DECREF(self->busy_callback);
+ Py_DECREF(self->busy_callback_param);
+
+ PyObject_Del(self);
+ }
+}
+
+static char pysqlite_connect_doc[] =
+"connect(db, mode=0777) -> Connection.\n\
+Opens a new database connection.";
+
+/* return a new instance of sqlite_connection */
+PyObject* pysqlite_connect(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ const char* db_name = 0;
+ int mode = 0777;
+ pysqlc* obj;
+ int rc;
+
+ static char *kwlist[] = { "filename", "mode", NULL };
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|i:pysqlite_connect",
+ kwlist, &db_name, &mode))
+ {
+ return NULL;
+ }
+
+ if ((obj = PyObject_New(pysqlc, &pysqlc_Type)) == NULL)
+ {
+ return NULL;
+ }
+
+ /* Open the database */
+ rc = sqlite3_open(db_name, &obj->p_db);
+ if (rc != SQLITE_OK)
+ {
+ PyErr_SetString(_sqlite_DatabaseError, sqlite3_errmsg(obj->p_db));
+ return NULL;
+ }
+
+
+ /* Assign the database name */
+ if ((obj->database_name = mystrdup(db_name)) == NULL)
+ {
+ PyErr_SetString(PyExc_MemoryError, "Cannot allocate memory for database name.");
+ return NULL;
+ }
+
+ /* Init sql string to NULL */
+ obj->sql = NULL;
+
+ /* Set the thread state to NULL */
+ obj->tstate = NULL;
+
+ if ((obj->converters = PyDict_New()) == NULL)
+ {
+ PyErr_SetString(PyExc_MemoryError, "Cannot allocate memory for converters.");
+ return NULL;
+ }
+
+ Py_INCREF(Py_None);
+ obj->expected_types = Py_None;
+
+ Py_INCREF(Py_None);
+ obj->command_logfile = Py_None;
+
+ Py_INCREF(Py_None);
+ obj->busy_callback = Py_None;
+
+ Py_INCREF(Py_None);
+ obj->busy_callback_param = Py_None;
+
+ return (PyObject *) obj;
+}
+
+static PyObject* _con_get_attr(pysqlc *self, char *attr)
+{
+ PyObject *res;
+
+ res = Py_FindMethod(_con_methods, (PyObject *) self,attr);
+
+ if(NULL != res)
+ {
+ return res;
+ }
+ else
+ {
+ PyErr_Clear();
+ return PyMember_Get((char *) self, _con_memberlist, attr);
+ }
+}
+
+static char _con_close_doc [] =
+"close()\n\
+Close the database connection.";
+
+static PyObject* _con_close(pysqlc *self, PyObject *args)
+{
+ if (!PyArg_ParseTuple(args,""))
+ {
+ return NULL;
+ }
+
+ if(self->p_db != 0)
+ {
+ /* Close the database, ignore return code */
+ sqlite3_close(self->p_db);
+ self->p_db = 0;
+ }
+ else
+ {
+ PyErr_SetString(_sqlite_ProgrammingError, "Database is not open.");
+ return NULL;
+ }
+
+ Py_INCREF(Py_None);
+
+ return Py_None;
+}
+
+static void function_callback(sqlite3_context *context, int argc, sqlite3_value** params)
+{
+ int i;
+ PyObject* function_result;
+ PyObject* args;
+ PyObject* userdata;
+ PyObject* func;
+ PyObject* s;
+ char* cstr;
+ pysqlc* con;
+
+ userdata = (PyObject*)sqlite3_user_data(context);
+ func = PyTuple_GetItem(userdata, 0);
+ con = (pysqlc*)PyTuple_GetItem(userdata, 1);
+ MY_END_ALLOW_THREADS(con->tstate)
+
+ args = PyTuple_New(argc);
+ for (i = 0; i < argc; i++)
+ {
+ /* TODO: can params[i] ever be NULL? */
+ if (params[i] == NULL)
+ {
+ Py_INCREF(Py_None);
+ PyTuple_SetItem(args, i, Py_None);
+ }
+ else
+ {
+ cstr = (char*)sqlite3_value_text(params[i]);
+ if (cstr)
+ {
+ PyTuple_SetItem(args, i, PyString_FromString(cstr));
+ }
+ else
+ {
+ Py_INCREF(Py_None);
+ PyTuple_SetItem(args, i, Py_None);
+ }
+ }
+ }
+
+ function_result = PyObject_CallObject(func, args);
+ Py_DECREF(args);
+
+ if (PyErr_Occurred())
+ {
+ PRINT_OR_CLEAR_ERROR
+ sqlite3_result_error(context, NULL, -1);
+ MY_BEGIN_ALLOW_THREADS(con->tstate)
+ return;
+ }
+
+ if (function_result == Py_None)
+ {
+ sqlite3_result_null(context);
+ }
+ else
+ {
+ s = PyObject_Str(function_result);
+ sqlite3_result_text(context, PyString_AsString(s), -1, SQLITE_TRANSIENT);
+ Py_DECREF(s);
+ }
+
+ Py_DECREF(function_result);
+ MY_BEGIN_ALLOW_THREADS(con->tstate)
+}
+
+static void aggregate_step(sqlite3_context *context, int argc, sqlite3_value** params)
+{
+ int i;
+ PyObject* args;
+ PyObject* function_result;
+ PyObject* userdata;
+ PyObject* aggregate_class;
+ pysqlc* con;
+ PyObject** aggregate_instance;
+ PyObject* stepmethod;
+ char* strparam;
+
+ userdata = (PyObject*)sqlite3_user_data(context);
+ aggregate_class = PyTuple_GetItem(userdata, 0);
+
+ con = (pysqlc*)PyTuple_GetItem(userdata, 1);
+ MY_END_ALLOW_THREADS(con->tstate)
+
+ aggregate_instance = (PyObject**)sqlite3_aggregate_context(context, sizeof(PyObject*));
+
+ if (*aggregate_instance == 0) {
+ args = PyTuple_New(0);
+ *aggregate_instance = PyObject_CallObject(aggregate_class, args);
+ Py_DECREF(args);
+
+ if (PyErr_Occurred())
+ {
+ PRINT_OR_CLEAR_ERROR
+ MY_BEGIN_ALLOW_THREADS(con->tstate)
+ return;
+ }
+ }
+
+ stepmethod = PyObject_GetAttrString(*aggregate_instance, "step");
+ if (!stepmethod)
+ {
+ /* PRINT_OR_CLEAR_ERROR */
+ MY_BEGIN_ALLOW_THREADS(con->tstate)
+ return;
+ }
+
+ args = PyTuple_New(argc);
+ for (i = 0; i < argc; i++) {
+ strparam = (char*)sqlite3_value_text(params[i]);
+ if (!strparam)
+ {
+ Py_INCREF(Py_None);
+ PyTuple_SetItem(args, i, Py_None);
+ } else {
+ PyTuple_SetItem(args, i, PyString_FromString(strparam));
+ }
+ }
+
+ if (PyErr_Occurred())
+ {
+ PRINT_OR_CLEAR_ERROR
+ }
+
+ function_result = PyObject_CallObject(stepmethod, args);
+ Py_DECREF(args);
+ Py_DECREF(stepmethod);
+
+ if (function_result == NULL)
+ {
+ PRINT_OR_CLEAR_ERROR
+ /* Don't use sqlite_set_result_error here. Else an assertion in
+ * the SQLite code will trigger and create a core dump.
+ *
+ * This was true with SQLite 2.x. Not checked with 3.x, yet.
+ */
+ }
+ else
+ {
+ Py_DECREF(function_result);
+ }
+
+ MY_BEGIN_ALLOW_THREADS(con->tstate)
+}
+
+static void aggregate_finalize(sqlite3_context *context)
+{
+ PyObject* args;
+ PyObject* function_result;
+ PyObject* s;
+ PyObject** aggregate_instance;
+ PyObject* userdata;
+ pysqlc* con;
+ PyObject* aggregate_class;
+ PyObject* finalizemethod;
+
+ userdata = (PyObject*)sqlite3_user_data(context);
+ aggregate_class = PyTuple_GetItem(userdata, 0);
+ con = (pysqlc*)PyTuple_GetItem(userdata, 1);
+ MY_END_ALLOW_THREADS(con->tstate)
+
+ aggregate_instance = (PyObject**)sqlite3_aggregate_context(context, sizeof(PyObject*));
+
+ finalizemethod = PyObject_GetAttrString(*aggregate_instance, "finalize");
+
+ if (!finalizemethod)
+ {
+ PyErr_SetString(PyExc_ValueError, "finalize method missing");
+ goto error;
+ }
+
+ args = PyTuple_New(0);
+ function_result = PyObject_CallObject(finalizemethod, args);
+ Py_DECREF(args);
+ Py_DECREF(finalizemethod);
+
+ if (PyErr_Occurred())
+ {
+ PRINT_OR_CLEAR_ERROR
+ sqlite3_result_error(context, NULL, -1);
+ }
+ else if (function_result == Py_None)
+ {
+ Py_DECREF(function_result);
+ sqlite3_result_null(context);
+ }
+ else
+ {
+ s = PyObject_Str(function_result);
+ Py_DECREF(function_result);
+ sqlite3_result_text(context, PyString_AsString(s), -1, SQLITE_TRANSIENT);
+ Py_DECREF(s);
+ }
+
+error:
+ Py_XDECREF(*aggregate_instance);
+
+ MY_BEGIN_ALLOW_THREADS(con->tstate)
+}
+
+static char _con_sqlite_busy_handler_doc[] =
+"sqlite_busy_handler(func, data)\n\
+Register a busy handler.\n\
+\n\
+ The sqlite_busy_handler() procedure can be used to register a busy\n\
+ callback with an open SQLite database. The busy callback will be invoked\n\
+ whenever SQLite tries to access a database that is locked. The callback\n\
+ will typically do some other useful work, or perhaps sleep, in order to\n\
+ give the lock a chance to clear. If the callback returns non-zero, then\n\
+ SQLite tries again to access the database and the cycle repeats. If the\n\
+ callback returns zero, then SQLite aborts the current operation and returns\n\
+ SQLITE_BUSY, which PySQLite will make throw an OperationalError.\n\
+ \n\
+ The arguments to sqlite_busy_handler() are the callback function (func) and\n\
+ an additional argument (data) that will be passed to the busy callback\n\
+ function.\n\
+ \n\
+ When the busy callback is invoked, it is sent three arguments. The first\n\
+ argument will be the 'data' that was set as the third argument to\n\
+ sqlite_busy_handler. The second will be the name of the database table or\n\
+ index that SQLite was trying to access and the third one will be the number\n\
+ of times that the library has attempted to access the database table or\n\
+ index.";
+
+static PyObject* _con_sqlite_busy_handler(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+ static char *kwlist[] = {"func", "data", NULL};
+ PyObject* func;
+ PyObject* data = Py_None;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O:sqlite_busy_handler",
+ kwlist, &func, &data))
+ {
+ return NULL;
+ }
+
+ Py_DECREF(self->busy_callback);
+ Py_INCREF(func);
+ self->busy_callback = func;
+
+ Py_DECREF(self->busy_callback_param);
+ Py_INCREF(data);
+ self->busy_callback_param = data;
+
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+static char _con_sqlite_busy_timeout_doc[] =
+"Not supported any longer";
+
+static PyObject* _con_sqlite_busy_timeout(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+ PyErr_SetString(_sqlite_InterfaceError,
+ "This method is not supported any longer.\n\
+You will have to write a timeout handler yourself and register it\n\
+with sqlite_busy_handler.");
+ return NULL;
+}
+
+static char _con_create_function_doc[] =
+"create_function(name, n_args, func)\n\
+Create a new SQL function.\n\
+\n\
+ A new function under the name 'name', with 'n_args' arguments is created.\n\
+ The callback 'func' will be called for this function.";
+
+static PyObject* _con_create_function(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+ int n_args;
+ char* name;
+ PyObject* func;
+ PyObject* userdata;
+ static char *kwlist[] = {"name", "n_args", "func", NULL};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "siO:create_function",
+ kwlist, &name, &n_args,
+ &func))
+ {
+ return NULL;
+ }
+
+ if (!(userdata = PyTuple_New(2))) return NULL;
+ Py_INCREF(func);
+ PyTuple_SetItem(userdata, 0, func);
+ Py_INCREF(self);
+ PyTuple_SetItem(userdata, 1, (PyObject*)self);
+
+ if (!PyCallable_Check(func))
+ {
+ PyErr_SetString(PyExc_ValueError, "func must be a callable!");
+ return NULL;
+ }
+
+ Py_INCREF(func);
+ if (0 != sqlite3_create_function(self->p_db, name, n_args, SQLITE_UTF8, (void*)userdata, &function_callback, 0, 0))
+ {
+ PyErr_SetString(_sqlite_ProgrammingError, "Cannot create function.");
+ return NULL;
+ }
+ else
+ {
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+}
+
+static char _con_create_aggregate_doc[] =
+"create_aggregate(name, n_args, step_func, finalize_func)\n\
+Create a new SQL function.\n\
+\n\
+ A new aggregate function under the name 'name', with 'n_args' arguments to\n\
+ the 'step_func' function will be created. 'finalize_func' will be called\n\
+ without arguments for finishing the aggregate.";
+
+static PyObject* _con_create_aggregate(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+ PyObject* aggregate_class;
+
+ int n_args;
+ char* name;
+ static char *kwlist[] = { "name", "n_args", "aggregate_class", NULL };
+ PyObject* userdata;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "siO:create_aggregate",
+ kwlist, &name, &n_args, &aggregate_class))
+ {
+ return NULL;
+ }
+
+ if (!(userdata = PyTuple_New(2))) return NULL;
+ Py_INCREF(aggregate_class);
+ PyTuple_SetItem(userdata, 0, aggregate_class);
+ Py_INCREF(self);
+ PyTuple_SetItem(userdata, 1, (PyObject*)self);
+
+ if (0 != sqlite3_create_function(self->p_db, name, n_args, SQLITE_UTF8, (void*)userdata, 0, &aggregate_step, &aggregate_finalize))
+ {
+ PyErr_SetString(_sqlite_ProgrammingError, "Cannot create aggregate.");
+ return NULL;
+ }
+ else
+ {
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+}
+
+static char _con_set_command_logfile_doc[] =
+"set_command_logfile(logfile)\n\
+Registers a writable file-like object as logfile where all SQL commands\n\
+that get executed are written to.";
+
+static PyObject* _con_set_command_logfile(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+ PyObject* logfile;
+ PyObject* o;
+
+ static char *kwlist[] = { "logfile", NULL };
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:set_command_logfile",
+ kwlist, &logfile))
+ {
+ return NULL;
+ }
+
+ if (logfile == Py_None)
+ {
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+
+ o = PyObject_GetAttrString(logfile, "write");
+ if (!o)
+ {
+ PyErr_SetString(PyExc_ValueError, "logfile must have a 'write' attribute!");
+ return NULL;
+ }
+
+ if (!PyCallable_Check(o))
+ {
+ PyErr_SetString(PyExc_ValueError, "logfile must have a callable 'write' attribute!");
+ Py_DECREF(o);
+ return NULL;
+ }
+
+ Py_DECREF(o);
+ Py_INCREF(logfile);
+ self->command_logfile = logfile;
+
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+int sqlite_exec_callback(void* pArg, int argc, char **argv, char **columnNames)
+{
+ PyObject* parg;
+ PyObject* callback;
+ PyObject* arg1;
+ pysqlc* con;
+ PyObject* values;
+ PyObject* colnames;
+ PyObject* calling_args;
+ PyObject* function_result;
+ int i;
+
+ parg = (PyObject*)pArg;
+
+ callback = PyTuple_GetItem(parg, 0);
+ arg1 = PyTuple_GetItem(parg, 1);
+ con = (pysqlc*)PyTuple_GetItem(parg, 2);
+
+ MY_END_ALLOW_THREADS(con->tstate)
+
+ colnames = PyTuple_New(argc);
+ for (i = 0; i < argc; i++)
+ {
+ PyTuple_SetItem(colnames, i, PyString_FromString(columnNames[i]));
+ }
+
+ values = PyTuple_New(argc);
+ for (i = 0; i < argc; i++)
+ {
+ if (argv[i] == NULL)
+ {
+ Py_INCREF(Py_None);
+ PyTuple_SetItem(values, i, Py_None);
+ }
+ else
+ {
+ PyTuple_SetItem(values, i, PyString_FromString(argv[i]));
+ }
+ }
+
+ calling_args = PyTuple_New(3);
+ Py_INCREF(arg1);
+ PyTuple_SetItem(calling_args, 0, arg1);
+ PyTuple_SetItem(calling_args, 1, values);
+ PyTuple_SetItem(calling_args, 2, colnames);
+
+ function_result = PyObject_CallObject(callback, calling_args);
+ if (PyErr_Occurred())
+ {
+ PRINT_OR_CLEAR_ERROR
+ MY_BEGIN_ALLOW_THREADS(con->tstate)
+ return 1;
+ }
+
+ Py_DECREF(function_result);
+ Py_DECREF(calling_args);
+
+ MY_BEGIN_ALLOW_THREADS(con->tstate)
+ return 0;
+}
+
+static char _con_sqlite_exec_doc[] =
+"sqlite_exec(sql, func, arg, use_types=0)\n\
+Execute SQL.\n\
+\n\
+ Executes the SQL string 'sql' and uses the callback function 'func' for\n\
+ each returned row. The argument 'arg' will be passed to the callback\n\
+ function.\n\
+ \n\
+ The signature of the callback function is (arg, values, colnames{, types}).\n\
+ types is omitted unless use_types is true. If you use 'use_types', you\n\
+ MUST have issued 'pragma show_datatypes=ON' before.";
+
+static PyObject* _con_sqlite_exec(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+ static char *kwlist[] = {"sql", "func", "arg", "use_types", NULL};
+ char* sql;
+ PyObject* callback;
+ PyObject* arg1;
+ int use_types = 0;
+
+ PyObject* cb_args;
+
+ /* TODO add errmsg handling */
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "sOO|i:sqlite_exec",
+ kwlist, &sql, &callback, &arg1, &use_types))
+ {
+ return NULL;
+ }
+
+ cb_args = PyTuple_New(3);
+ Py_INCREF(callback);
+ Py_INCREF(arg1);
+ Py_INCREF(self);
+ PyTuple_SetItem(cb_args, 0, callback);
+ PyTuple_SetItem(cb_args, 1, arg1);
+ PyTuple_SetItem(cb_args, 2, (PyObject*)self);
+
+ MY_BEGIN_ALLOW_THREADS(self->tstate)
+ /* TODO: error condition is ignored here? */
+ sqlite3_exec(self->p_db, sql, &sqlite_exec_callback, cb_args, NULL);
+ MY_END_ALLOW_THREADS(self->tstate)
+
+ Py_DECREF(cb_args);
+
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+static PyObject* _con_sqlite_last_insert_rowid(pysqlc *self, PyObject *args)
+{
+ PyObject* value;
+
+ if (!PyArg_ParseTuple(args,""))
+ {
+ return NULL;
+ }
+
+ value = PyInt_FromLong((long)sqlite3_last_insert_rowid(self->p_db));
+
+ return value;
+}
+
+static PyObject* _con_sqlite_changes(pysqlc *self, PyObject *args)
+{
+ PyObject* value;
+
+ if (!PyArg_ParseTuple(args,""))
+ {
+ return NULL;
+ }
+
+ value = PyInt_FromLong((long)sqlite3_changes(self->p_db));
+
+ return value;
+}
+
+static PyObject * sqlite_library_version(PyObject *self, PyObject *args)
+{
+ if (!PyArg_ParseTuple(args, ""))
+ {
+ return NULL;
+ }
+
+ return Py_BuildValue("s", sqlite3_libversion());
+}
+
+static PyObject* sqlite_enable_callback_debugging(PyObject *self, PyObject *args)
+{
+ if (!PyArg_ParseTuple(args, "i", &debug_callbacks))
+ {
+ return NULL;
+ }
+
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+static char pysqlite_encode_doc[] =
+"encode(s) -> encoded binary string.\n\
+Encode binary string 's' for storage in SQLite.";
+
+static PyObject* pysqlite_encode(PyObject *self, PyObject *args)
+{
+ char *in, *out;
+ int n;
+ PyObject *res;
+
+ if (!PyArg_ParseTuple(args, "s#", &in, &n))
+ {
+ return NULL;
+ }
+
+ /* See comments in encode.c for details on maximum size of encoded data. */
+ out = malloc(2 + (257 * (sqlite_uint64)n) / 254);
+ if (out == NULL)
+ {
+ return PyErr_NoMemory();
+ }
+ sqlite_encode_binary((unsigned char*)in, n, (unsigned char*)out);
+ res = Py_BuildValue("s", out);
+ free(out);
+ return res;
+}
+
+static char pysqlite_decode_doc[] =
+"decode(s) -> decoded binary string.\n\
+Decode encoded binary string retrieved from SQLite.";
+
+/* Module-level decode(): inverse of encode().  Input is a NUL-terminated
+ * string as produced by sqlite_encode_binary(). */
+static PyObject* pysqlite_decode(PyObject *self, PyObject *args)
+{
+    char *in, *out;
+    int n;
+    PyObject *res;
+
+    if (!PyArg_ParseTuple(args, "s", &in))
+    {
+        return NULL;
+    }
+
+    /* Decoded string is always shorter than encoded string.
+     * Bugfix: allocate strlen(in) + 1 instead of strlen(in) -- for an
+     * empty input that was malloc(0), which may legally return NULL and
+     * was then mistaken for an out-of-memory condition. */
+    out = malloc(strlen(in) + 1);
+    if (out == NULL)
+    {
+        return PyErr_NoMemory();
+    }
+    n = sqlite_decode_binary((unsigned char*)in, (unsigned char*)out);
+    res = Py_BuildValue("s#", out, n);
+    free(out);
+    return res;
+}
+
+/**
+ * Checks the SQLite error code and sets the appropriate DB-API exception.
+ * Returns the error code (0 means no error occurred).
+ */
+static int _seterror(sqlite3* db)
+{
+    int errorcode = sqlite3_errcode(db);
+
+    switch (errorcode)
+    {
+        case SQLITE_OK:
+            PyErr_Clear();
+            break;
+
+        /* Out of memory maps to the standard MemoryError. */
+        case SQLITE_NOMEM:
+            (void)PyErr_NoMemory();
+            break;
+
+        /* Transient/environmental trouble -> OperationalError. */
+        case SQLITE_PERM:
+        case SQLITE_ABORT:
+        case SQLITE_BUSY:
+        case SQLITE_LOCKED:
+        case SQLITE_INTERRUPT:
+        case SQLITE_IOERR:
+        case SQLITE_PROTOCOL:
+            PyErr_SetString(_sqlite_OperationalError, sqlite3_errmsg(db));
+            break;
+
+        /* Library-internal malfunctions -> InternalError. */
+        case SQLITE_INTERNAL:
+        case SQLITE_NOTFOUND:
+        case SQLITE_EMPTY:
+            PyErr_SetString(_sqlite_InternalError, sqlite3_errmsg(db));
+            break;
+
+        /* Constraint and type-mismatch violations -> IntegrityError. */
+        case SQLITE_CONSTRAINT:
+        case SQLITE_MISMATCH:
+            PyErr_SetString(_sqlite_IntegrityError, sqlite3_errmsg(db));
+            break;
+
+        case SQLITE_TOOBIG:
+            PyErr_SetString(_sqlite_DataError, sqlite3_errmsg(db));
+            break;
+
+        case SQLITE_MISUSE:
+            PyErr_SetString(_sqlite_ProgrammingError, sqlite3_errmsg(db));
+            break;
+
+        /* SQLITE_ERROR, SQLITE_READONLY, SQLITE_CORRUPT, SQLITE_FULL,
+         * SQLITE_CANTOPEN, SQLITE_SCHEMA and anything unrecognized fall
+         * back to the generic DatabaseError. */
+        default:
+            PyErr_SetString(_sqlite_DatabaseError, sqlite3_errmsg(db));
+    }
+
+    return errorcode;
+}
+
+/* Execute all SQL statements in `sql` on connection `con`, handing each
+ * result row to process_record(), which appends it to `p_rset`.
+ *
+ * Returns the last SQLite result code.  A Python-level exception may
+ * additionally be pending; the caller is expected to consult
+ * PyErr_Occurred() and _seterror().
+ */
+static int my_sqlite3_exec(
+    pysqlc* con,            /* the PySQLite connection object */
+    const char *sql,        /* SQL to be executed */
+    pysqlrs* p_rset         /* 1st argument to callback function */
+)
+{
+    sqlite3* db;
+    int rc;
+    sqlite3_stmt* statement;
+    const char* tail;
+    int abort;
+    int num_fields;
+    char** p_fields;
+    char** p_col_names;
+    int i;
+    char* data;
+    int busy_counter;
+    PyObject* cbargs;
+    PyObject* cb_result;
+    PyObject* remaining_string;
+    PyObject* remaining_string_stripped;
+    PyObject* func_args;
+    PyObject* stripmethod;
+    int remaining_size;
+    char* coltype;
+    PyObject* p_col_def;
+
+    tail = sql;
+    while (1)
+    {
+        db = con->p_db;
+        /* Compile the next statement; `tail` advances past it. */
+        MY_BEGIN_ALLOW_THREADS(con->tstate)
+        rc = sqlite3_prepare(db, tail, -1, &statement, &tail);
+        MY_END_ALLOW_THREADS(con->tstate)
+
+        if (rc != SQLITE_OK)
+        {
+            break;
+        }
+
+        /* first try at getting meta data, will be overwritten if we retrieve rows */
+        num_fields = sqlite3_column_count(statement);
+        p_fields = malloc(num_fields * sizeof(char*));
+        /* p_col_names holds column names in [0, num_fields) and declared
+         * types in [num_fields, 2*num_fields). */
+        p_col_names = malloc(2 * num_fields * sizeof(char*));
+        for (i = 0; i < num_fields; i++)
+        {
+            p_col_names[i] = (char*)sqlite3_column_name(statement, i);
+            p_col_names[num_fields + i] = NULL;
+        }
+
+        /* Step once; on SQLITE_BUSY ask the user's busy handler whether
+         * to retry. */
+        busy_counter = 0;
+        while (1)
+        {
+            busy_counter++;
+            MY_BEGIN_ALLOW_THREADS(con->tstate)
+            rc = sqlite3_step(statement);
+            MY_END_ALLOW_THREADS(con->tstate)
+            if (rc != SQLITE_BUSY)
+                break;
+
+            if (con->busy_callback != Py_None)
+            {
+                /* busy_callback(param, None, busy_counter) */
+                cbargs = PyTuple_New(3);
+                Py_INCREF(con->busy_callback_param);
+                PyTuple_SetItem(cbargs, 0, con->busy_callback_param);
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(cbargs, 1, Py_None);
+                PyTuple_SetItem(cbargs, 2, PyInt_FromLong((long)busy_counter));
+
+                cb_result = PyObject_CallObject(con->busy_callback, cbargs);
+                Py_DECREF(cbargs);
+
+                if (PyErr_Occurred())
+                {
+                    PRINT_OR_CLEAR_ERROR
+                    abort = 1;
+                }
+                else
+                {
+                    /* Bugfix: evaluate truth BEFORE dropping the
+                     * reference.  The old code called PyObject_IsTrue()
+                     * on an object it had already Py_DECREF'd. */
+                    abort = !PyObject_IsTrue(cb_result);
+                    Py_DECREF(cb_result);
+                }
+
+                if (abort)
+                    break;
+            }
+            else
+            {
+                /* No busy handler installed: give up immediately. */
+                break;
+            }
+        }
+
+        if (rc == SQLITE_ROW)
+        {
+            /* Rows available: rebuild metadata from the actual row,
+             * deriving a type name for undeclared columns. */
+            num_fields = sqlite3_data_count(statement);
+            free(p_fields);
+            free(p_col_names);
+            p_fields = malloc(num_fields * sizeof(char*));
+            p_col_names = malloc(2 * num_fields * sizeof(char*));
+            for (i = 0; i < num_fields; i++)
+            {
+                p_col_names[i] = (char*)sqlite3_column_name(statement, i);
+                coltype = (char*)sqlite3_column_decltype(statement, i);
+                if (!coltype)
+                {
+                    if (sqlite3_column_text(statement, i) != NULL)
+                    {
+                        switch (sqlite3_column_type(statement, i))
+                        {
+                            case SQLITE_INTEGER:
+                                coltype = "INTEGER";
+                                break;
+                            case SQLITE_FLOAT:
+                                coltype = "FLOAT";
+                                break;
+                            case SQLITE_BLOB:
+                                coltype = "BINARY";
+                                /* Bugfix: a missing break here used to
+                                 * fall through and relabel BLOB columns
+                                 * as TEXT. */
+                                break;
+                            case SQLITE_TEXT:
+                            default:
+                                coltype = "TEXT";
+                        }
+                    }
+                    else
+                    {
+                        coltype = NULL;
+                    }
+                }
+                p_col_names[num_fields + i] = coltype;
+            }
+
+            /* Feed every row to process_record() until done or aborted. */
+            for (;;)
+            {
+                for (i = 0; i < num_fields; i++)
+                {
+                    data = (char*)sqlite3_column_text(statement, i);
+                    p_fields[i] = data;
+                }
+
+                abort = process_record(statement, p_rset, num_fields, p_fields, p_col_names);
+                if (abort)
+                    break;
+
+                MY_BEGIN_ALLOW_THREADS(con->tstate)
+                rc = sqlite3_step(statement);
+                MY_END_ALLOW_THREADS(con->tstate)
+                /* TODO: check rc */
+                if (rc == SQLITE_DONE)
+                    break;
+            }
+        }
+        else if (rc != SQLITE_BUSY)
+        {
+            /* No rows: still publish a DB-API 7-tuple description built
+             * from the statement's column names; everything except the
+             * name is unknown without data, hence None. */
+            p_rset->p_col_def_list = PyTuple_New(num_fields);
+            for (i=0; i < num_fields; i++)
+            {
+                p_col_def = PyTuple_New(7);
+
+                /* 1. Column Name */
+                PyTuple_SetItem(p_col_def, 0, Py_BuildValue("s", p_col_names[i]));
+
+                /* 2. Type code */
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(p_col_def, 1, Py_None);
+
+                /* 3. Display Size */
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(p_col_def, 2, Py_None);
+
+                /* 4. Internal Size */
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(p_col_def, 3, Py_None);
+
+                /* 5. Precision */
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(p_col_def, 4, Py_None);
+
+                /* 6. Scale */
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(p_col_def, 5, Py_None);
+
+                /* 7. NULL Okay */
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(p_col_def, 6, Py_None);
+
+                PyTuple_SetItem(p_rset->p_col_def_list, i, p_col_def);
+            }
+        }
+
+        free(p_fields);
+        free(p_col_names);
+
+        rc = sqlite3_finalize(statement);
+
+        /* Because it worked for older PySQLite versions, we allow here to
+         * execute multiple statements per .execute() call.
+         * To do so, we strip the remaining string after `tail` and look if
+         * size != 0. If so, we loop again.
+         */
+        remaining_string = PyString_FromString(tail);
+        stripmethod = PyObject_GetAttrString(remaining_string, "strip");
+        func_args = PyTuple_New(0);
+        remaining_string_stripped = PyObject_CallObject(stripmethod, func_args);
+        Py_DECREF(func_args);
+        Py_DECREF(stripmethod);
+
+        remaining_size = PyString_Size(remaining_string_stripped);
+
+        Py_DECREF(remaining_string);
+        Py_DECREF(remaining_string_stripped);
+
+        if (remaining_size == 0) {
+            break;
+        }
+    }
+
+    return rc;
+}
+
+/* Connection.execute(sql) -> result set
+ *
+ * Execute one or more SQL statements and return a pysqlrs result-set
+ * object.  Also recognises the magic "-- types t1,t2,..." pragma, which
+ * primes expected_types for the next query instead of running SQL.
+ */
+static PyObject* _con_execute(pysqlc* self, PyObject *args)
+{
+    int ret;
+    char* sql;
+    pysqlrs* p_rset;
+    char* buf;
+    char* iterator;
+    char* token;
+    PyObject* logfile_writemethod;
+    PyObject* logfile_writeargs;
+
+    if(!PyArg_ParseTuple(args,"s:execute", &sql))
+    {
+        return NULL;
+    }
+
+    if(self->p_db == 0)
+    {
+        /* There is no open database. */
+        PyErr_SetString(_sqlite_ProgrammingError, "There is no open database.");
+        return NULL;
+    }
+
+    if(self->sql != NULL)
+    {
+        /* Free last SQL statement string */
+        free((void*)self->sql);
+        self->sql = NULL;
+    }
+
+    /* Save SQL statement */
+    self->sql = mystrdup(sql);
+
+    /* Log SQL statement followed by a newline, if a logfile is set. */
+    if (self->command_logfile != Py_None)
+    {
+        logfile_writemethod = PyObject_GetAttrString(self->command_logfile,
+                                                     "write");
+        logfile_writeargs = PyTuple_New(1);
+        PyTuple_SetItem(logfile_writeargs, 0, PyString_FromString(sql));
+
+        PyObject_CallObject(logfile_writemethod, logfile_writeargs);
+
+        Py_DECREF(logfile_writeargs);
+
+        logfile_writeargs = PyTuple_New(1);
+        PyTuple_SetItem(logfile_writeargs, 0, PyString_FromString("\n"));
+        PyObject_CallObject(logfile_writemethod, logfile_writeargs);
+
+        Py_DECREF(logfile_writeargs);
+        Py_DECREF(logfile_writemethod);
+
+        if (PyErr_Occurred())
+        {
+            free((void*)(self->sql));
+            self->sql = NULL;
+            return NULL;
+        }
+    }
+
+    p_rset = PyObject_New(pysqlrs, &pysqlrs_Type);
+    if (p_rset == NULL)
+    {
+        return NULL;
+    }
+
+    Py_INCREF(self);
+    p_rset->con = self;
+    p_rset->p_row_list = PyList_New(0);
+    p_rset->p_col_def_list = NULL;
+    p_rset->row_count = 0;
+
+    /* "-- types a,b,c" pragma: record the comma-separated type names in
+     * expected_types and return an empty result set. */
+    if (strstr(sql, "-- types "))
+    {
+        Py_DECREF(self->expected_types);
+        self->expected_types = PyList_New(0);
+        if (PyErr_Occurred())
+        {
+            Py_INCREF(Py_None);
+            self->expected_types = Py_None;
+            Py_DECREF(p_rset);  /* bugfix: result set was leaked here */
+            return NULL;
+        }
+
+        if ((buf = mystrdup(sql)) == NULL)
+        {
+            PyErr_SetString(PyExc_MemoryError, "Cannot allocate buffer for copying SQL statement!");
+            Py_DECREF(p_rset);  /* bugfix: result set was leaked here */
+            return NULL;
+        }
+
+        iterator = buf + strlen("-- types ");
+
+        if (*iterator == 0)
+        {
+            free(buf);
+            PyErr_SetString(PyExc_ValueError, "Illegal pragma!");
+            Py_DECREF(p_rset);  /* bugfix: result set was leaked here */
+            return NULL;
+        }
+
+        while (iterator != NULL)
+        {
+            token = pysqlite_strsep(&iterator, ",");
+            /* Skip leading blanks in each type name. */
+            while (*token == ' ')
+            {
+                token++;
+            }
+
+            PyList_Append(self->expected_types, Py_BuildValue("s", token));
+        }
+
+        free(buf);
+        p_rset->p_col_def_list = PyTuple_New(0);
+        return (PyObject*)p_rset;
+    }
+
+    /* Run a query: process_record is called back for each record returned. */
+    ret = my_sqlite3_exec( self,
+                           sql,
+                           p_rset);
+
+    /* NOTE(review): this check is intentionally disabled ("0 &&");
+     * errors are reported via _seterror() below instead. */
+    if (0 && ret)
+    {
+        PyErr_SetString(_sqlite_DatabaseError, sqlite3_errmsg(self->p_db));
+        return NULL;
+    }
+
+    /* Expected types apply to one query only; reset them to None. */
+    Py_DECREF(self->expected_types);
+    Py_INCREF(Py_None);
+    self->expected_types = Py_None;
+
+    /* Maybe there occurred an error in a user-defined function */
+    if (PyErr_Occurred())
+    {
+        free((void*)(self->sql));
+        self->sql = NULL;
+        Py_DECREF(p_rset);
+        return NULL;
+    }
+
+    if (p_rset->p_col_def_list == NULL)
+    {
+        p_rset->p_col_def_list = PyTuple_New(0);
+    }
+
+    if(_seterror(self->p_db))
+    {
+        free((void*)(self->sql));
+        self->sql = NULL;
+        Py_DECREF(p_rset);
+        return NULL;
+    }
+
+    return (PyObject*)p_rset;
+}
+
+/* Callback invoked by my_sqlite3_exec() for each result row.
+ *
+ * On the first row, builds the DB-API 7-tuple column descriptions from
+ * p_col_names (names in [0, num_fields), declared types in
+ * [num_fields, 2*num_fields)).  Then converts the row's text values to
+ * Python objects -- via registered converters when expected_types is
+ * set, otherwise by the column's inferred type code -- and appends the
+ * row tuple to the result set.  Returns non-zero to abort processing.
+ */
+int process_record(sqlite3_stmt* statement, void* p_data, int num_fields, char** p_fields, char** p_col_names)
+{
+    int i;
+    pysqlrs* p_rset;
+    PyObject* p_row;
+    PyObject* p_col_def;
+
+    int l, j;
+    char type_name[255];
+    PyObject* type_code;
+
+    PyObject* expected_types;
+    PyObject* expected_type_name = NULL;
+    PyObject* converters;
+    PyObject* converted;
+    PyObject* callable;
+    PyObject* callable_args;
+
+    p_rset = (pysqlrs*)p_data;
+
+    expected_types = p_rset->con->expected_types;
+    converters = p_rset->con->converters;
+
+    /* First row: build the column description tuples. */
+    if(p_rset->row_count == 0)
+    {
+        if ((p_rset->p_col_def_list = PyTuple_New(num_fields)) == NULL)
+        {
+            PRINT_OR_CLEAR_ERROR
+            /* Caller re-acquires the GIL via its own END macro, so
+             * release it here before aborting. */
+            MY_BEGIN_ALLOW_THREADS(p_rset->con->tstate)
+            return 1;
+        }
+
+        for (i=0; i < num_fields; i++)
+        {
+            p_col_def = PyTuple_New(7);
+
+            /* 1. Column Name */
+            PyTuple_SetItem(p_col_def, 0, Py_BuildValue("s", p_col_names[i]));
+
+            /* 2. Type code */
+            /* Make a copy of column type. */
+            if (p_col_names[num_fields + i] == NULL)
+            {
+                strcpy(type_name, "");
+            }
+            else
+            {
+                /* NOTE(review): strncpy() does not NUL-terminate when the
+                 * source is >= sizeof(type_name) - 1 chars and type_name
+                 * is uninitialized -- confirm declared type names are
+                 * always short, or terminate explicitly. */
+                strncpy(type_name, p_col_names[num_fields + i], sizeof(type_name) - 1);
+            }
+
+            /* Get its length. */
+            l = strlen(type_name);
+
+            /* Convert to uppercase. */
+            for (j=0; j < l; j++)
+            {
+                type_name[j] = toupper(type_name[j]);
+            }
+
+            /* Init/unset value */
+            type_code = NULL;
+
+            /* Try to determine column type by substring matching; order
+             * matters (e.g. INTERVAL before INT, TIMESTAMP before TIME). */
+            if (strcmp(type_name, "") == 0)
+            {
+                type_code = Py_None;
+            }
+            else if (strstr(type_name, "INTERVAL"))
+            {
+                type_code = tc_INTERVAL;
+            }
+            else if (strstr(type_name, "INT"))
+            {
+                type_code = tc_INTEGER;
+            }
+            else if (strstr(type_name, "UNICODE"))
+            {
+                type_code = tc_UNICODESTRING;
+            }
+            else if (strstr(type_name, "CHAR")
+                  || strstr(type_name, "TEXT"))
+            {
+                type_code = tc_STRING;
+            }
+            else if (strstr(type_name, "BINARY")
+                  || strstr(type_name, "BLOB"))
+            {
+                type_code = tc_BINARY;
+            }
+            else if (strstr(type_name, "FLOAT")
+                  || strstr(type_name, "NUMERIC")
+                  || strstr(type_name, "NUMBER")
+                  || strstr(type_name, "DECIMAL")
+                  || strstr(type_name, "REAL")
+                  || strstr(type_name, "DOUBLE"))
+            {
+                type_code = tc_FLOAT;
+            }
+            else if (strstr(type_name, "TIMESTAMP"))
+            {
+                type_code = tc_TIMESTAMP;
+            }
+            else if (strstr(type_name, "DATE"))
+            {
+                type_code = tc_DATE;
+            }
+            else if (strstr(type_name, "TIME"))
+            {
+                type_code = tc_TIME;
+            }
+            else
+            {
+                type_code = Py_None;
+            }
+
+            /* Assign type. */
+            Py_INCREF(type_code);
+            PyTuple_SetItem(p_col_def, 1, type_code);
+
+            /* 3. Display Size */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 2, Py_None);
+
+            /* 4. Internal Size */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 3, Py_None);
+
+            /* 5. Precision */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 4, Py_None);
+
+            /* 6. Scale */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 5, Py_None);
+
+            /* 7. NULL Okay */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 6, Py_None);
+
+            PyTuple_SetItem(p_rset->p_col_def_list, i, p_col_def);
+        }
+    }
+
+    if (p_fields != NULL)
+    {
+        /* Create a row */
+        p_row = PyTuple_New(num_fields);
+
+        p_rset->row_count++;
+
+        for (i=0; i < num_fields; i++)
+        {
+            /* Store the field value; non-NULL fields are converted,
+             * NULL fields become None (last branch below). */
+            if(p_fields[i] != 0)
+            {
+                p_col_def = PyTuple_GetItem(p_rset->p_col_def_list, i);
+
+                type_code = PyTuple_GetItem(p_col_def, 1);
+
+                /* Explicit expected_types override the column type. */
+                if (expected_types != Py_None)
+                {
+                    if (i < PySequence_Length(expected_types))
+                    {
+                        /* NOTE(review): PySequence_GetItem returns a new
+                         * reference that is never released in this
+                         * branch -- looks like a per-row leak; confirm. */
+                        expected_type_name = PySequence_GetItem(expected_types, i);
+                        callable = PyDict_GetItem(converters, expected_type_name);
+                        if (callable == NULL)
+                        {
+                            /* No converter registered for that name. */
+                            Py_INCREF(Py_None);
+                            PyTuple_SetItem(p_row, i, Py_None);
+                        }
+                        else
+                        {
+                            callable_args = PyTuple_New(1);
+                            PyTuple_SetItem(callable_args, 0, Py_BuildValue("s", p_fields[i]));
+
+                            converted = PyObject_CallObject(callable, callable_args);
+                            if (PyErr_Occurred())
+                            {
+                                PRINT_OR_CLEAR_ERROR
+                                Py_INCREF(Py_None);
+                                converted = Py_None;
+                            }
+
+                            PyTuple_SetItem(p_row, i, converted);
+
+                            Py_DECREF(callable_args);
+                        }
+                    }
+                    else
+                    {
+                        /* More columns than expected type names. */
+                        Py_INCREF(Py_None);
+                        PyTuple_SetItem(p_row, i, Py_None);
+                    }
+                }
+                else if (type_code == tc_INTEGER)
+                {
+                    PyTuple_SetItem(p_row, i, Py_BuildValue("i", atol(p_fields[i])));
+                }
+                else if (type_code == tc_FLOAT)
+                {
+                    PyTuple_SetItem(p_row, i, Py_BuildValue("f", atof(p_fields[i])));
+                }
+                else if (type_code == tc_DATE || type_code == tc_TIME
+                      || type_code == tc_TIMESTAMP || type_code == tc_INTERVAL)
+                {
+                    /* Temporal types go through the converter registered
+                     * under the matching lowercase name. */
+                    if (type_code == tc_DATE)
+                        expected_type_name = PyString_FromString("date");
+                    else if (type_code == tc_TIME)
+                        expected_type_name = PyString_FromString("time");
+                    else if (type_code == tc_TIMESTAMP)
+                        expected_type_name = PyString_FromString("timestamp");
+                    else if (type_code == tc_INTERVAL)
+                        expected_type_name = PyString_FromString("interval");
+
+                    callable = PyDict_GetItem(converters, expected_type_name);
+                    if (callable == NULL)
+                    {
+                        /* No converter: fall back to the raw string. */
+                        PyTuple_SetItem(p_row, i, PyString_FromString(p_fields[i]));
+                    }
+                    else
+                    {
+                        callable_args = PyTuple_New(1);
+                        PyTuple_SetItem(callable_args, 0, Py_BuildValue("s", p_fields[i]));
+
+                        converted = PyObject_CallObject(callable, callable_args);
+                        if (PyErr_Occurred())
+                        {
+                            PRINT_OR_CLEAR_ERROR
+                            converted = PyString_FromString(p_fields[i]);
+                        }
+
+                        PyTuple_SetItem(p_row, i, converted);
+
+                        Py_DECREF(callable_args);
+                    }
+
+                    Py_DECREF(expected_type_name);
+                }
+                else if ((type_code == tc_UNICODESTRING) || (type_code == tc_BINARY))
+                {
+                    if (type_code == tc_UNICODESTRING)
+                        expected_type_name = PyString_FromString("unicode");
+                    else
+                        expected_type_name = PyString_FromString("binary");
+
+                    callable = PyDict_GetItem(converters, expected_type_name);
+
+                    if (callable == NULL)
+                    {
+                        PyTuple_SetItem(p_row, i, PyString_FromString(p_fields[i]));
+                    }
+                    else
+                    {
+                        callable_args = PyTuple_New(1);
+                        PyTuple_SetItem(callable_args, 0, Py_BuildValue("s", p_fields[i]));
+
+                        converted = PyObject_CallObject(callable, callable_args);
+                        if (PyErr_Occurred())
+                        {
+                            PRINT_OR_CLEAR_ERROR
+                            converted = PyString_FromString(p_fields[i]);
+                        }
+
+                        PyTuple_SetItem(p_row, i, converted);
+
+                        Py_DECREF(callable_args);
+                    }
+
+                    Py_DECREF(expected_type_name);
+                }
+                else if (type_code == tc_STRING)
+                {
+                    PyTuple_SetItem(p_row, i, Py_BuildValue("s", p_fields[i]));
+                }
+                else
+                {
+                    /* type_code == None: use SQLite's own storage class. */
+                    switch (sqlite3_column_type(statement, i))
+                    {
+                        case SQLITE_INTEGER:
+                            PyTuple_SetItem(p_row, i, PyInt_FromLong((long)sqlite3_column_int(statement, i)));
+
+                            break;
+                        case SQLITE_FLOAT:
+                            PyTuple_SetItem(p_row, i, PyFloat_FromDouble(sqlite3_column_double(statement, i)));
+                            break;
+                        case SQLITE_NULL:
+                            Py_INCREF(Py_None);
+                            PyTuple_SetItem(p_row, i, Py_None);
+                            break;
+                        default:
+                            PyTuple_SetItem(p_row, i, PyString_FromString((char*)sqlite3_column_text(statement, i)));
+                    }
+                }
+            }
+            else
+            {
+                /* A NULL field */
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(p_row, i, Py_None);
+            }
+        }
+
+        PyList_Append(p_rset->p_row_list, p_row);
+        Py_DECREF(p_row);
+    }
+
+    return 0;
+}
+
+/* Connection.register_converter(name, converter)
+ *
+ * Register `converter` (any callable) under type name `name`; column
+ * values of that type will be passed through it.
+ */
+static PyObject* _con_register_converter(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    static char *kwlist[] = { "name", "converter", NULL };
+    PyObject* name;
+    PyObject* converter;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO:register_converter",
+                                     kwlist, &name, &converter))
+        return NULL;
+
+    if (!PyString_Check(name))
+    {
+        PyErr_SetString(PyExc_ValueError, "name must be a string");
+        return NULL;
+    }
+
+    PyDict_SetItem(self->converters, name, converter);
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+/* Connection.set_expected_types(types)
+ *
+ * Install the sequence of type names (or None to disable) used to
+ * convert the columns of the next query.
+ */
+static PyObject* _con_set_expected_types(
+    pysqlc* self,
+    PyObject *args,
+    PyObject* kwargs)
+{
+    static char *kwlist[] = {"types", NULL};
+    PyObject* types;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:set_expected_types", kwlist, &types))
+        return NULL;
+
+    if (types != Py_None && !PySequence_Check(types))
+    {
+        PyErr_SetString(PyExc_ValueError, "types must be a sequence");
+        return NULL;
+    }
+
+    /* Take the new reference before releasing the old one. */
+    Py_INCREF(types);
+    Py_DECREF(self->expected_types);
+    self->expected_types = types;
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+/*------------------------------------------------------------------------------
+** Result Set Object Implementation
+**------------------------------------------------------------------------------
+*/
+
+/* Attribute table for result-set objects (all read-only). */
+static struct memberlist _rs_memberlist[] =
+{
+    {"row_list", T_OBJECT, offsetof(pysqlrs, p_row_list), RO},
+    {"col_defs", T_OBJECT, offsetof(pysqlrs, p_col_def_list), RO},
+    {"rowcount", T_INT, offsetof(pysqlrs, row_count), RO},
+    {NULL}
+};
+
+/* Result sets expose no methods; attributes come from _rs_memberlist. */
+static PyMethodDef _rs_methods[] =
+{
+    { NULL, NULL}
+};
+
+/* Deallocate a result set: release the owned references (connection,
+ * row list, column definitions) and free the object itself. */
+static void
+_rs_dealloc(pysqlrs* self)
+{
+    if (!self)
+        return;
+
+    Py_DECREF(self->con);
+
+    if (self->p_row_list != 0)
+    {
+        Py_DECREF(self->p_row_list);
+        self->p_row_list = 0;
+    }
+
+    if (self->p_col_def_list != 0)
+    {
+        Py_DECREF(self->p_col_def_list);
+        self->p_col_def_list = 0;
+    }
+
+    PyObject_Del(self);
+}
+
+/* tp_getattr for result sets: look up methods first, then fall back to
+ * the read-only member table. */
+static PyObject* _rs_get_attr(pysqlrs *self, char *attr)
+{
+    PyObject *res = Py_FindMethod(_rs_methods, (PyObject *) self, attr);
+
+    if (res == NULL)
+    {
+        /* Not a method: clear the AttributeError and try the members. */
+        PyErr_Clear();
+        res = PyMember_Get((char *) self, _rs_memberlist, attr);
+    }
+
+    return res;
+}
+
+/* Module-level sqlite_version_info() -> tuple of ints, e.g. (3, 8, 2).
+ *
+ * Splits the run-time SQLite version string on "." and returns the
+ * numeric components as a tuple.
+ */
+static PyObject* sqlite_version_info(PyObject* self, PyObject* args)
+{
+    PyObject* vi_list;
+    PyObject* vi_tuple;
+    char* buf;
+    char* iterator;
+    char* token;
+
+    if (!PyArg_ParseTuple(args, ""))
+    {
+        return NULL;
+    }
+
+    /* Work on a private copy: pysqlite_strsep() modifies its argument. */
+    buf = mystrdup(sqlite3_libversion());
+    iterator = buf;
+
+    vi_list = PyList_New(0);
+
+    while ((token = pysqlite_strsep(&iterator, ".")) != NULL)
+    {
+        PyList_Append(vi_list, PyInt_FromLong((long)atoi(token)));
+    }
+
+    /* Bugfix: the strdup'ed version string was leaked on every call. */
+    free(buf);
+
+    vi_tuple = PyList_AsTuple(vi_list);
+    Py_DECREF(vi_list);
+
+    return vi_tuple;
+}
+
+
+/*------------------------------------------------------------------------------
+** Module Definitions / Initialization
+**------------------------------------------------------------------------------
+*/
+/* Module-level functions exported by the _sqlite extension module. */
+static PyMethodDef pysqlite_functions[] =
+{
+    { "connect", (PyCFunction)pysqlite_connect, METH_VARARGS | METH_KEYWORDS, pysqlite_connect_doc},
+    { "sqlite_version", (PyCFunction)sqlite_library_version, METH_VARARGS},
+    { "sqlite_version_info", (PyCFunction)sqlite_version_info, METH_VARARGS},
+    { "enable_callback_debugging", (PyCFunction)sqlite_enable_callback_debugging, METH_VARARGS},
+    { "encode", (PyCFunction)pysqlite_encode, METH_VARARGS, pysqlite_encode_doc},
+    { "decode", (PyCFunction)pysqlite_decode, METH_VARARGS, pysqlite_decode_doc},
+    { NULL, NULL }
+};
+
+/*------------------------------------------------------------------------------
+** Connection Object Implementation
+**------------------------------------------------------------------------------
+*/
+
+/* Read-only attributes of connection objects. */
+static struct memberlist _con_memberlist[] =
+{
+    {"sql", T_STRING, offsetof(pysqlc, sql), RO},
+    {"filename", T_STRING, offsetof(pysqlc, database_name), RO},
+    {NULL}
+};
+
+/* Methods exposed on connection objects. */
+static PyMethodDef _con_methods[] =
+{
+    {"close", (PyCFunction) _con_close, METH_VARARGS, _con_close_doc},
+    {"execute", (PyCFunction)_con_execute, METH_VARARGS},
+    {"register_converter", (PyCFunction)_con_register_converter, METH_VARARGS | METH_KEYWORDS},
+    {"set_expected_types", (PyCFunction)_con_set_expected_types, METH_VARARGS | METH_KEYWORDS},
+    {"set_command_logfile", (PyCFunction)_con_set_command_logfile, METH_VARARGS | METH_KEYWORDS, _con_set_command_logfile_doc},
+    {"create_function", (PyCFunction)_con_create_function, METH_VARARGS | METH_KEYWORDS, _con_create_function_doc},
+    {"create_aggregate", (PyCFunction)_con_create_aggregate, METH_VARARGS | METH_KEYWORDS, _con_create_aggregate_doc},
+    {"sqlite_exec", (PyCFunction)_con_sqlite_exec, METH_VARARGS | METH_KEYWORDS, _con_sqlite_exec_doc},
+    {"sqlite_last_insert_rowid", (PyCFunction)_con_sqlite_last_insert_rowid, METH_VARARGS},
+    {"sqlite_changes", (PyCFunction)_con_sqlite_changes, METH_VARARGS},
+    {"sqlite_busy_handler", (PyCFunction)_con_sqlite_busy_handler, METH_VARARGS | METH_KEYWORDS, _con_sqlite_busy_handler_doc},
+    {"sqlite_busy_timeout", (PyCFunction)_con_sqlite_busy_timeout, METH_VARARGS | METH_KEYWORDS, _con_sqlite_busy_timeout_doc},
+    { NULL, NULL}
+};
+
+/* Module initialisation: register the extension types, the DB-API type
+ * codes, and the exception hierarchy, and sanity-check the linked
+ * SQLite library version. */
+PySQLite_MODINIT_FUNC(init_sqlite)
+{
+    PyObject *module, *dict;
+    PyObject* sqlite_version;
+    PyObject* args;
+    long tc = 0L;    /* running counter used to assign distinct type codes */
+
+    pysqlc_Type.ob_type = &PyType_Type;
+    pysqlrs_Type.ob_type = &PyType_Type;
+
+    module = Py_InitModule("_sqlite", pysqlite_functions);
+
+    if (!(dict = PyModule_GetDict(module)))
+    {
+        goto error;
+    }
+
+    /* NOTE(review): the required version (2.5.6) predates SQLite 3, so
+     * this check is always satisfied when linked against SQLite 3.x. */
+    required_sqlite_version = PyTuple_New(3);
+    PyTuple_SetItem(required_sqlite_version, 0, PyInt_FromLong((long)2));
+    PyTuple_SetItem(required_sqlite_version, 1, PyInt_FromLong((long)5));
+    PyTuple_SetItem(required_sqlite_version, 2, PyInt_FromLong((long)6));
+
+    args = PyTuple_New(0);
+    sqlite_version = sqlite_version_info(NULL, args);
+    Py_DECREF(args);
+    if (PyObject_Compare(sqlite_version, required_sqlite_version) < 0)
+    {
+        Py_DECREF(sqlite_version);
+        PyErr_SetString(PyExc_ImportError, "Need to be linked against SQLite 2.5.6 or higher.");
+        return;
+    }
+    Py_DECREF(sqlite_version);
+
+    /*** Initialize type codes */
+    tc_INTEGER = PyInt_FromLong(tc++);
+    tc_FLOAT = PyInt_FromLong(tc++);
+    tc_TIMESTAMP = PyInt_FromLong(tc++);
+    tc_DATE = PyInt_FromLong(tc++);
+    tc_TIME = PyInt_FromLong(tc++);
+    tc_INTERVAL = PyInt_FromLong(tc++);
+    tc_STRING = PyInt_FromLong(tc++);
+    tc_UNICODESTRING = PyInt_FromLong(tc++);
+    tc_BINARY = PyInt_FromLong(tc++);
+
+    /* Expose the type codes as module attributes. */
+    PyDict_SetItemString(dict, "INTEGER", tc_INTEGER);
+    PyDict_SetItemString(dict, "FLOAT", tc_FLOAT);
+    PyDict_SetItemString(dict, "TIMESTAMP", tc_TIMESTAMP);
+    PyDict_SetItemString(dict, "DATE", tc_DATE);
+    PyDict_SetItemString(dict, "TIME", tc_TIME);
+    PyDict_SetItemString(dict, "INTERVAL", tc_INTERVAL);
+    PyDict_SetItemString(dict, "STRING", tc_STRING);
+    PyDict_SetItemString(dict, "UNICODESTRING", tc_UNICODESTRING);
+    PyDict_SetItemString(dict, "BINARY", tc_BINARY);
+
+    /*** Create DB-API Exception hierarchy */
+
+    _sqlite_Error = PyErr_NewException("_sqlite.Error", PyExc_StandardError, NULL);
+    PyDict_SetItemString(dict, "Error", _sqlite_Error);
+
+    _sqlite_Warning = PyErr_NewException("_sqlite.Warning", PyExc_StandardError, NULL);
+    PyDict_SetItemString(dict, "Warning", _sqlite_Warning);
+
+    /* Error subclasses */
+
+    _sqlite_InterfaceError = PyErr_NewException("_sqlite.InterfaceError", _sqlite_Error, NULL);
+    PyDict_SetItemString(dict, "InterfaceError", _sqlite_InterfaceError);
+
+    _sqlite_DatabaseError = PyErr_NewException("_sqlite.DatabaseError", _sqlite_Error, NULL);
+    PyDict_SetItemString(dict, "DatabaseError", _sqlite_DatabaseError);
+
+    /* DatabaseError subclasses */
+
+    _sqlite_InternalError = PyErr_NewException("_sqlite.InternalError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "InternalError", _sqlite_InternalError);
+
+    _sqlite_OperationalError = PyErr_NewException("_sqlite.OperationalError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "OperationalError", _sqlite_OperationalError);
+
+    _sqlite_ProgrammingError = PyErr_NewException("_sqlite.ProgrammingError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "ProgrammingError", _sqlite_ProgrammingError);
+
+    _sqlite_IntegrityError = PyErr_NewException("_sqlite.IntegrityError", _sqlite_DatabaseError,NULL);
+    PyDict_SetItemString(dict, "IntegrityError", _sqlite_IntegrityError);
+
+    _sqlite_DataError = PyErr_NewException("_sqlite.DataError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "DataError", _sqlite_DataError);
+
+    _sqlite_NotSupportedError = PyErr_NewException("_sqlite.NotSupportedError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "NotSupportedError", _sqlite_NotSupportedError);
+
+    /* Success also falls through here; PyErr_Occurred() distinguishes
+     * the failure case. */
+    error:
+
+    if (PyErr_Occurred())
+    {
+        PyErr_SetString(PyExc_ImportError, "sqlite: init failed");
+    }
+}
diff --git a/debian/README.Debian b/debian/README.Debian
new file mode 100644
index 0000000..e298b9f
--- /dev/null
+++ b/debian/README.Debian
@@ -0,0 +1,7 @@
+This package is built against SQLite version 3 but uses the same API
+as pysqlite 1.0.x (for SQLite 2, packaged as python-sqlite).
+
+Newer interfaces to SQLite 3 can be found in the package
+python-pysqlite2 and python-apsw.
+
+ -- Joel Rosdahl <joel@debian.org>, Fri Sep 9 17:50:47 2005
diff --git a/debian/changelog b/debian/changelog
new file mode 100644
index 0000000..bf1cbcf
--- /dev/null
+++ b/debian/changelog
@@ -0,0 +1,83 @@
+python-pysqlite1.1 (1.1.8a-7) unstable; urgency=low
+
+ * Don't use defunct googlecode.debian.net redirector anymore
+ * Don't explicitly search for libraries in /usr/lib (closes: #723508)
+ * Bump Standards-Version to 3.9.4
+ * Use build hardening flags
+ * Use debhelper sequencer
+
+ -- Joel Rosdahl <joel@debian.org> Sun, 22 Sep 2013 20:59:55 +0200
+
+python-pysqlite1.1 (1.1.8a-6) unstable; urgency=low
+
+ * Ignore space before non-modifying commands for auto-transactions
+
+ -- Joel Rosdahl <joel@debian.org> Mon, 19 Dec 2011 21:58:39 +0100
+
+python-pysqlite1.1 (1.1.8a-5) unstable; urgency=low
+
+ * Change priority to extra (closes: bug#641322)
+ * Let python-pysqlite1.1-dbg conflict with python-sqlite-dbg (closes:
+ bug#641323)
+
+ -- Joel Rosdahl <joel@debian.org> Sun, 18 Dec 2011 19:38:37 +0100
+
+python-pysqlite1.1 (1.1.8a-4) unstable; urgency=low
+
+ * Convert from dh_pycentral to dh_python2 (closes: #617023)
+ * Use debhelper compat level 8
+ * Bump Standards-Version to 3.9.2
+ * Pass --install-layout=deb to "setup.py install"
+ * Use source format 3.0 (quilt)
+ * Add build-arch and build-indep targets to debian/rules
+ * Add watch file
+ * Correct upstream source URL
+ * Use "Section: debug" and "Priority: extra" for the debug package
+ * Some spelling fixes in the sqlite module
+
+ -- Joel Rosdahl <joel@debian.org> Wed, 29 Jun 2011 22:31:59 +0200
+
+python-pysqlite1.1 (1.1.8a-3) unstable; urgency=low
+
+ * Rebuild to add Python 2.5 support.
+ * Build python-pysqlite1.1-dbg package.
+
+ -- Joel Rosdahl <joel@debian.org> Thu, 21 Jun 2007 22:41:27 +0200
+
+python-pysqlite1.1 (1.1.8a-2) unstable; urgency=low
+
+ * Removed DH_COMPAT variable from debian/rules to fix Lintian error.
+
+ -- Joel Rosdahl <joel@debian.org> Sun, 22 Apr 2007 21:55:57 +0200
+
+python-pysqlite1.1 (1.1.8a-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Joel Rosdahl <joel@debian.org> Fri, 3 Nov 2006 23:34:48 +0100
+
+python-pysqlite1.1 (1.1.7-2) unstable; urgency=low
+
+ * Adapt to new Python policy. Closes: bug#373347.
+ * Updated Standards-Version to 3.7.2.
+
+ -- Joel Rosdahl <joel@debian.org> Mon, 19 Jun 2006 18:11:33 +0000
+
+python-pysqlite1.1 (1.1.7-1) unstable; urgency=low
+
+ * New upstream release. Hopefully fixes bug#361507.
+ * Added build dependency on libsqlite3-dev >= 3.0.8.
+
+ -- Joel Rosdahl <joel@debian.org> Sat, 8 Apr 2006 22:18:11 +0200
+
+python-pysqlite1.1 (1.1.6-2) unstable; urgency=low
+
+ * Corrected build dependency on libsqlite3-dev. Closes: bug#331373.
+
+ -- Joel Rosdahl <joel@debian.org> Mon, 3 Oct 2005 10:44:53 +0200
+
+python-pysqlite1.1 (1.1.6-1) unstable; urgency=low
+
+ * New package. Closes: bug#304067, bug#327128.
+
+ -- Joel Rosdahl <joel@debian.org> Wed, 14 Sep 2005 10:19:22 +0200
diff --git a/debian/compat b/debian/compat
new file mode 100644
index 0000000..45a4fb7
--- /dev/null
+++ b/debian/compat
@@ -0,0 +1 @@
+8
diff --git a/debian/control b/debian/control
new file mode 100644
index 0000000..51b8a87
--- /dev/null
+++ b/debian/control
@@ -0,0 +1,42 @@
+Source: python-pysqlite1.1
+Section: python
+Priority: extra
+Maintainer: Joel Rosdahl <joel@debian.org>
+Build-Depends: debhelper (>= 8), libsqlite3-dev (>= 3.0.8), python-all-dev (>= 2.6.6-3~), python-all-dbg
+Standards-Version: 3.9.4
+
+Package: python-pysqlite1.1
+Architecture: any
+Depends: ${shlibs:Depends}, ${python:Depends}, ${misc:Depends}
+Provides: ${python:Provides}
+Conflicts: python2.3-pysqlite1.1, python2.4-pysqlite1.1, python-sqlite, python2.3-sqlite, python2.4-sqlite
+Replaces: python2.3-pysqlite1.1, python2.4-pysqlite1.1
+Suggests: python-pysqlite1.1-dbg
+Description: python interface to SQLite 3
+ pysqlite is an interface to the SQLite database server for
+ Python. It aims to be fully compliant with Python database
+ API version 2.0 while also exploiting the unique features of
+ SQLite.
+ .
+ pysqlite 1.1.x is built against SQLite 3 but uses the same API as
+ pysqlite 1.0.x (for SQLite 2, packaged as python-sqlite). Newer
+ interfaces to SQLite 3 can be found in the package python-pysqlite2
+ and python-apsw.
+
+Package: python-pysqlite1.1-dbg
+Section: debug
+Architecture: any
+Depends: python-pysqlite1.1 (= ${binary:Version}), python-dbg, ${shlibs:Depends}, ${misc:Depends}
+Conflicts: python-sqlite-dbg
+Description: python interface to SQLite 3 (debug extension)
+ pysqlite is an interface to the SQLite database server for
+ Python. It aims to be fully compliant with Python database
+ API version 2.0 while also exploiting the unique features of
+ SQLite.
+ .
+ pysqlite 1.1.x is built against SQLite 3 but uses the same API as
+ pysqlite 1.0.x (for SQLite 2, packaged as python-sqlite). Newer
+ interfaces to SQLite 3 can be found in the package python-pysqlite2
+ and python-apsw.
+ .
+ This package contains the extension built for the Python debug interpreter.
diff --git a/debian/copyright b/debian/copyright
new file mode 100644
index 0000000..092f735
--- /dev/null
+++ b/debian/copyright
@@ -0,0 +1,64 @@
+This Debian package was created by Joel Rosdahl <joel@debian.org>.
+
+The upstream source was found on the following address:
+
+ http://code.google.com/p/pysqlite/downloads/list
+
+Upstream authors:
+
+ Michael Owens <mike@mikesclutter.com>
+ Gerhard Häring <gh@ghaering.de>
+
+License:
+
+ License for all documentation and source code except port/strsep.c:
+
+ ==================================================================
+ Permission to use, copy, modify, and distribute this software and
+ its documentation for any purpose and without fee is hereby
+ granted, provided that the above copyright notice appear in all
+ copies and that both that copyright notice and this permission
+ notice appear in supporting documentation.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ ==================================================================
+
+ License for port/strsep.c:
+
+ ==================================================================
+ Copyright (c) 1990, 1993
+ The Regents of the University of California. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ 3. All advertising materials mentioning features or use of this
+ software must display the following acknowledgement: This
+ product includes software developed by the University of
+ California, Berkeley and its contributors.
+ 4. Neither the name of the University nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS
+ IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+ EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ ==================================================================
diff --git a/debian/patches/dont-search-for-libraries-in-usr-lib.patch b/debian/patches/dont-search-for-libraries-in-usr-lib.patch
new file mode 100644
index 0000000..47a22b8
--- /dev/null
+++ b/debian/patches/dont-search-for-libraries-in-usr-lib.patch
@@ -0,0 +1,16 @@
+Description: Don't search for libraries in /usr/lib.
+Author: Joel Rosdahl <joel@debian.org>
+
+Index: python-pysqlite1.1/setup.py
+===================================================================
+--- python-pysqlite1.1.orig/setup.py 2006-09-02 00:47:26.000000000 +0200
++++ python-pysqlite1.1/setup.py 2013-09-22 19:27:52.626779595 +0200
+@@ -12,7 +12,7 @@
+
+ if sys.platform in ("linux-i386", "linux2"): # most Linux
+ include_dirs = ['/usr/include/sqlite']
+- library_dirs = ['/usr/lib/']
++ library_dirs = []
+ libraries = [sqlite]
+ runtime_library_dirs = []
+ extra_objects = []
diff --git a/debian/patches/fix-space-before-keywords.patch b/debian/patches/fix-space-before-keywords.patch
new file mode 100644
index 0000000..0d44170
--- /dev/null
+++ b/debian/patches/fix-space-before-keywords.patch
@@ -0,0 +1,16 @@
+Description: Allow whitespace before keywords
+Author: Марк Коренберг <socketpair@gmail.com>
+
+Index: python-sqlite/sqlite/main.py
+===================================================================
+--- python-sqlite.orig/sqlite/main.py 2004-10-21 12:13:25.000000000 +0200
++++ python-sqlite/sqlite/main.py 2011-12-19 21:47:47.472609323 +0100
+@@ -233,7 +233,7 @@
+ if self.con.autocommit:
+ pass
+ else:
+- if not(self.con.inTransaction or SQL[:6].upper() in ("SELECT","VACUUM","DETACH")):
++ if not(self.con.inTransaction or SQL.lstrip()[:6].upper() in ("SELECT","VACUUM","DETACH")):
+ self.con._begin()
+ self.con.inTransaction = 1
+
diff --git a/debian/patches/fix-spelling.patch b/debian/patches/fix-spelling.patch
new file mode 100644
index 0000000..6bd9796
--- /dev/null
+++ b/debian/patches/fix-spelling.patch
@@ -0,0 +1,25 @@
+Description: Fix spelling.
+Author: Joel Rosdahl <joel@debian.org>
+
+Index: python-pysqlite1.1/_sqlite.c
+===================================================================
+--- python-pysqlite1.1.orig/_sqlite.c 2006-09-02 00:47:26.000000000 +0200
++++ python-pysqlite1.1/_sqlite.c 2011-06-29 21:58:20.473656828 +0200
+@@ -702,7 +702,7 @@
+
+ static char _con_set_command_logfile_doc[] =
+ "set_command_logfile(logfile)\n\
+-Registers a writeable file-like object as logfile where all SQL commands\n\
++Registers a writable file-like object as logfile where all SQL commands\n\
+ that get executed are written to.";
+
+ static PyObject* _con_set_command_logfile(pysqlc* self, PyObject *args, PyObject* kwargs)
+@@ -816,7 +816,7 @@
+ function.\n\
+ \n\
+ The signature of the callback function is (arg, values, colnames{, types}).\n\
+- types is ommitted unless use_types is true. If you use 'use_types', you\n\
++ types is omitted unless use_types is true. If you use 'use_types', you\n\
+ MUST have issued 'pragma show_datatypes=ON' before.";
+
+ static PyObject* _con_sqlite_exec(pysqlc* self, PyObject *args, PyObject* kwargs)
diff --git a/debian/patches/series b/debian/patches/series
new file mode 100644
index 0000000..1978b14
--- /dev/null
+++ b/debian/patches/series
@@ -0,0 +1,3 @@
+fix-space-before-keywords.patch
+fix-spelling.patch
+dont-search-for-libraries-in-usr-lib.patch
diff --git a/debian/python-pysqlite1.1-dbg.lintian-overrides b/debian/python-pysqlite1.1-dbg.lintian-overrides
new file mode 100644
index 0000000..1598b78
--- /dev/null
+++ b/debian/python-pysqlite1.1-dbg.lintian-overrides
@@ -0,0 +1 @@
+python-pysqlite1.1-dbg: hardening-no-fortify-functions usr/lib/python2.7/dist-packages/_sqlite_d.so
diff --git a/debian/python-pysqlite1.1.lintian-overrides b/debian/python-pysqlite1.1.lintian-overrides
new file mode 100644
index 0000000..8e3b2f1
--- /dev/null
+++ b/debian/python-pysqlite1.1.lintian-overrides
@@ -0,0 +1 @@
+python-pysqlite1.1: hardening-no-fortify-functions usr/lib/python2.7/dist-packages/_sqlite.so
diff --git a/debian/rules b/debian/rules
new file mode 100755
index 0000000..632c729
--- /dev/null
+++ b/debian/rules
@@ -0,0 +1,19 @@
+#!/usr/bin/make -f
+
+DPKG_EXPORT_BUILDFLAGS = 1
+include /usr/share/dpkg/buildflags.mk
+
+export BASECFLAGS := $(CFLAGS) $(CPPFLAGS)
+
+%:
+ dh $@ --with python2
+
+override_dh_install:
+ dh_install -X"*_d.so" "debian/tmp/*" -p python-pysqlite1.1
+ dh_install "debian/tmp/usr/lib/python*/*-packages/*_d.so" -p python-pysqlite1.1-dbg
+ dh_install
+
+override_dh_strip:
+ifeq (,$(filter nostrip,$(DEB_BUILD_OPTIONS)))
+ dh_strip --dbg-package=python-pysqlite1.1-dbg
+endif
diff --git a/debian/source/format b/debian/source/format
new file mode 100644
index 0000000..163aaf8
--- /dev/null
+++ b/debian/source/format
@@ -0,0 +1 @@
+3.0 (quilt)
diff --git a/debian/watch b/debian/watch
new file mode 100644
index 0000000..a19ac29
--- /dev/null
+++ b/debian/watch
@@ -0,0 +1,2 @@
+version=3
+http://code.google.com/p/pysqlite/downloads/list?can=1 .*/pysqlite-(1\.1.*)\.tar\.gz
diff --git a/doc/rest/manual.txt b/doc/rest/manual.txt
new file mode 100644
index 0000000..3d45b41
--- /dev/null
+++ b/doc/rest/manual.txt
@@ -0,0 +1,388 @@
+-----------------------------------------------------------------
+PySQLite: Python DB-API 2.0 Compliant Interface Module for SQLite
+-----------------------------------------------------------------
+
+These are the beginnings of a new manual.
+
+This document was last updated for PySQLite version 1.0.
+
+===============
+0. Front Matter
+===============
+
+0.1 Copyright notice and License
+--------------------------------
+
+(c) 2002 Michael Owens
+(c) 2002-2004 Gerhard Häring
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted, provided that
+the above copyright notice appear in all copies and that both that copyright
+notice and this permission notice appear in supporting documentation.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.
+
+0.2 Abstract
+------------
+
+SQLite is a powerful embedded relational database management system in a
+compact C library, developed by D. Richard Hipp. It offers support for a large
+subset of SQL92, multiple tables and indices, transactions, not to mention ODBC
+drivers. The library is self-contained and implemented in under 20,000 lines of
+code, which itself is uncopyrighted, and free to use for any purpose. It runs
+on a wide variety of platforms including Linux, FreeBSD, Windows, Windows CE
+and many others.
+
+PySQLite makes this powerful yet small database engine available to Python
+developers via the Python Database API Specification 2.0. The ambition is to
+expose all of SQLite's functionality in a pythonic way, and to offer additional
+ease-of-use features.
+
+===================================
+1. Building and installing PySQLite
+===================================
+
+1.1 Installing binary packages
+------------------------------
+
+The PySQLite developers currently offer binary packages for Microsoft Windows.
+Just be sure to pick the right one for your Python version (the correct
+download for Python 2.3.x will end in py2.3.exe, for example).
+
+Other binary packages are offered by third parties. Debian GNU/Linux for
+example has binaries for PySQLite in its repository.
+
+1.2 Installing PySQLite from source
+-----------------------------------
+
+First, make sure you have the SQLite library and header files installed. Unless
+it's packaged for your OS, this means building and installing SQLite from
+source. How to do this is beyond the scope of this manual. We'll refer you to
+the SQLite documentation instead. Just one important note: Be sure to compile
+SQLite with threads support. This means be sure that -DTHREADSAFE=1 is defined
+while compiling it.
+
+Next, be sure that you have a complete Python development environment for C
+extensions. This means:
+
+- the Python interpreter
+- the Python development libraries and headers: on some Linux distributions,
+ these need to be installed separately, the package will be called python-dev,
+ python-devel or some such
+- a C compiler, like the GNU C Compiler (gcc)
+
+Now you can try to build PySQLite with::
+
+ $ python setup.py build
+
+The paths to the SQLite headers and libraries should be found automatically, but
+if they're not, you'll have to accordingly edit setup.py manually.
+
+Once you got PySQLite built, it's time to install it. Normally you'll have to
+do this step as a system administrator (on Unix-like systems this means you'll
+have to become root)::
+
+ $ python setup.py install
+
+Now's a good time to run the included test suite::
+
+ $ cd test
+ $ python all_tests.py
+
+Be sure that all tests passed correctly.
+
+1.3 Make date functionality work
+--------------------------------
+
+If you plan to use the SQL types date, timestamp or interval you'll have to
+have the mxDateTime package from the eGenix mxExtensions installed.
+
+Get it here if you don't have it installed already:
+http://www.egenix.com/files/python/eGenix-mx-Extensions.html#Download-mxBASE
+
+If you're on a free Unix, your distribution most probably packages it for you,
+too.
+
+===============================
+2. Connecting to the database
+===============================
+
+2.1 Basic usage
+---------------
+
+Connecting to a database file "db"::
+
+ import sqlite
+ cx = sqlite.connect("db")
+
+
+2.2 Parameters explained
+------------------------
+
+In the most basic usage, we only used the database parameter, which is the
+database file we want SQLite to use, or ":memory:" if we want to use an in-RAM
+database.
+
+Of course there are more parameters::
+
+ def connect(database, mode=0755, converters={}, autocommit=0,
+ encoding=None, timeout=None, command_logfile=None)
+
+:mode: This parameter is passed through to SQLite and means the mode in which
+ to open the file. The idea is to be able to open the database file in
+ read-only mode. But it's currently ignored by SQLite so just forget
+ about it.
+
+:converters:
+ The converters are a mapping from SQL type names to Python conversion
+    callables. You'll only need to define any of these if you want to make
+    PySQLite handle a user-defined type of yours transparently.
+
+ => examples/converters.py
+
+:autocommit:
+ see => Transaction management
+
+:encoding:
+ This is an important parameter if you use unicode strings. It can either be
+ a simple encoding name, like "utf-8", or it can be a tuple of encoding name
+ and error policy, like ("utf-8", "ignore"), or ("utf-16", "replace"). Cf.
+ the documentation about the unicode builtin for possible error policies.
+ What it means is in which encoding to save unicode strings in the SQLite
+ database, and from which encoding to construct Unicode strings.
+
+:timeout:
+ A timeout value in milliseconds, for example timeout=1500. An SQLite
+    database can be locked by a different connection being in a transaction.
+ The timeout value means how long to wait for the lock to be released. If
+ after /timeout/ milliseconds the lock is still not released, a
+ DatabaseError will be thrown.
+
+:command_logfile:
+ A file-like object (anything that has a write method) where all statements
+ sent to the SQLite library will be logged into.
+
+
+
+=========================
+3. Transaction management
+=========================
+
+3.1 The standard, DB-API way
+----------------------------
+
+Transactions are opened "when necessary". PySQLite is optimized quite a bit at
+opening transactions as late as possible. I. e. when you have a sequence::
+
+ cu = cx.cursor() # (1)
+ cu.execute("select foo from bar") # (2)
+ cu.execute("update foo set bar=5 where blarg=3") # (3)
+ cx.commit() # (4)
+
+only line number 3 triggers the sending of a BEGIN statement to the SQLite
+library. That's because under SQLite, it is safe to use even multiple SELECT
+statements outside transactions. The reason is that a BEGIN will lock the whole
+database, so outside transactions, you will always get consistent data using
+SELECT statements.
+
+Ok, so the .execute() in line #3 sees that it has got a DML (data modification
+language) statement as SQL string and will transparently send a BEGIN before
+that to the SQLite engine. .commit() will send the corresponding COMMIT
+statement, of course. To roll back transactions instead, you'd use .rollback()
+on the connection object.
+
+see => examples/dbapi_transactions.py
+
+3.2 The manual way
+------------------
+
+If you used the parameter autocommit=1 in the sqlite.connect() call, PySQLite
+will not get in your way with respect to transactions. You can send
+BEGIN/COMMIT/ROLLBACK statements with the .execute() method of the cursor
+object as you like.
+
+see => examples/manual_transactions.py
+
+I don't recommend you actually use this option, unless you're implementing a
+transaction management system different from the DB-API one.
+
+=====================================
+4. Type Conversions Python <=> SQLite
+=====================================
+
+SQLite is a typeless database engine. Basically this means that apart from
+arithmetic operations, it only knows about strings. PySQLite goes a long way to
+work around this limitation of SQLite. Storing Python data in an SQLite
+database is not the problem, PySQLite will do this correctly for you for its
+supported Python types.
+
+The other way around is normally no problem either. In most cases, PySQLite can
+infer to which Python type it needs to convert the string data the SQLite
+engine delivers it. In other cases, however, where SQLite doesn't deliver
+enough type information or the wrong type information, you will have to help
+PySQLite guess right. This is what the next section is all about.
+
+4.1 Conversions SQLite => Python
+--------------------------------
+
+SQLite itself is typeless, it only knows about strings, and to some degree
+about numbers. So PySQLite has to work around this limitation. The conversion
+from string to the Python type we want works with a hidden dictionary called
+converters, which consists of the converters you registered in the .connect()
+call yourself, plus a few standard ones from PySQLite, listed below.
+
++-------------------------+-------------------+--------------------------------+
+| column types | converter name | converter callable |
++=========================+===================+================================+
+| \*CHAR\*, \*TEXT\* | str | str() |
++-------------------------+-------------------+--------------------------------+
+| \*INT\* | int | int() |
++-------------------------+-------------------+--------------------------------+
+| | long | long() |
++-------------------------+-------------------+--------------------------------+
+| \*FLOAT\*, \*NUMERIC\*, | float | float() |
+| \*NUMBER\*, \*DECIMAL\*,| | |
+| \*REAL\*, \*DOUBLE\* | | |
++-------------------------+-------------------+--------------------------------+
+| \*UNICODE\* | unicode | UnicodeConverter(self.encoding)|
++-------------------------+-------------------+--------------------------------+
+| \*BINARY\*, \*BLOB\* | binary | sqlite.decode() |
++-------------------------+-------------------+--------------------------------+
+| \*DATE\* | date | DateTime.DateFrom() |
++-------------------------+-------------------+--------------------------------+
+| \*TIME\* | time | DateTime.TimeFrom() |
++-------------------------+-------------------+--------------------------------+
+| \*TIMESTAMP\* | timestamp | DateTime.DateTimeFrom() |
++-------------------------+-------------------+--------------------------------+
+| \*INTERVAL\* | interval | DateTime.DateTimeDeltaFrom() |
++-------------------------+-------------------+--------------------------------+
+
+Now there are two ways to determine which converter to use for a given column
+in the resultset. If the column came directly from a table, and wasn't created
+by an expression, or by a function or aggregate, then SQLite delivers column
+type to PySQLite, and PySQLite will then use a certain converter, depending on
+the column type.
+
+Let's use an example to make this more clear::
+
+ CREATE TABLE TEST (V VARCHAR, I INTEGER);
+ INSERT INTO TEST(V, I) VALUES ('foo', 25);
+
+>>> cu = cx.cursor()
+>>> cu.execute("select v, i from test")
+>>> row = cu.fetchone()
+>>> row, map(type, row)
+(('foo', 25), [<type 'str'>, <type 'int'>])
+
+Now, with the statement "select v, i from test" you directly accessed the
+columns 'v' and 'i' in the table 'test'. SQLite is thus able to deliver the
+types of the columns to PySQLite. PySQLite thus knows that the first column is
+of type VARCHAR, and the second column is of type INTEGER. Now VARCHAR matches
+*CHAR* and INTEGER matches *INT*, so PySQLite finds the converter name 'str'
+for the first column in the resultset, and the converter name 'int' for the
+second column. Now 'str' maps to str() and 'int' maps to int(), so these two
+callables are called for the raw string data PySQLite gets from the SQLite
+engine. For you, this means you transparently got back an integer for the
+second column, even though SQLite basically only knows about strings.
+
+Now let's try something else:
+
+>>> cu.execute("select i*2.3 from test")
+>>> row = cu.fetchone()
+>>> row, map(type, row)
+((57.5,), [<type 'float'>])
+
+There's a little magic going on here. SQLite infers that the result is numeric,
+so it sets "NUMERIC" as the type of the result column, which in turn by
+PySQLite is mapped to the converter name 'float', and then to the callable
+float.
+
+Now of course there are areas where there is no magic left and you have to tell
+PySQLite yourself to which type to convert back. Basically always when result
+columns don't come directly from tables, but from expressions.
+
+One example would be where
+you'd want to concatenate two columns of a UNICODE type:
+
+>>> cx = sqlite.connect("db", encoding="utf-8")
+>>> cu = cx.cursor()
+>>> cu.execute("create table test(u1 unicode, u2 unicode)")
+>>> cu.execute("insert into test(u1, u2) values (%s, %s)", (u"\x99sterreich", u"Ungarn"))
+>>> cu.execute("select u1 || '-' || u2 from test")
+>>> print cu.fetchone()
+('\xc2\x99sterreich-Ungarn',)
+
+We didn't specify what type to convert to, so we just got a normal Python
+string back, with the result in UTF-8 encoding. So let's specify the converter
+name with the magical "-- types type1[, type2 ...]" SQL command that PySQLite
+intercepts and interprets itself and try again:
+
+>>> cu.execute("-- types unicode")
+>>> cu.execute("select u1 || '-' || u2 from test")
+>>> row = cu.fetchone()
+>>> row, map(type, row)
+((u'\x99sterreich-Ungarn',), [<type 'unicode'>])
+
+Another problematic area are SQLite functions and aggregates. SQLite will
+always consider their results NUMERIC. Consider this:
+
+>>> import sqlite
+>>> cx = sqlite.connect(":memory:")
+>>> cx.create_function("concat", 2, lambda x, y: "%s--%s" % (x,y))
+>>> cu = cx.cursor()
+>>> cu.execute("select concat('ab', 'cd')")
+>>> res = cu.fetchone()[0]
+>>> res, type(res)
+(0.0, <type 'float'>)
+
+Pretty stupid, right? SQLite tells PySQLite that the result is NUMERIC so
+PySQLite faithfully tries to convert it to a float, which fails so the result
+is 0.0. So we'll have to explicitly tell which types we want:
+
+>>> cu.execute("-- types str")
+>>> cu.execute("select concat('ab', 'cd')")
+>>> res = cu.fetchone()[0]
+>>> res, type(res)
+('ab--cd', <type 'str'>)
+>>>
+
+The same problem exists for aggregates, btw.:
+
+>>> import sqlite
+>>> cx = sqlite.connect(":memory:")
+>>> from mx.DateTime import *
+>>> today = now()
+>>> yesterday = now() - DateTimeDelta(1)
+>>> today, yesterday
+(<DateTime object for '2004-07-06 18:50:12.12' at 401f12f8>, <DateTime object for '2004-07-05 18:50:12.12' at 401ca2f8>)
+>>> cu = cx.cursor()
+>>> cu.execute("create table test (d timestamp)")
+>>> cu.executemany("insert into test(d) values (%s)", [(today,), (yesterday,)])
+>>> cu.execute("select max(d) from test")
+>>> res = cu.fetchone()[0]
+>>> res, type(res)
+(2004.0, <type 'float'>)
+
+Bah! Ok let's be explicit then:
+
+>>> cu.execute("-- types timestamp")
+>>> cu.execute("select max(d) from test")
+>>> res = cu.fetchone()[0]
+>>> res, type(res)
+(<DateTime object for '2004-07-06 18:50:12.11' at 40279bf0>, <type 'DateTime'>)
+>>>
+
+4.2 Conversions Python => SQLite
+--------------------------------
+
+This section only matters if you want to create your own types and use them
+transparently with SQLite. Just provide them with a _quote() method that will
+return a string ready to be inserted directly into a SQL statement.
+
+You'll then also want to register a suitable converter callable with the
+converters parameter of the connect() function.
+
+
diff --git a/encode.c b/encode.c
new file mode 100644
index 0000000..1410813
--- /dev/null
+++ b/encode.c
@@ -0,0 +1,245 @@
+/*
+** 2002 April 25
+**
+** The author disclaims copyright to this source code. In place of
+** a legal notice, here is a blessing:
+**
+** May you do good and not evil.
+** May you find forgiveness for yourself and forgive others.
+** May you share freely, never taking more than you give.
+**
+*************************************************************************
+** This file contains helper routines used to translate binary data into
+** a null-terminated string (suitable for use in SQLite) and back again.
+** These are convenience routines for use by people who want to store binary
+** data in an SQLite database. The code in this file is not used by any other
+** part of the SQLite library.
+**
+** $Id: encode.c,v 1.2 2004/07/03 22:51:18 ghaering Exp $
+*/
+#include <string.h>
+
+/*
+** How This Encoder Works
+**
+** The output is allowed to contain any character except 0x27 (') and
+** 0x00. This is accomplished by using an escape character to encode
+** 0x27 and 0x00 as a two-byte sequence. The escape character is always
+** 0x01. An 0x00 is encoded as the two byte sequence 0x01 0x01. The
+** 0x27 character is encoded as the two byte sequence 0x01 0x03. Finally,
+** the escape character itself is encoded as the two-character sequence
+** 0x01 0x02.
+**
+** To summarize, the encoder works by using escape sequences as follows:
+**
+** 0x00 -> 0x01 0x01
+** 0x01 -> 0x01 0x02
+** 0x27 -> 0x01 0x03
+**
+** If that were all the encoder did, it would work, but in certain cases
+** it could double the size of the encoded string. For example, to
+** encode a string of 100 0x27 characters would require 100 instances of
+** the 0x01 0x03 escape sequence resulting in a 200-character output.
+** We would prefer to keep the size of the encoded string smaller than
+** this.
+**
+** To minimize the encoding size, we first add a fixed offset value to each
+** byte in the sequence. The addition is modulo 256. (That is to say, if
+** the sum of the original character value and the offset exceeds 256, then
+** the higher order bits are truncated.) The offset is chosen to minimize
+** the number of characters in the string that need to be escaped. For
+** example, in the case above where the string was composed of 100 0x27
+** characters, the offset might be 0x01. Each of the 0x27 characters would
+** then be converted into an 0x28 character which would not need to be
+** escaped at all and so the 100 character input string would be converted
+** into just 100 characters of output. Actually 101 characters of output -
+** we have to record the offset used as the first byte in the sequence so
+** that the string can be decoded. Since the offset value is stored as
+** part of the output string and the output string is not allowed to contain
+** characters 0x00 or 0x27, the offset cannot be 0x00 or 0x27.
+**
+** Here, then, are the encoding steps:
+**
+** (1) Choose an offset value and make it the first character of
+** output.
+**
+** (2) Copy each input character into the output buffer, one by
+** one, adding the offset value as you copy.
+**
+** (3) If the value of an input character plus offset is 0x00, replace
+** that one character by the two-character sequence 0x01 0x01.
+** If the sum is 0x01, replace it with 0x01 0x02. If the sum
+** is 0x27, replace it with 0x01 0x03.
+**
+** (4) Put a 0x00 terminator at the end of the output.
+**
+** Decoding is obvious:
+**
+** (5) Copy encoded characters except the first into the decode
+** buffer. Set the first encoded character aside for use as
+** the offset in step 7 below.
+**
+** (6) Convert each 0x01 0x01 sequence into a single character 0x00.
+** Convert 0x01 0x02 into 0x01. Convert 0x01 0x03 into 0x27.
+**
+** (7) Subtract the offset value that was the first character of
+** the encoded buffer from all characters in the output buffer.
+**
+** The only tricky part is step (1) - how to compute an offset value to
+** minimize the size of the output buffer. This is accomplished by testing
+** all offset values and picking the one that results in the fewest number
+** of escapes. To do that, we first scan the entire input and count the
+** number of occurrences of each character value in the input. Suppose
+** the number of 0x00 characters is N(0), the number of occurrences of 0x01
+** is N(1), and so forth up to the number of occurrences of 0xff is N(255).
+** An offset of 0 is not allowed so we don't have to test it. The number
+** of escapes required for an offset of 1 is N(1)+N(2)+N(40). The number
+** of escapes required for an offset of 2 is N(2)+N(3)+N(41). And so forth.
+** In this way we find the offset that gives the minimum number of escapes,
+** and thus minimizes the length of the output string.
+*/
+
+/*
+** Encode a binary buffer "in" of size n bytes so that it contains
+** no instances of characters '\'' or '\000'. The output is
+** null-terminated and can be used as a string value in an INSERT
+** or UPDATE statement. Use sqlite_decode_binary() to convert the
+** string back into its original binary.
+**
+** The result is written into a preallocated output buffer "out".
+** "out" must be able to hold at least 2 +(257*n)/254 bytes.
+** In other words, the output will be expanded by as much as 3
+** bytes for every 254 bytes of input plus 2 bytes of fixed overhead.
+** (This is approximately 2 + 1.0118*n or about a 1.2% size increase.)
+**
+** The return value is the number of characters in the encoded
+** string, excluding the "\000" terminator.
+*/
+int sqlite_encode_binary(const unsigned char *in, int n, unsigned char *out){
+ int i, j, e = 0, m;
+ int cnt[256];
+ if( n<=0 ){
+ out[0] = 'x';
+ out[1] = 0;
+ return 1;
+ }
+ memset(cnt, 0, sizeof(cnt));
+ for(i=n-1; i>=0; i--){ cnt[in[i]]++; }
+ m = n;
+ for(i=1; i<256; i++){
+ int sum;
+ if( i=='\'' ) continue;
+ sum = cnt[i] + cnt[(i+1)&0xff] + cnt[(i+'\'')&0xff];
+ if( sum<m ){
+ m = sum;
+ e = i;
+ if( m==0 ) break;
+ }
+ }
+ out[0] = e;
+ j = 1;
+ for(i=0; i<n; i++){
+ int c = (in[i] - e)&0xff;
+ if( c==0 ){
+ out[j++] = 1;
+ out[j++] = 1;
+ }else if( c==1 ){
+ out[j++] = 1;
+ out[j++] = 2;
+ }else if( c=='\'' ){
+ out[j++] = 1;
+ out[j++] = 3;
+ }else{
+ out[j++] = c;
+ }
+ }
+ out[j] = 0;
+ return j;
+}
+
+/*
+** Decode the string "in" into binary data and write it into "out".
+** This routine reverses the encoding created by sqlite_encode_binary().
+** The output will always be a few bytes less than the input. The number
+** of bytes of output is returned. If the input is not a well-formed
+** encoding, -1 is returned.
+**
+** The "in" and "out" parameters may point to the same buffer in order
+** to decode a string in place.
+*/
+int sqlite_decode_binary(const unsigned char *in, unsigned char *out){
+ int i, c, e;
+ e = *(in++);
+ i = 0;
+ while( (c = *(in++))!=0 ){
+ if( c==1 ){
+ c = *(in++);
+ if( c==1 ){
+ c = 0;
+ }else if( c==2 ){
+ c = 1;
+ }else if( c==3 ){
+ c = '\'';
+ }else{
+ return -1;
+ }
+ }
+ out[i++] = (c + e)&0xff;
+ }
+ return i;
+}
+
+#ifdef ENCODER_TEST
+/*
+** The subroutines above are not tested by the usual test suite. To test
+** these routines, compile just this one file with a -DENCODER_TEST=1 option
+** and run the result.
+*/
+int main(int argc, char **argv){
+ int i, j, n, m, nOut;
+ unsigned char in[30000];
+ unsigned char out[33000];
+
+ for(i=0; i<sizeof(in); i++){
+ printf("Test %d: ", i+1);
+ n = rand() % (i+1);
+ if( i%100==0 ){
+ int k;
+ for(j=k=0; j<n; j++){
+ /* if( k==0 || k=='\'' ) k++; */
+ in[j] = k;
+ k = (k+1)&0xff;
+ }
+ }else{
+ for(j=0; j<n; j++) in[j] = rand() & 0xff;
+ }
+ nOut = sqlite_encode_binary(in, n, out);
+ if( nOut!=strlen(out) ){
+ printf(" ERROR return value is %d instead of %d\n", nOut, strlen(out));
+ exit(1);
+ }
+ m = (256*n + 1262)/253;
+ printf("size %d->%d (max %d)", n, strlen(out)+1, m);
+ if( strlen(out)+1>m ){
+ printf(" ERROR output too big\n");
+ exit(1);
+ }
+ for(j=0; out[j]; j++){
+ if( out[j]=='\'' ){
+ printf(" ERROR contains (')\n");
+ exit(1);
+ }
+ }
+ j = sqlite_decode_binary(out, out);
+ if( j!=n ){
+ printf(" ERROR decode size %d\n", j);
+ exit(1);
+ }
+ if( memcmp(in, out, n)!=0 ){
+ printf(" ERROR decode mismatch\n");
+ exit(1);
+ }
+ printf(" OK\n");
+ }
+}
+#endif /* ENCODER_TEST */
diff --git a/examples/converters.py b/examples/converters.py
new file mode 100644
index 0000000..7a34cb7
--- /dev/null
+++ b/examples/converters.py
@@ -0,0 +1,39 @@
+import os
+import sqlite
+
+# Ok, let's define a user-defined type we can use with the SQLite database
class Point:
    """A user-defined 2D point type for use with the SQLite database."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def _quote(self):
        # PySQLite calls _quote() on a user-defined type to obtain the
        # string representation that is written to the database.
        return "'%f,%f'" % (self.x, self.y)

    def __str__(self):
        return "Point(%f, %f)" % (self.x, self.y)
+
+# The conversion callable needs to accept a string, parse it and return an
+# instance of your user-defined type.
def pointConverter(s):
    """Parse the "x,y" string stored in the database back into a Point.

    Conversion callables receive the raw column string and must return an
    instance of the user-defined type.
    """
    x_part, y_part = s.split(",")
    return Point(float(x_part), float(y_part))
+
+# Ensure we have an empty database
+if os.path.exists("db"): os.remove("db")
+
+cx = sqlite.connect("db", converters={"point": pointConverter})
+cu = cx.cursor()
+cu.execute("create table test(p point, n int)")
+cu.execute("insert into test(p, n) values (%s, %s)", (Point(-3.2, 4.5), 25))
+
+# For user-defined types, and for statements which return anything but direct
+# columns, you need to use the "-- types" feature of PySQLite:
+cu.execute("-- types point, int")
+cu.execute("select p, n from test")
+row = cu.fetchone()
+
+print "p:", row.p # .columnname instead of [0] is a PySQLite
+print "n:", row.n # extension to the DB-API!
+cx.close()
diff --git a/examples/dbapi_transactions.py b/examples/dbapi_transactions.py
new file mode 100644
index 0000000..4e8f113
--- /dev/null
+++ b/examples/dbapi_transactions.py
@@ -0,0 +1,107 @@
+import sys
+import sqlite
+
# The shared connection object; created in main().
cx = None

def getCon():
    """Return the module-wide shared connection object."""
    return cx
+
+def createSchema():
+ # Create the schema and make sure we're not accessing an old, incompatible schema
+ cu = getCon().cursor()
+ cu.execute("select tbl_name from sqlite_master where type='table' order by tbl_name")
+ tables = []
+ for row in cu.fetchall():
+ tables.append(row.tbl_name)
+ if tables != ["customer", "orders"]:
+ if tables == []:
+ # ok, database is empty
+ cu.execute("""
+ create table customer (
+ cust_id integer primary key,
+ cust_firstname text not null,
+ cust_lastname text not null,
+ cust_no text not null
+ )
+ """)
+ cu.execute("""
+ create table orders (
+ ord_id integer primary key,
+ ord_customer int,
+ ord_item text not null,
+ ord_quantity integer
+ )
+ """)
+ getCon().commit()
+ else:
+ print "We have an unknown schema here. Please fix manually."
+ sys.exit(1)
+
def createCustomer(firstname, lastname, customerNo):
    """Insert a new customer row and return its generated primary key."""
    sql = """
        insert into customer(cust_firstname, cust_lastname, cust_no)
        values (%s, %s, %s)
    """
    cursor = getCon().cursor()
    cursor.execute(sql, (firstname, lastname, customerNo))
    getCon().commit()
    return cursor.lastrowid
+
def createOrder(cust_id, ord_item, ord_quantity):
    """Insert a new order for the customer identified by cust_id.

    Returns the generated primary key of the order row.
    """
    sql = """
        insert into orders (ord_customer, ord_item, ord_quantity)
        values (%s, %s, %s)
    """
    cursor = getCon().cursor()
    cursor.execute(sql, (cust_id, ord_item, ord_quantity))
    getCon().commit()
    return cursor.lastrowid
+
def deleteOrder(ord_id):
    """Delete the order identified by ord_id.

    Bug fix: the statement used to read "delete from order", but the table
    created by createSchema() is named "orders" (and ORDER is a reserved
    SQL keyword), so the delete could never succeed.
    """
    cu = getCon().cursor()
    cu.execute("delete from orders where ord_id=%s", (ord_id,))
    getCon().commit()
+
def deleteCustomer(cust_id):
    """Atomically delete a customer and all of that customer's orders.

    This is the case where a transaction is really needed: either both
    deletes happen or neither does.  PySQLite sends BEGIN implicitly as
    soon as the first non-SELECT statement is executed, and the final
    commit() sends the COMMIT.
    """
    cursor = getCon().cursor()
    # The first delete implicitly opens the transaction (BEGIN).
    cursor.execute("delete from orders where ord_customer=%s", (cust_id,))
    cursor.execute("delete from customer where cust_id=%s", (cust_id,))
    # And this sends the COMMIT to the library.
    getCon().commit()
+
def main():
    """Demo: create the schema, add a customer with two orders, then
    delete the customer (and, transactionally, her orders)."""
    global cx
    cx = sqlite.connect("customerdb")
    createSchema()

    # One customer ...
    cust_id = createCustomer("Jane", "Doe", "JD0001")

    # ... with two orders.
    createOrder(cust_id, "White Towel", 2)
    createOrder(cust_id, "Blue Cup", 5)

    # Remove the customer together with all her orders.
    deleteCustomer(cust_id)

    cx.close()

if __name__ == "__main__":
    main()
diff --git a/examples/manual_transactions.py b/examples/manual_transactions.py
new file mode 100644
index 0000000..114ecbf
--- /dev/null
+++ b/examples/manual_transactions.py
@@ -0,0 +1,102 @@
+import sys
+import sqlite
+
# The shared connection object; created in main().
cx = None

def getCon():
    """Return the module-wide shared connection object."""
    return cx
+
+def createSchema():
+ # Create the schema and make sure we're not accessing an old, incompatible schema
+ cu = getCon().cursor()
+ cu.execute("select tbl_name from sqlite_master where type='table' order by tbl_name")
+ tables = []
+ for row in cu.fetchall():
+ tables.append(row.tbl_name)
+ if tables != ["customer", "orders"]:
+ if tables == []:
+ # ok, database is empty
+ cu.execute("begin")
+ cu.execute("""
+ create table customer (
+ cust_id integer primary key,
+ cust_firstname text not null,
+ cust_lastname text not null,
+ cust_no text not null
+ )
+ """)
+ cu.execute("""
+ create table orders (
+ ord_id integer primary key,
+ ord_customer int,
+ ord_item text not null,
+ ord_quantity integer
+ )
+ """)
+ cu.execute("commit")
+ else:
+ print "We have an unknown schema here. Please fix manually."
+ sys.exit(1)
+
def createCustomer(firstname, lastname, customerNo):
    """Insert a new customer row and return its generated primary key.

    No commit here: the connection is in autocommit mode.
    """
    sql = """
        insert into customer(cust_firstname, cust_lastname, cust_no)
        values (%s, %s, %s)
    """
    cursor = getCon().cursor()
    cursor.execute(sql, (firstname, lastname, customerNo))
    return cursor.lastrowid
+
def createOrder(cust_id, ord_item, ord_quantity):
    """Insert a new order for the customer identified by cust_id.

    Returns the generated primary key of the order row.  No commit here:
    the connection is in autocommit mode.
    """
    sql = """
        insert into orders (ord_customer, ord_item, ord_quantity)
        values (%s, %s, %s)
    """
    cursor = getCon().cursor()
    cursor.execute(sql, (cust_id, ord_item, ord_quantity))
    return cursor.lastrowid
+
def deleteOrder(ord_id):
    """Delete the order identified by ord_id.

    Bug fix: the statement used to read "delete from order", but the table
    created by createSchema() is named "orders" (and ORDER is a reserved
    SQL keyword), so the delete could never succeed.
    """
    cu = getCon().cursor()
    cu.execute("delete from orders where ord_id=%s", (ord_id,))
+
def deleteCustomer(cust_id):
    """Atomically delete a customer and all of that customer's orders.

    This is the case where a transaction is really needed: either both
    deletes happen or neither does.  Because the connection runs in
    autocommit mode, BEGIN and COMMIT are issued explicitly.
    """
    cursor = getCon().cursor()
    cursor.execute("begin")
    cursor.execute("delete from orders where ord_customer=%s", (cust_id,))
    cursor.execute("delete from customer where cust_id=%s", (cust_id,))
    cursor.execute("commit")
+
def main():
    """Demo of explicit transaction handling with an autocommit
    connection: create the schema, add a customer with two orders, then
    delete the customer and her orders."""
    global cx
    # Open the connection in autocommit mode, because we believe we have
    # reason to :-/
    cx = sqlite.connect("customerdb", autocommit=1)
    createSchema()

    # One customer ...
    cust_id = createCustomer("Jane", "Doe", "JD0001")

    # ... with two orders.
    createOrder(cust_id, "White Towel", 2)
    createOrder(cust_id, "Blue Cup", 5)

    # Remove the customer together with all her orders.
    deleteCustomer(cust_id)

    cx.close()

if __name__ == "__main__":
    main()
diff --git a/misc/multithreading_crash.py b/misc/multithreading_crash.py
new file mode 100644
index 0000000..35516f9
--- /dev/null
+++ b/misc/multithreading_crash.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+# This is a test case I got from a user that will crash PySQLite 0.5.0.
+# It stress-tests PySQLite in multithreaded mode.
+
+import os
+import sys
+import threading
+import time
+import random
+import sqlite
+
dbname = "test.db"

# How the readers deal with SQLITE_BUSY.  Pick one of:
#   "no timeout", "use timeout", "use slow busy handler",
#   "use fast busy handler"
# -- see Reader.run() for what each setting does.
MECHANISM = "use fast busy handler"
+
+class Modifier(threading.Thread):
+ def __init__(self, dbname):
+ threading.Thread.__init__(self)
+ self.dbname = dbname
+ def run(self):
+ print "Modifier: start"
+ cx = sqlite.connect(self.dbname)
+ cu = cx.cursor()
+ print "Modifier: INSERTing"
+ cu.execute("INSERT INTO meta (name, value) VALUES (%s, %s)",
+ "foo", "blah blah blah")
+ for i in range(5):
+ print "Modifier: sleeping %d" % i
+ time.sleep(1)
+ print "Modifier: committing"
+ cx.commit()
+ print "Modifier: committed"
+ cu.close()
+ cx.close()
+ print "Modifier: end"
+
+class Reader(threading.Thread):
+ def __init__(self, name, dbname):
+ threading.Thread.__init__(self, name=name)
+ self.dbname = dbname
+ def busyHandler(self, delay, table, numAttempts):
+ print "Reader %s: busyHandler(delay=%r, table=%r, numAttempts=%r)"\
+ % (self.getName(), delay, table, numAttempts)
+ time.sleep(delay)
+ return 1
+ def run(self):
+ print "Reader %s: start" % self.getName()
+ if MECHANISM == "no timeout":
+ cx = sqlite.connect(self.dbname)
+ elif MECHANISM == "use timeout":
+ cx = sqlite.connect(self.dbname, timeout=5000)
+ elif MECHANISM == "use slow busy handler":
+ cx = sqlite.connect(self.dbname)
+ cx.db.sqlite_busy_handler(self.busyHandler, 1.0)
+ elif MECHANISM == "use fast busy handler":
+ cx = sqlite.connect(self.dbname, Xtimeout=5000.0)
+ cx.db.sqlite_busy_handler(self.busyHandler, 0.1)
+ else:
+ raise ValueError("MECHANISM is not one of the expected values")
+ sleepFor = random.randint(0, 3)
+ print "Reader %s: sleeping for %d seconds" % (self.getName(), sleepFor)
+ time.sleep(sleepFor)
+ print "Reader %s: waking up" % self.getName()
+ cu = cx.cursor()
+ print "Reader %s: SELECTing" % self.getName()
+ cu.execute("SELECT name, value FROM meta WHERE name='%s'" % self.getName())
+ print "Reader %s: SELECTed %s" % (self.getName(), cu.fetchone())
+ cu.close()
+ cx.close()
+ print "Reader %s: end" % self.getName()
+
def test_sqlite_busy():
    """Test handling of SQL_BUSY "errors" as discussed here:
    http://www.hwaci.com/sw/sqlite/faq.html#q7
    http://www.sqlite.org/cvstrac/wiki?p=MultiThreading

    One thread (Modifier) opens the database, modifies it and sleeps
    before committing, while Reader thread(s) SELECT concurrently and
    either error out, block or time out, depending on MECHANISM (and the
    timeout argument to connect()).
    """
    # Start from a fresh database, removing a stale journal if present.
    for path in (dbname, dbname + "-journal"):
        if os.path.exists(path):
            os.remove(path)
    con = sqlite.connect(dbname)
    cur = con.cursor()
    cur.execute("CREATE TABLE meta (name STRING, value STRING)")
    con.commit()
    cur.close()
    con.close()

    modifier = Modifier(dbname)
    readerNames = ("foo",) #XXX "bar", "baz")
    readers = [Reader(name, dbname) for name in readerNames]
    modifier.start()
    for reader in readers:
        reader.start()
    modifier.join()
    for reader in readers:
        reader.join()


if __name__ == "__main__":
    test_sqlite_busy()
diff --git a/port/strsep.c b/port/strsep.c
new file mode 100644
index 0000000..b027589
--- /dev/null
+++ b/port/strsep.c
@@ -0,0 +1,73 @@
+/*-
+ * Copyright (c) 1990, 1993
+ * The Regents of the University of California. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * This product includes software developed by the University of
+ * California, Berkeley and its contributors.
+ * 4. Neither the name of the University nor the names of its contributors
+ * may be used to endorse or promote products derived from this software
+ * without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <string.h>
+#include <stdio.h>
+
+/*
+ * Get next token from string *stringp, where tokens are possibly-empty
+ * strings separated by characters from delim.
+ *
+ * Writes NULs into the string at *stringp to end tokens.
+ * delim need not remain constant from call to call.
+ * On return, *stringp points past the last NUL written (if there might
+ * be further tokens), or is NULL (if there are definitely no more tokens).
+ *
+ * If *stringp is NULL, strsep returns NULL.
+ */
+char *
+pysqlite_strsep(char** stringp, const char* delim)
+{
+ char *s;
+ const char *spanp;
+ int c, sc;
+ char *tok;
+
+ if ((s = *stringp) == NULL)
+ return (NULL);
+ for (tok = s;;) {
+ c = *s++;
+ spanp = delim;
+ do {
+ if ((sc = *spanp++) == c) {
+ if (c == 0)
+ s = NULL;
+ else
+ s[-1] = 0;
+ *stringp = s;
+ return (tok);
+ }
+ } while (sc != 0);
+ }
+ /* NOTREACHED */
+}
diff --git a/port/strsep.h b/port/strsep.h
new file mode 100644
index 0000000..a6dd640
--- /dev/null
+++ b/port/strsep.h
@@ -0,0 +1 @@
+char* pysqlite_strsep(char** stringp, const char* delim);
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..8e304cf
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+import os, sys
+from distutils.core import setup
+from distutils.extension import Extension
+
__version__ = "1.1.8"

sqlite = "sqlite3"
sources = ["_sqlite.c", "encode.c", "port/strsep.c"]
macros = []

# Per-platform build configuration: where to find the SQLite headers and
# libraries.
if sys.platform in ("linux-i386", "linux2"): # most Linux
    include_dirs = ['/usr/include/sqlite']
    library_dirs = []
    libraries = [sqlite]
    runtime_library_dirs = []
    extra_objects = []
elif sys.platform in ("freebsd4", "freebsd5", "openbsd2", "cygwin", "darwin"):
    if sys.platform == "darwin":
        LOCALBASE = os.environ.get("LOCALBASE", "/opt/local")
    else:
        LOCALBASE = os.environ.get("LOCALBASE", "/usr/local")
    include_dirs = ['%s/include' % LOCALBASE]
    library_dirs = ['%s/lib/' % LOCALBASE]
    libraries = [sqlite]
    runtime_library_dirs = []
    extra_objects = []
elif sys.platform == "win32":
    include_dirs = [r'..\sqlite']
    library_dirs = [r'..\sqlite']
    libraries = [sqlite]
    runtime_library_dirs = []
    extra_objects = []
elif os.name == "posix": # most Unixish platforms
    include_dirs = ['/usr/local/include']
    library_dirs = ['/usr/local/lib']
    libraries = [sqlite]
    # On some platforms, this can be used to find the shared libraries
    # at runtime, if they are in a non-standard location. Doesn't
    # work for Linux gcc.
    ## runtime_library_dirs = library_dirs
    runtime_library_dirs = []
    # This can be used on Linux to force use of static sqlite lib
    ## extra_objects = ['/usr/lib/sqlite/libsqlite.a']
    extra_objects = []
else:
    # Bug fix: the original raised a string exception
    # (raise "UnknownPlatform", "...") which is not valid Python;
    # raise a real exception instead.
    raise RuntimeError("UnknownPlatform: sys.platform=%s, os.name=%s" %
                       (sys.platform, os.name))

long_description = \
"""Python interface to SQLite

pysqlite is an interface to the SQLite database server for Python. It aims to be
fully compliant with Python database API version 2.0 while also exploiting the
unique features of SQLite.

"""
+
def main():
    """Configure and run the distutils setup for pysqlite."""
    py_modules = ["sqlite.main"]

    # Patch distutils if it can't cope with the "classifiers" or
    # "download_url" keywords (pre-2.2.3 Pythons).
    if sys.version < '2.2.3':
        from distutils.dist import DistributionMetadata
        DistributionMetadata.classifiers = None
        DistributionMetadata.download_url = None

    ext = Extension(name='_sqlite',
                    sources=sources,
                    include_dirs=include_dirs,
                    library_dirs=library_dirs,
                    runtime_library_dirs=runtime_library_dirs,
                    libraries=libraries,
                    extra_objects=extra_objects,
                    define_macros=macros)

    setup(  # Distribution meta-data
        name = "pysqlite",
        version = __version__,
        description = "An interface to SQLite",
        long_description = long_description,
        author = "PySQLite developers",
        author_email = "pysqlite-devel@lists.sourceforge.net",
        license = "Python license",
        platforms = "ALL",
        url = "http://pysqlite.sourceforge.net/",

        # Description of the modules and packages in the distribution
        py_modules = py_modules,
        ext_modules = [ext],
        classifiers = [
            "Development Status :: 5 - Production/Stable",
            "Intended Audience :: Developers",
            "License :: OSI Approved :: MIT License",
            "Operating System :: Microsoft :: Windows :: Windows NT/2000",
            "Operating System :: POSIX",
            "Programming Language :: C",
            "Programming Language :: Python",
            "Topic :: Database :: Database Engines/Servers",
            "Topic :: Database :: Front-Ends"]
        )

if __name__ == "__main__":
    main()
diff --git a/sqlite/__init__.py b/sqlite/__init__.py
new file mode 100644
index 0000000..039322e
--- /dev/null
+++ b/sqlite/__init__.py
@@ -0,0 +1,72 @@
"""Python interface to the SQLite embedded database engine."""
# Bug fix: the docstring used to appear *after* "import _sqlite", so it
# was a plain no-op string and never became the module's __doc__.  A
# module docstring must be the first statement in the file.

import _sqlite

#-------------------------------------------------------------------------------
# Module Information
#-------------------------------------------------------------------------------
+
# CVS revision string with the "$Revision: " prefix and " $" suffix
# sliced off.
__revision__ = """$Revision: 1.22 $"""[11:-2]

# DB-API 2.0 module-level attributes (PEP 249).
threadsafety = 1
apilevel = "2.0"
paramstyle = "pyformat"

# Version of PySQLite, as a display string ...
version = "1.1.8"
# ... and as a tuple that compares correctly against other versions.
version_info = (1, 1, 8)
+
+#-------------------------------------------------------------------------------
+# Data type support
+#-------------------------------------------------------------------------------
+
+from main import DBAPITypeObject, Cursor, Connection, PgResultSet
+
# DB-API type objects (PEP 249): each compares equal to the low-level
# _sqlite type codes it wraps.
STRING = DBAPITypeObject(_sqlite.STRING)
BINARY = DBAPITypeObject(_sqlite.BINARY)
INT = DBAPITypeObject(_sqlite.INTEGER)
NUMBER = DBAPITypeObject(_sqlite.INTEGER, _sqlite.FLOAT)
DATE = DBAPITypeObject(_sqlite.DATE)
TIME = DBAPITypeObject(_sqlite.TIME)
TIMESTAMP = DBAPITypeObject(_sqlite.TIMESTAMP)
# SQLite has no separate ROWID type code, hence the empty type object.
ROWID = DBAPITypeObject()

# Nonstandard extension:
UNICODESTRING = DBAPITypeObject(_sqlite.UNICODESTRING)
+
+#-------------------------------------------------------------------------------
+# Exceptions
+#-------------------------------------------------------------------------------
+
+from _sqlite import Warning, Error, InterfaceError, \
+ DatabaseError, DataError, OperationalError, IntegrityError, InternalError, \
+ ProgrammingError, NotSupportedError
+
+#-------------------------------------------------------------------------------
+# Global Functions
+#-------------------------------------------------------------------------------
+
def connect(*args, **kwargs):
    """Factory function required by the DB-API: open a connection to an
    SQLite database.  All arguments are passed through to Connection."""
    return Connection(*args, **kwargs)

from _sqlite import encode, decode

# encode() turns arbitrary binary data into an SQL-safe string, which is
# exactly what the DB-API Binary() constructor is supposed to do.
Binary = encode

__all__ = ['connect','IntegrityError', 'InterfaceError', 'InternalError',
           'NotSupportedError', 'OperationalError',
           'ProgrammingError', 'Warning',
           'Connection', 'Cursor', 'PgResultSet',
           'apilevel', 'paramstyle', 'threadsafety', 'version', 'version_info',
           'Binary', 'decode']
diff --git a/sqlite/main.py b/sqlite/main.py
new file mode 100644
index 0000000..6d19ad2
--- /dev/null
+++ b/sqlite/main.py
@@ -0,0 +1,587 @@
+from __future__ import nested_scopes
+import _sqlite
+
+import copy, new, sys, time, weakref
+from types import *
+
# mx.DateTime support is optional; have_datetime records whether it is
# available.
try:
    from mx import DateTime
    have_datetime = 1
except ImportError:
    have_datetime = 0

if have_datetime:
    # Make the required Date/Time constructors visible at module level,
    # as the DB-API expects.
    Date = DateTime.Date
    Time = DateTime.Time
    Timestamp = DateTime.Timestamp
    DateFromTicks = DateTime.DateFromTicks
    TimeFromTicks = DateTime.TimeFromTicks
    TimestampFromTicks = DateTime.TimestampFromTicks

    # ... and also the DateTime types themselves.
    DateTimeType = DateTime.DateTimeType
    DateTimeDeltaType = DateTime.DateTimeDeltaType
+
class DBAPITypeObject:
    """Helper for the DB-API type objects (STRING, NUMBER, ...).

    Instances compare equal to every low-level type code they were
    constructed with, as suggested by PEP 249.
    """

    def __init__(self, *values):
        self.values = values

    def __cmp__(self, other):
        # Equal when `other` is one of our type codes; otherwise fall
        # back to an arbitrary but consistent ordering.
        if other in self.values:
            return 0
        if other < self.values:
            return 1
        return -1
+
def _quote(value):
    """_quote(value) -> string

    Transform a Python value into a string suitable to send to the SQLite
    database in a SQL statement.  Applied automatically to all execute()
    parameters, so statement strings should only use '%s' [or '%(name)s']
    placeholders without any additional quoting.
    """
    if value is None:
        return 'NULL'
    if type(value) in (IntType, LongType, FloatType):
        return value  # numbers need no quoting
    if isinstance(value, StringType):
        # Double embedded single quotes, then wrap in single quotes.
        return "'%s'" % value.replace("'", "''")
    if hasattr(value, '__quote__'):
        return value.__quote__()
    if hasattr(value, '_quote'):
        return value._quote()
    if have_datetime and type(value) in (DateTime.DateTimeType,
                                         DateTime.DateTimeDeltaType):
        return "'%s'" % value
    return repr(value)
+
def _quoteall(vdict):
    """_quoteall(vdict)->dict
    Quote every element of a list, tuple, string or dictionary (or
    PgResultSet) so the result can be interpolated into a SQL statement."""

    if type(vdict) is DictType or isinstance(vdict, PgResultSet):
        quoted = {}
        for key, val in vdict.items():
            quoted[key] = _quote(val)
        return quoted
    if isinstance(vdict, StringType) or isinstance(vdict, UnicodeType):
        # Note: a string is a SequenceType, but is treated as a single
        # entity, not a sequence of characters.
        return (_quote(vdict), )
    if type(vdict) in (ListType, TupleType):
        return tuple([_quote(item) for item in vdict])
    raise TypeError("argument to _quoteall must be a sequence or dictionary!")
+
class PgResultSet:
    """A DB-API query result set for a single row.
    This class emulates a sequence with the added feature of being able to
    reference a column as attribute or with dictionary access in addition to a
    zero-based numeric index."""

    # Concrete subclasses are produced by make_PgResultSetClass(), which
    # fills in the class attributes _desc_ (the DB-API description tuple)
    # and _xlatkey (a mapping of upper-cased column name -> column index).

    def __init__(self, value):
        # Store via __dict__ to avoid any attribute magic; baseObj is the
        # underlying sequence of column values.
        self.__dict__['baseObj'] = value

    def __getattr__(self, key):
        # Case-insensitive access to a column by name (row.colname).
        key = key.upper()
        if self._xlatkey.has_key(key):
            return self.baseObj[self._xlatkey[key]]
        raise AttributeError, key

    def __len__(self):
        return len(self.baseObj)

    def __getitem__(self, key):
        # String keys are translated to a column index; integer keys index
        # the underlying sequence directly.
        if isinstance(key, StringType):
            key = self.__class__._xlatkey[key.upper()]
        return self.baseObj[key]

    def __contains__(self, key):
        return self.has_key(key)

    def __getslice__(self, i, j):
        # Slicing builds a new concrete row class restricted to the
        # sliced columns, so name-based access keeps working.
        klass = make_PgResultSetClass(self._desc_[i:j])
        obj = klass(self.baseObj[i:j])
        return obj

    def __repr__(self):
        return repr(self.baseObj)

    def __str__(self):
        return str(self.baseObj)

    def __cmp__(self, other):
        # Rows compare like their underlying value sequence.
        return cmp(self.baseObj, other)

    def description(self):
        # DB-API style description of the columns in this row.
        return self._desc_

    def keys(self):
        # Column names, in column order.
        _k = []
        for _i in self._desc_:
            _k.append(_i[0])
        return _k

    def values(self):
        # Column values as a plain list (a copy of the underlying data).
        return self.baseObj[:]

    def items(self):
        # (column name, value) pairs, in column order.
        _items = []
        for i in range(len(self.baseObj)):
            _items.append((self._desc_[i][0], self.baseObj[i]))

        return _items

    def has_key(self, key):
        # Case-insensitive membership test for a column name.
        return self._xlatkey.has_key(key.upper())

    def get(self, key, defaultval=None):
        # Dictionary-style get() with a default value.
        if self.has_key(key):
            return self[key]
        else:
            return defaultval
+
def make_PgResultSetClass(description):
    """Build a concrete PgResultSet subclass for one result description.

    The generated class carries the DB-API description tuple in _desc_ and
    an upper-cased column-name -> index mapping in _xlatkey.
    """
    klass = new.classobj("PgResultSetConcreteClass", (PgResultSet,), {})
    klass.__dict__['_desc_'] = description

    xlat = {}
    for idx in range(len(description)):
        xlat[description[idx][0].upper()] = idx
    klass.__dict__['_xlatkey'] = xlat

    return klass
+
class Cursor:
    """Abstract cursor class implementing what all cursor classes have in
    common.

    Bug fix: setoutputsize() used to report "setinputsize" in its
    closed-cursor error message; it now reports its own name.
    """

    def __init__(self, conn, rowclass=PgResultSet):
        self.arraysize = 1

        # Add ourselves to the list of cursors for our owning connection.
        # A weak proxy avoids a reference cycle between cursor and
        # connection.
        self.con = weakref.proxy(conn)
        self.con.cursors[id(self)] = self

        self.rowclass = rowclass

        self._reset()
        self.current_recnum = -1

    def _reset(self):
        """Reset the cursor to its initial, not-yet-opened state."""
        # closed is a trinary variable:
        #     == None => Cursor has not been opened.
        #     ==    0 => Cursor is open.
        #     ==    1 => Cursor is closed.
        self.closed = None
        self.rowcount = -1
        self._real_rowcount = 0
        self.description = None
        self.rs = None
        self.current_recnum = 0

    def _checkNotClosed(self, methodname=None):
        """Raise ProgrammingError if this cursor has been closed."""
        if self.closed:
            raise _sqlite.ProgrammingError(
                "%s failed - the cursor is closed." % (methodname or ""))

    def _unicodeConvert(self, obj):
        """Encode all unicode strings that can be found in obj into
        byte-strings using the encoding specified in the connection's
        constructor, available here as self.con.encoding."""

        if isinstance(obj, StringType):
            return obj
        elif isinstance(obj, UnicodeType):
            return obj.encode(*self.con.encoding)
        elif isinstance(obj, ListType) or isinstance(obj, TupleType):
            converted_obj = []
            for item in obj:
                if isinstance(item, UnicodeType):
                    converted_obj.append(item.encode(*self.con.encoding))
                else:
                    converted_obj.append(item)
            return converted_obj
        elif isinstance(obj, DictType):
            converted_obj = {}
            for k, v in obj.items():
                if isinstance(v, UnicodeType):
                    converted_obj[k] = v.encode(*self.con.encoding)
                else:
                    converted_obj[k] = v
            return converted_obj
        elif isinstance(obj, PgResultSet):
            # Copy first so the caller's row object is not mutated.
            obj = copy.copy(obj)
            for k, v in obj.items():
                if isinstance(v, UnicodeType):
                    obj[k] = v.encode(*self.con.encoding)
            return obj
        else:
            return obj

    def execute(self, SQL, *parms):
        """Execute one SQL statement, with optional %s/%(name)s params."""
        self._checkNotClosed("execute")

        if self.con.autocommit:
            pass
        else:
            # Implicitly open a transaction before the first statement
            # that can modify data; SELECT/VACUUM/DETACH do not start one.
            if not(self.con.inTransaction or SQL.lstrip()[:6].upper() in ("SELECT","VACUUM","DETACH")):
                self.con._begin()
                self.con.inTransaction = 1

        SQL = self._unicodeConvert(SQL)

        if len(parms) == 0:
            # If there are no parameters, just execute the query.
            self.rs = self.con.db.execute(SQL)
        else:
            if len(parms) == 1 and \
               (type(parms[0]) in (DictType, ListType, TupleType) or \
                isinstance(parms[0], PgResultSet)):
                # One sequence/mapping argument: quote each element.
                parms = (self._unicodeConvert(parms[0]),)
                parms = _quoteall(parms[0])
            else:
                # Positional arguments: quote each one individually.
                parms = self._unicodeConvert(parms)
                parms = tuple(map(_quote, parms))

            self.rs = self.con.db.execute(SQL % parms)

        self.closed = 0
        self.current_recnum = 0

        self.rowcount, self._real_rowcount = [len(self.rs.row_list)] * 2
        # For data-modifying statements report the number of changed rows,
        # as required by the DB-API.
        if self.rowcount == 0 and \
           SQL.lstrip()[:6].upper() in ("INSERT", "UPDATE", "DELETE"):
            self.rowcount = self.con.db.sqlite_changes()

        self.description = self.rs.col_defs

        # Build a concrete row class matching this result's columns.
        if issubclass(self.rowclass, PgResultSet):
            self.rowclass = make_PgResultSetClass(self.description[:])

    def executemany(self, query, parm_sequence):
        """Run the same query once per parameter set in parm_sequence."""
        self._checkNotClosed("executemany")

        if self.con is None:
            raise _sqlite.ProgrammingError("connection is closed.")

        for _i in parm_sequence:
            self.execute(query, _i)

    def close(self):
        """Close the cursor and detach it from its connection."""
        if self.con and self.con.closed:
            raise _sqlite.ProgrammingError(
                "This cursor's connection is already closed.")
        if self.closed:
            raise _sqlite.ProgrammingError(
                "This cursor is already closed.")
        self.closed = 1

        # Disassociate ourselves from our connection.  Best-effort: the
        # connection (a weak proxy) may already be gone.
        try:
            cursors = self.con.cursors
            del cursors.data[id(self)]
        except:
            pass

    def __del__(self):
        # Disassociate ourselves from our connection (best-effort, the
        # weakly referenced connection may already have been collected).
        try:
            cursors = self.con.cursors
            del cursors.data[id(self)]
        except:
            pass

    def setinputsizes(self, sizes):
        """Does nothing, required by DB API."""
        self._checkNotClosed("setinputsizes")

    def setoutputsize(self, size, column=None):
        """Does nothing, required by DB API."""
        # Bug fix: used to pass "setinputsize" here, producing a wrong
        # method name in the error message.
        self._checkNotClosed("setoutputsize")

    #
    # DB-API methods:
    #

    def fetchone(self):
        """Return the next row, or None when the set is exhausted."""
        self._checkNotClosed("fetchone")

        # If there are no records
        if self._real_rowcount == 0:
            return None

        # If we have reached the last record
        if self.current_recnum >= self._real_rowcount:
            return None

        if type(self.rowclass) is TupleType:
            retval = self.rs.row_list[self.current_recnum]
        else:
            retval = self.rowclass(self.rs.row_list[self.current_recnum])
        self.current_recnum += 1

        return retval

    def fetchmany(self, howmany=None):
        """Return up to `howmany` rows (default: self.arraysize)."""
        self._checkNotClosed("fetchmany")

        if howmany is None:
            howmany = self.arraysize

        # If there are no records
        if self._real_rowcount == 0:
            return []

        # If we have reached the last record
        if self.current_recnum >= self._real_rowcount:
            return []

        if type(self.rowclass) is TupleType:
            retval = self.rs.row_list[self.current_recnum:self.current_recnum + howmany]
        else:
            retval = [self.rowclass(row) for row in self.rs.row_list[self.current_recnum:self.current_recnum + howmany]]

        self.current_recnum += howmany
        if self.current_recnum > self._real_rowcount:
            self.current_recnum = self._real_rowcount

        return retval

    def fetchall(self):
        """Return all remaining rows as a list."""
        self._checkNotClosed("fetchall")

        # If there are no records
        if self._real_rowcount == 0:
            return []

        # If we have reached the last record
        if self.current_recnum >= self._real_rowcount:
            return []

        if type(self.rowclass) is TupleType:
            retval = self.rs.row_list[self.current_recnum:]
        else:
            retval = [self.rowclass(row) for row in self.rs.row_list[self.current_recnum:]]

        self.current_recnum = self._real_rowcount

        return retval

    #
    # Optional DB-API extensions from PEP 0249:
    #

    def __iter__(self):
        return self

    def next(self):
        # Iterator protocol: StopIteration on Python >= 2.2, IndexError
        # on older versions (per the PEP 249 iteration extension).
        item = self.fetchone()
        if item is None:
            if sys.version_info[:2] >= (2,2):
                raise StopIteration
            else:
                raise IndexError
        else:
            return item

    def scroll(self, value, mode="relative"):
        """Move the row pointer (optional DB-API extension)."""
        if mode == "relative":
            new_recnum = self.current_recnum + value
        elif mode == "absolute":
            new_recnum = value
        else:
            raise ValueError("invalid mode parameter")
        if new_recnum >= 0 and new_recnum < self.rowcount:
            self.current_recnum = new_recnum
        else:
            raise IndexError

    def __getattr__(self, key):
        # Dynamically computed attributes; "rownumber", "lastrowid" and
        # "connection" are optional DB-API extensions from PEP 0249.
        if self.__dict__.has_key(key):
            return self.__dict__[key]
        elif key == "sql":
            # The sql attribute is a PySQLite extension.
            return self.con.db.sql
        elif key == "rownumber":
            return self.current_recnum
        elif key == "lastrowid":
            return self.con.db.sqlite_last_insert_rowid()
        elif key == "connection":
            return self.con
        else:
            raise AttributeError(key)
+
class UnicodeConverter:
    """Converter callable that decodes database strings to unicode with a
    fixed encoding (plus optional error-handling mode)."""

    def __init__(self, encoding):
        # encoding is a tuple of arguments for unicode(), e.g. ("utf-8",)
        # or ("utf-8", "replace").
        self.encoding = encoding

    def __call__(self, val):
        return unicode(val, *self.encoding)
+
+class Connection:
+
+ def __init__(self, database=None, mode=0755, converters={}, autocommit=0, encoding=None, timeout=None, command_logfile=None, *arg, **kwargs):
+ # Old parameter names, for backwards compatibility
+ database = database or kwargs.get("db")
+ encoding = encoding or kwargs.get("client_encoding")
+
+ # Set these here, to prevent an attribute access error in __del__
+ # in case the connect fails.
+ self.closed = 0
+ self.db = None
+ self.inTransaction = 0
+ self.autocommit = autocommit
+ self.cursors = weakref.WeakValueDictionary()
+ self.rowclass = PgResultSet
+
+ self.db = _sqlite.connect(database, mode)
+
+ if type(encoding) not in (TupleType, ListType):
+ self.encoding = (encoding or sys.getdefaultencoding(),)
+ else:
+ self.encoding = encoding
+
+ register = self.db.register_converter
+ # These are the converters we provide by default ...
+ register("str", str)
+ register("int", int)
+ register("long", long)
+ register("float", float)
+ register("unicode", UnicodeConverter(self.encoding))
+ register("binary", _sqlite.decode)
+
+ # ... and DateTime/DateTimeDelta, if we have the mx.DateTime module.
+ if have_datetime:
+ register("date", DateTime.DateFrom)
+ register("time", DateTime.TimeFrom)
+ register("timestamp", DateTime.DateTimeFrom)
+ register("interval", DateTime.DateTimeDeltaFrom)
+
+ for typename, conv in converters.items():
+ register(typename, conv)
+
+ if timeout is not None:
+ def busy_handler(timeout, table, count):
+ if count == 1:
+ busy_handler.starttime = time.time()
+ elapsed_time = time.time() - busy_handler.starttime
+ print elapsed_time, timeout
+ if elapsed_time > timeout:
+ return 0
+ else:
+ time_to_sleep = 0.01 * (2 << min(5, count))
+ time.sleep(time_to_sleep)
+ return 1
+
+ self.db.sqlite_busy_handler(busy_handler, timeout/1000.0)
+
+ self.db.set_command_logfile(command_logfile)
+
+ def __del__(self):
+ if not self.closed:
+ self.close()
+
+ def _checkNotClosed(self, methodname):
+ if self.closed:
+ raise _sqlite.ProgrammingError, \
+ "%s failed - Connection is closed." % methodname
+
+ def __anyCursorsLeft(self):
+ return len(self.cursors.data.keys()) > 0
+
+ def __closeCursors(self, doclose=0):
+ """__closeCursors() - closes all cursors associated with this connection"""
+ if self.__anyCursorsLeft():
+ cursors = map(lambda x: x(), self.cursors.data.values())
+
+ for cursor in cursors:
+ try:
+ if doclose:
+ cursor.close()
+ else:
+ cursor._reset()
+ except weakref.ReferenceError:
+ pass
+
+ def _begin(self):
+ self.db.execute("BEGIN")
+ self.inTransaction = 1
+
+ #
+ # PySQLite extensions:
+ #
+
+ def create_function(self, name, nargs, func):
+ self.db.create_function(name, nargs, func)
+
+ def create_aggregate(self, name, nargs, agg_class):
+ self.db.create_aggregate(name, nargs, agg_class)
+
+ #
+ # DB-API methods:
+ #
+
+ def commit(self):
+ self._checkNotClosed("commit")
+ if self.autocommit:
+ # Ignore .commit(), according to the DB-API spec.
+ return
+
+ if self.inTransaction:
+ self.db.execute("COMMIT")
+ self.inTransaction = 0
+
+ def rollback(self):
+ self._checkNotClosed("rollback")
+ if self.autocommit:
+ raise _sqlite.ProgrammingError, "Rollback failed - autocommit is on."
+
+ if self.inTransaction:
+ self.db.execute("ROLLBACK")
+ self.inTransaction = 0
+
+ def close(self):
+ self._checkNotClosed("close")
+
+ self.__closeCursors(1)
+
+ if self.inTransaction:
+ self.rollback()
+
+ if self.db:
+ self.db.close()
+ self.closed = 1
+
+ def cursor(self):
+ self._checkNotClosed("cursor")
+ return Cursor(self, self.rowclass)
+
+ #
+ # Optional DB-API extensions from PEP 0249:
+ #
+
+ def __getattr__(self, key):
+ if key in self.__dict__.keys():
+ return self.__dict__[key]
+ elif key in ('IntegrityError', 'InterfaceError', 'InternalError',
+ 'NotSupportedError', 'OperationalError',
+ 'ProgrammingError', 'Warning'):
+ return getattr(_sqlite, key)
+ else:
+ raise AttributeError, key
+
+ #
+ # MySQLdb compatibility stuff
+ #
+
+ def insert_id(self):
+ return self.db.sqlite_last_insert_rowid()
diff --git a/test/all_tests.py b/test/all_tests.py
new file mode 100644
index 0000000..df1a03d
--- /dev/null
+++ b/test/all_tests.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+"""
+This combines all PySQLite test suites into one big one.
+"""
+
+import unittest, sys
+import api_tests, logging_tests, lowlevel_tests, pgresultset_tests, type_tests
+import userfunction_tests, transaction_tests
+
+def suite():
+    """Combine the suites of all PySQLite test modules into one TestSuite."""
+    suite = unittest.TestSuite((lowlevel_tests.suite(), api_tests.suite(),
+                                type_tests.suite(), userfunction_tests.suite(),
+                                transaction_tests.suite(), pgresultset_tests.suite(),
+                                logging_tests.suite()))
+
+    return suite
+
+def main():
+ runner = unittest.TextTestRunner()
+ runner.run(suite())
+
+if __name__ == "__main__":
+ main()
diff --git a/test/api_tests.py b/test/api_tests.py
new file mode 100644
index 0000000..78f7c7f
--- /dev/null
+++ b/test/api_tests.py
@@ -0,0 +1,524 @@
+#!/usr/bin/env python
+import testsupport
+import os, string, sys, types, unittest, weakref
+import sqlite
+
+class DBAPICompliance(unittest.TestCase):
+    """Checks that the sqlite module exposes the module-level metadata and
+    the exception hierarchy required by the DB-API 2.0 spec (PEP 249)."""
+
+    def CheckAPILevel(self):
+        self.assertEqual(sqlite.apilevel, '2.0',
+                         'apilevel is %s, should be 2.0' % sqlite.apilevel)
+
+    def CheckThreadSafety(self):
+        # 1 = threads may share the module, but not connections.
+        self.assertEqual(sqlite.threadsafety, 1,
+                         'threadsafety is %d, should be 1' % sqlite.threadsafety)
+
+    def CheckParamStyle(self):
+        self.assertEqual(sqlite.paramstyle, 'pyformat',
+                         'paramstyle is "%s", should be "pyformat"' %
+                         sqlite.paramstyle)
+
+    def CheckWarning(self):
+        self.assert_(issubclass(sqlite.Warning, StandardError),
+                     'Warning is not a subclass of StandardError')
+
+    def CheckError(self):
+        self.failUnless(issubclass(sqlite.Error, StandardError),
+                        'Error is not a subclass of StandardError')
+
+    def CheckInterfaceError(self):
+        self.failUnless(issubclass(sqlite.InterfaceError, sqlite.Error),
+                        'InterfaceError is not a subclass of Error')
+
+    def CheckDatabaseError(self):
+        self.failUnless(issubclass(sqlite.DatabaseError, sqlite.Error),
+                        'DatabaseError is not a subclass of Error')
+
+    def CheckDataError(self):
+        self.failUnless(issubclass(sqlite.DataError, sqlite.DatabaseError),
+                        'DataError is not a subclass of DatabaseError')
+
+    def CheckOperationalError(self):
+        self.failUnless(issubclass(sqlite.OperationalError, sqlite.DatabaseError),
+                        'OperationalError is not a subclass of DatabaseError')
+
+    def CheckIntegrityError(self):
+        self.failUnless(issubclass(sqlite.IntegrityError, sqlite.DatabaseError),
+                        'IntegrityError is not a subclass of DatabaseError')
+
+    def CheckInternalError(self):
+        self.failUnless(issubclass(sqlite.InternalError, sqlite.DatabaseError),
+                        'InternalError is not a subclass of DatabaseError')
+
+    def CheckProgrammingError(self):
+        self.failUnless(issubclass(sqlite.ProgrammingError, sqlite.DatabaseError),
+                        'ProgrammingError is not a subclass of DatabaseError')
+
+    def CheckNotSupportedError(self):
+        self.failUnless(issubclass(sqlite.NotSupportedError,
+                                   sqlite.DatabaseError),
+                        'NotSupportedError is not a subclass of DatabaseError')
+
+class moduleTestCases(unittest.TestCase, testsupport.TestSupport):
+ def setUp(self):
+ self.filename = self.getfilename()
+ self.cnx = sqlite.connect(self.filename)
+ self.cur = self.cnx.cursor()
+
+ def tearDown(self):
+ try:
+ self.cnx.close()
+ self.removefile()
+ except AttributeError:
+ pass
+ except sqlite.ProgrammingError:
+ pass
+
+ def CheckConnectionObject(self):
+ self.assert_(isinstance(self.cnx, sqlite.Connection),
+ 'sqlite.connect did not return a Connection object')
+
+ def CheckConnectionClose(self):
+ self.assert_(hasattr(self.cnx, 'close') and
+ type(self.cnx.close) == types.MethodType,
+ 'close is not a method of Connection')
+ self.cnx.close()
+ self.removefile()
+ self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.close)
+
+ def CheckConnectionCommit(self):
+ self.assert_(hasattr(self.cnx, "commit") and
+ type(self.cnx.commit) == types.MethodType,
+ 'commit is not a method of Connection')
+ self.cnx.close()
+ self.removefile()
+ self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.commit)
+
+ def CheckConnectionRollback(self):
+ self.assert_(hasattr(self.cnx, "rollback") and
+ type(self.cnx.rollback) == types.MethodType,
+ 'rollback is not a method of Connection')
+ self.cnx.close()
+ self.removefile()
+ self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.rollback)
+
+ def CheckConnectionCursor(self):
+ self.assert_(hasattr(self.cnx, "cursor") and
+ type(self.cnx.cursor) == types.MethodType,
+ 'cursor is not a method of Connection')
+ self.cnx.close()
+ self.removefile()
+ self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.cursor)
+
+ def CheckCloseConnection(self):
+ self.cnx.close()
+ self.removefile()
+
+ def CheckCursorObject(self):
+ self.assert_(isinstance(self.cur, sqlite.Cursor),
+ 'cnx.cursor() did not return a Cursor instance')
+
+ def CheckCursorArraysize(self):
+ self.assert_(self.cur.arraysize == 1,
+ 'cur.arraysize is %d, it should be 1' %
+ self.cur.arraysize)
+
+ def CheckCursorDescription(self):
+ self.assert_(self.cur.description == None,
+ "cur.description should be None at this point, it isn't.")
+
+ def CheckCursorDescriptionNoRow(self):
+ """
+ cursor.description should at least provide the column name(s), even if
+ no row returned.
+ """
+ self.cur.execute("create table test(a, b)")
+ self.cur.execute("select a, b from test")
+ self.assert_(self.cur.description[0][0] == "a")
+ self.assert_(self.cur.description[1][0] == "b")
+
+ def CheckCursorRowcount(self):
+ self.assert_(self.cur.rowcount == -1,
+ 'cur.rowcount is %d, should be -1' % self.cur.rowcount)
+
+ def CheckCursorClose(self):
+ self.assert_(hasattr(self.cur, "close") and
+ type(self.cur.close) == types.MethodType,
+ 'close is not a method of the Cursor object')
+ self.cur.close()
+ self.failUnlessRaises(sqlite.ProgrammingError, self.cur.close)
+
+ def CheckCursorExecute(self):
+ self.assert_(hasattr(self.cur, "execute") and
+ type(self.cur.execute) == types.MethodType,
+ 'execute is not a method of the Cursor object')
+ self.cur.close()
+ self.failUnlessRaises(sqlite.ProgrammingError,
+ self.cur.execute, 'SELECT max(3,4)')
+
+ def CheckCursorExecutemany(self):
+ self.assert_(hasattr(self.cur, "executemany") and
+ type(self.cur.executemany) == types.MethodType,
+ 'executemany is not a method of the Cursor object')
+
+ self.cur.close()
+ self.failUnlessRaises(sqlite.ProgrammingError,
+ self.cur.executemany, 'SELECT max(3,4)', [1,2])
+
+ def CheckCursorFetchone(self):
+ self.assert_(hasattr(self.cur, "fetchone") and
+ type(self.cur.fetchone) == types.MethodType,
+ 'fetchone is not a method of the Cursor object')
+ self.cur.close()
+ self.failUnlessRaises(sqlite.ProgrammingError, self.cur.fetchone)
+
+ def CheckCursorFetchMany(self):
+ self.failUnless(hasattr(self.cur, "fetchmany") and
+ type(self.cur.fetchmany) == types.MethodType,
+ 'fetchmany is not a method of the Cursor object')
+
+ cursor = self.cnx.cursor()
+ cursor.execute("create table test(id int)")
+ cursor.executemany("insert into test(id) values (%s)", range(10))
+ cursor.execute("select id from test")
+ res = cursor.fetchmany()
+ self.failUnlessEqual(len(res), 1, """fetchmany should have returned a
+ list of length 1, but the list was %i elements long""" % len(res))
+ res = cursor.fetchmany(2)
+ self.failUnlessEqual(len(res), 2, """fetchmany should have returned a
+ list of length 2, but the list was %i elements long""" % len(res))
+ cursor.arraysize = 5
+ res = cursor.fetchmany()
+ self.failUnlessEqual(len(res), 5, """fetchmany should have returned a
+ list of length 5, but the list was %i elements long""" % len(res))
+
+ self.cur.close()
+ self.failUnlessRaises(sqlite.ProgrammingError,
+ self.cur.fetchmany, 10)
+
+ def CheckCursorFetchall(self):
+ self.failUnless(hasattr(self.cur, "fetchall") and
+ type(self.cur.fetchall) == types.MethodType,
+ 'fetchall is not a method of the Cursor object')
+ self.cur.close()
+ self.failUnlessRaises(sqlite.ProgrammingError,
+ self.cur.fetchall)
+
+ def CheckCursorSetoutputsize(self):
+ self.failUnless(hasattr(self.cur, "setoutputsize") and
+ type(self.cur.setoutputsize) == types.MethodType,
+ 'setoutputsize is not a method of the Cursor object')
+ self.cur.close()
+ self.failUnlessRaises(sqlite.ProgrammingError,
+ self.cur.setoutputsize, 1024)
+
+ def CheckCursorSetinputsizes(self):
+ self.failUnless(hasattr(self.cur, "setinputsizes") and
+ type(self.cur.setinputsizes) == types.MethodType,
+ 'setinputsizes is not a method of the Cursor object')
+ self.cur.close()
+ self.failUnlessRaises(sqlite.ProgrammingError,
+ self.cur.setinputsizes, [1, 2, 3])
+
+ def CheckExecuteWithSingleton(self):
+ """Test execute() with a singleton string as the parameter."""
+ try:
+ self.cur.execute("select max(3,4)")
+ except StandardError, msg:
+ self.fail(msg)
+
+ self.assertEqual(type(self.cur.description), types.TupleType,
+ "cur.description should be a tuple, but isn't.")
+
+ clen = len(self.cur.description)
+ self.assertEqual(clen, 1,
+ "Length of cur.description is %d, it should be %d." %
+ (clen, 1))
+
+
+ self.assertEqual(len(self.cur.description[0]), 7,
+ "Length of cur.description[0] is %d, it should be 7." %
+ len(self.cur.description[0]))
+
+
+ self.failUnless(self.cur.description[0][0] == "max(3,4)" and
+ self.cur.description[0][1] == sqlite.NUMBER and
+ self.cur.description[0][2] == None and
+ self.cur.description[0][3] == None and
+ self.cur.description[0][4] == None and
+ self.cur.description[0][5] == None and
+ self.cur.description[0][6] == None,
+ "cur.description[0] does not match the query.")
+ self.cur.close()
+
+ def CheckExecuteWithTuple(self):
+ """Test execute() with a tuple as the parameter."""
+ try:
+ self.cur.execute("select max(%s, %s)", (4, 5))
+ except StandardError, msg:
+ self.fail(msg)
+
+ # Empty tuple
+ try:
+ self.cur.execute("select 3+4", ())
+ except StandardError, msg:
+ self.fail(msg)
+ self.cur.close()
+
+ def CheckExecuteWithDictionary(self):
+ """Test execute() with a dictionary as the parameter."""
+ try:
+ self.cur.execute("select max(%(n1)s, %(n2)s)", {"n1": 5, "n2": 6})
+ except StandardError, msg:
+ self.fail(msg)
+ self.cur.close()
+
+    def CheckQuotingOfLong(self):
+        """Test whether longs are quoted properly for SQL."""
+        try:
+            # "-- types long" is a PySQLite pragma comment declaring the
+            # column types of the next query's result set.
+            self.cur.execute("-- types long")
+            self.cur.execute("select %s + %s as x", (5L, 6L))
+        except StandardError, msg:
+            self.fail(msg)
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.x, 11L,
+            "The addition of long should have returned %i, returned %i"
+            % (11L, res.x))
+
+ def CheckCursorIterator(self):
+ self.cur.execute("create table test (id, name)")
+ self.cur.executemany("insert into test (id) values (%s)",
+ [(1,), (2,), (3,)])
+ self.cur.execute("-- types int")
+ self.cur.execute("select id from test")
+
+ if sys.version_info[:2] >= (2,2):
+ counter = 0
+ for row in self.cur:
+ if counter == 0:
+ self.failUnlessEqual(row.id, 1,
+ "row.id should have been 1, was %i" % row.id)
+ elif counter == 1:
+ self.failUnlessEqual(row.id, 2,
+ "row.id should have been 2, was %i" % row.id)
+ elif counter == 2:
+ self.failUnlessEqual(row.id, 3,
+ "row.id should have been 3, was %i" % row.id)
+ else:
+ self.fail("Iterated over too many rows.")
+ counter += 1
+ else:
+ # Python 2.1
+ counter = 0
+ try:
+ while 1:
+ row = self.cur.next()
+ if counter == 0:
+ self.failUnlessEqual(row.id, 1,
+ "row.id should have been 1, was %i" % row.id)
+ elif counter == 1:
+ self.failUnlessEqual(row.id, 2,
+ "row.id should have been 2, was %i" % row.id)
+ elif counter == 2:
+ self.failUnlessEqual(row.id, 3,
+ "row.id should have been 3, was %i" % row.id)
+ else:
+ self.fail("Iterated over too many rows.")
+ counter += 1
+ except IndexError:
+ pass
+ self.failUnlessEqual(counter, 3,
+ "Should have iterated over 3 items, was: %i" % counter)
+
+ def CheckCursorScrollAndRownumber(self):
+ self.cur.execute("create table test (id, name)")
+ values = [("foo",)] * 20
+ self.cur.executemany("insert into test (name) values (%s)", values)
+ self.cur.execute("select name from test")
+ self.failUnlessEqual(self.cur.rownumber, 0,
+ "Directly after execute, rownumber must be 0, is: %i"
+ % self.cur.rownumber)
+
+ self.cur.scroll(1, "absolute")
+ self.cur.scroll(5, "absolute")
+ self.failUnlessEqual(self.cur.rownumber, 5,
+ "rownumber should be 5, is: %i"
+ % self.cur.rownumber)
+
+ self.cur.scroll(1, "relative")
+ self.failUnlessEqual(self.cur.rownumber, 6,
+ "rownumber should be 6, is: %i"
+ % self.cur.rownumber)
+
+ self.cur.scroll(-2, "relative")
+ self.failUnlessEqual(self.cur.rownumber, 4,
+ "rownumber should be 4, is: %i"
+ % self.cur.rownumber)
+
+ self.failUnlessRaises(IndexError, self.cur.scroll, -2, "absolute")
+ self.failUnlessRaises(IndexError, self.cur.scroll, 1000, "absolute")
+
+ self.cur.scroll(10, "absolute")
+ self.failUnlessRaises(IndexError, self.cur.scroll, -11, "relative")
+
+ self.cur.scroll(10, "absolute")
+ self.failUnlessRaises(IndexError, self.cur.scroll, 30, "relative")
+
+ def CheckCursorConnection(self):
+ if not isinstance(self.cur.connection, weakref.ProxyType) and \
+ not isinstance(self.cur.connection, weakref.CallableProxyType):
+ fail("cursor.connection doesn't return the correct type")
+
+ def CheckCursorLastRowID(self):
+ self.cur.execute("create table test (id integer primary key, name)")
+
+ self.cur.execute("insert into test(name) values ('foo')")
+ self.failUnlessEqual(self.cur.lastrowid, 1,
+ "lastrowid should be 1, is %i" % self.cur.lastrowid)
+
+ self.cur.execute("insert into test(name) values ('foo')")
+ self.failUnlessEqual(self.cur.lastrowid, 2,
+ "lastrowid should be 2, is %i" % self.cur.lastrowid)
+
+ def CheckResultObject(self):
+ try:
+ self.cur.execute("select max(3,4)")
+ self.assertEqual(self.cur.rowcount, 1,
+ "cur.rowcount is %d, it should be 1." %
+ self.cur.rowcount)
+ self.res = self.cur.fetchall()
+ except StandardError, msg:
+ self.fail(msg)
+
+ self.assertEqual(type(self.res), types.ListType,
+ 'cur.fetchall() did not return a sequence.')
+
+ self.assertEqual(len(self.res), 1,
+ 'Length of the list of results is %d, it should be 1' %
+ len(self.res))
+
+ self.failUnless(isinstance(self.res[0], sqlite.PgResultSet),
+ 'cur.fetchall() did not return a list of PgResultSets.')
+
+ def CheckResultFetchone(self):
+ try:
+ self.cur.execute("select max(3,4)")
+ self.res = self.cur.fetchone()
+ self.assertEqual(self.cur.rowcount, 1,
+ 'cur.rowcount is %d, it should be 1.' %
+ self.cur.rowcount)
+ except StandardError, msg:
+ self.fail(msg)
+
+ self.failUnless(isinstance(self.res, sqlite.PgResultSet),
+ "cur.fetchone() does not return a PgResultSet.")
+
+ try:
+ self.res = self.cur.fetchone()
+ self.assertEqual(self.res, None,
+ "res should be None at this point, but it isn't.")
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckRowCountAfterInsert(self):
+ try:
+ self.cur.execute("create table test(a)")
+ self.cur.execute("insert into test(a) values (5)")
+ self.assertEqual(self.cur.rowcount, 1,
+ 'cur.rowcount is %d, it should be 1.' %
+ self.cur.rowcount)
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckRowCountAfterUpdate(self):
+ try:
+ self.cur.execute("create table test(a, b)")
+ self.cur.execute("insert into test(a, b) values (1, 2)")
+ self.cur.execute("insert into test(a, b) values (1, 3)")
+ self.cur.execute("insert into test(a, b) values (1, 4)")
+ self.cur.execute("update test set b=1 where a=1")
+ self.assertEqual(self.cur.rowcount, 3,
+ 'cur.rowcount is %d, it should be 3.' %
+ self.cur.rowcount)
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckRowCountAfterDelete(self):
+ try:
+ self.cur.execute("create table test(a, b)")
+ self.cur.execute("insert into test(a, b) values (1, 2)")
+ self.cur.execute("insert into test(a, b) values (1, 3)")
+ self.cur.execute("insert into test(a, b) values (2, 4)")
+ self.cur.execute("delete from test where a=1")
+ self.assertEqual(self.cur.rowcount, 2,
+ 'cur.rowcount is %d, it should be 2.' %
+ self.cur.rowcount)
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckSelectOfNonPrintableString(self):
+ try:
+ a = '\x01\x02\x03\x04'
+ self.cur.execute('select %s as a', a)
+ r = self.cur.fetchone()
+ self.assertEqual(len(r.a), len(a),
+ "Length of result is %d, it should be %d." %
+ (len(r.a), len(a)))
+ self.failUnless(r.a == a,
+ "Result is '%s', it should be '%s'" % (r.a, a))
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckQuotingIntWithPercentS(self):
+ try:
+ self.cur.execute("create table test(a number)")
+ self.cur.execute("insert into test(a) values (%s)", (5,))
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckQuotingLongWithPercentS(self):
+ try:
+ self.cur.execute("create table test(a number)")
+ self.cur.execute("insert into test(a) values (%s)", (50000000L,))
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckQuotingFloatWithPercentS(self):
+ try:
+ self.cur.execute("create table test(a number)")
+ self.cur.execute("insert into test(a) values (%s)", (-3.24,))
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckQuotingIntWithPyQuoting(self):
+ try:
+ self.cur.execute("create table test(a number)")
+ self.cur.execute("insert into test(a) values (%i)", (5,))
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckQuotingLongWithPyQuoting(self):
+ try:
+ self.cur.execute("create table test(a number)")
+ self.cur.execute("insert into test(a) values (%i)", (50000000L,))
+ except StandardError, msg:
+ self.fail(msg)
+
+ def CheckQuotingFloatWithPyQuoting(self):
+ try:
+ self.cur.execute("create table test(a number)")
+ self.cur.execute("insert into test(a) values (%f)", (-3.24,))
+ except StandardError, msg:
+ self.fail(msg)
+
+def suite():
+ dbapi_suite = unittest.makeSuite(DBAPICompliance, "Check")
+ module_suite = unittest.makeSuite(moduleTestCases, "Check")
+ test_suite = unittest.TestSuite((dbapi_suite, module_suite))
+ return test_suite
+
+def main():
+ runner = unittest.TextTestRunner()
+ runner.run(suite())
+
+if __name__ == "__main__":
+ main()
diff --git a/test/logging_tests.py b/test/logging_tests.py
new file mode 100644
index 0000000..ed7bf24
--- /dev/null
+++ b/test/logging_tests.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+import testsupport
+import StringIO, unittest
+import sqlite
+
+class LogFileTemplate:
+    """Donor class holding a no-op write() that can be grafted onto
+    LogFile to make it file-like."""
+    def write(self, s):
+        pass
+
+class LogFile:
+    """Minimal logger stand-in; gains a write() method only after
+    init_LogFile() is called."""
+    def __init__(self):
+        pass
+
+def init_LogFile():
+    # Graft the no-op write() onto LogFile so instances look file-like.
+    LogFile.write = LogFileTemplate.write
+
+class CommandLoggingTests(unittest.TestCase, testsupport.TestSupport):
+ def tearDown(self):
+ try:
+ self.cnx.close()
+ self.removefile()
+ except AttributeError:
+ pass
+ except sqlite.InterfaceError:
+ pass
+
+ def CheckNoWrite(self):
+ init_LogFile()
+ del LogFile.write
+ logger = LogFile()
+ try:
+ self.cnx = sqlite.connect(self.getfilename(),
+ command_logfile=logger)
+
+ self.fail("ValueError not raised")
+ except ValueError:
+ pass
+
+ def CheckWriteNotCallable(self):
+ logger = LogFile()
+ logger.write = 5
+ try:
+ self.cnx = sqlite.connect(self.getfilename(),
+ command_logfile=logger)
+
+ self.fail("ValueError not raised")
+ except ValueError:
+ pass
+
+ def CheckLoggingWorks(self):
+ logger = StringIO.StringIO()
+
+ expected_output = "\n".join([
+ "BEGIN", "CREATE TABLE TEST(FOO INTEGER)",
+ "INSERT INTO TEST(FOO) VALUES (5)",
+ "ROLLBACK"]) + "\n"
+
+ self.cnx = sqlite.connect(self.getfilename(),
+ command_logfile=logger)
+ cu = self.cnx.cursor()
+ cu.execute("CREATE TABLE TEST(FOO INTEGER)")
+ cu.execute("INSERT INTO TEST(FOO) VALUES (%i)", (5,))
+ self.cnx.rollback()
+
+ logger.seek(0)
+ real_output = logger.read()
+
+ if expected_output != real_output:
+ self.fail("Logging didn't produce expected output.")
+
+def suite():
+ command_logging_suite = unittest.makeSuite(CommandLoggingTests, "Check")
+ return command_logging_suite
+
+def main():
+ runner = unittest.TextTestRunner()
+ runner.run(suite())
+
+if __name__ == "__main__":
+ main()
diff --git a/test/lowlevel_tests.py b/test/lowlevel_tests.py
new file mode 100644
index 0000000..7a1d537
--- /dev/null
+++ b/test/lowlevel_tests.py
@@ -0,0 +1,162 @@
+#!/usr/bin/env python
+"""
+These are the tests for the low-level module _sqlite.
+
+They try to execute as much of the low-level _sqlite module as possible to
+facilitate coverage testing with the help of gcov.
+"""
+
+from __future__ import nested_scopes
+import testsupport
+import os, unittest, re
+import _sqlite
+from sqlite import ProgrammingError
+
+class lowlevelTestCases(unittest.TestCase, testsupport.TestSupport):
+ def setUp(self):
+ self.filename = self.getfilename()
+ self.cnx = _sqlite.connect(self.filename)
+
+ def tearDown(self):
+ try:
+ self.cnx.close()
+ self.removefile()
+ del self.cnx
+ except AttributeError:
+ pass
+ except ProgrammingError:
+ pass
+
+ def CheckModuleAttributeAccess(self):
+ for attr in dir(_sqlite):
+ _sqlite.__dict__[attr]
+
+ def CheckConnectionAttributes(self):
+ self.cnx.filename
+ self.cnx.sql
+ self.cnx.sqlite_changes()
+ self.cnx.sqlite_last_insert_rowid()
+
+ try:
+ self.cnx.foo = 7
+ self.fail("Could set attribute. Connection object should be read-only.")
+ except TypeError:
+ pass
+
+ def CheckSQLiteExec(self):
+ self.cnx.execute("create table test(id int, name varchar(20))")
+ self.cnx.execute("insert into test(id, name) values (1, 'foo')")
+ self.cnx.execute("insert into test(id, name) values (2, 'bar')")
+
+ expected_colnames = ('id', 'name')
+ expected_values = [('1', 'foo'), ('2', 'bar')]
+ failures = []
+
+ def callback(arg1, items, colnames):
+ if colnames != expected_colnames:
+ failures.append("expected colnames %s, got %s"
+ % (repr(expected_colnames), repr(colnames)))
+ if items not in expected_values:
+ failures.append("%s not in expected_values %s"
+ % (repr(items), repr(expected_values)))
+ else:
+ expected_values.pop(0)
+
+ self.cnx.sqlite_exec("select * from test", callback, None)
+ if len(failures) > 0:
+ for failure in failures:
+ self.fail(failure)
+
+ def CheckSQLiteLastInsertRowID(self):
+ self.cnx.execute("create table test(id integer primary key, name varchar(20))")
+ self.cnx.execute("insert into test(id, name) values (NULL, 'foo')")
+ self.cnx.execute("insert into test(id, name) values (NULL, 'bar')")
+ rowid = self.cnx.sqlite_last_insert_rowid()
+ self.failUnlessEqual(rowid, 2,
+ "last inserted rowid should have been %i, was %i"
+ % (2, rowid))
+
+ def CheckSQLiteChanges(self):
+ self.cnx.execute("create table test(id integer primary key, name varchar(20))")
+ self.cnx.execute("insert into test(id, name) values (NULL, 'foo')")
+ self.cnx.execute("insert into test(id, name) values (NULL, 'bar')")
+ self.cnx.execute("insert into test(id, name) values (NULL, 'baz')")
+ self.cnx.execute("delete from test where name='baz'")
+ changed = self.cnx.sqlite_changes()
+ self.failUnlessEqual(changed, 1,
+ "changed rows should have been %i, was %i"
+ % (1, changed))
+ self.cnx.execute("update test set name='foobar' where id < 10")
+ changed = self.cnx.sqlite_changes()
+ self.failUnlessEqual(changed, 2,
+ "changed rows should have been %i, was %i"
+ % (2, changed))
+
+ def CheckConnectionForProgrammingError(self):
+ self.cnx.close()
+ self.removefile()
+
+ self.failUnlessRaises(ProgrammingError, self.cnx.close)
+ self.failUnlessRaises(ProgrammingError, self.cnx.execute, "")
+
+ def CheckConnectionForNumberOfArguments(self):
+ self.failUnlessRaises(TypeError, self.cnx.close, None)
+ self.failUnlessRaises(TypeError, self.cnx.execute, None, None)
+ self.failUnlessRaises(TypeError, self.cnx.sqlite_changes, None)
+ self.failUnlessRaises(TypeError, self.cnx.sqlite_exec, None)
+ self.failUnlessRaises(TypeError, self.cnx.sqlite_last_insert_rowid, None)
+
+ def CheckConnectionDestructor(self):
+ del self.cnx
+ self.removefile()
+
+ def CheckResultObject(self):
+ create_statement = "create table test(id INTEGER, name TEXT)"
+ self.cnx.execute(create_statement)
+
+ self.failUnlessEqual(create_statement, self.cnx.sql,
+ ".sql should have been %s, was %s" % (create_statement, self.cnx.sql))
+
+ self.cnx.execute("insert into test(id, name) values (4, 'foo')")
+ self.cnx.execute("insert into test(id, name) values (5, 'bar')")
+
+ res = self.cnx.execute("select id, name from test")
+ self.failUnless(res.rowcount == 2, "Should have returned 2 rows, but was %i" % res.rowcount)
+
+ correct_col_defs = (('id', _sqlite.INTEGER, None, None, None, None, None), \
+ ('name', _sqlite.STRING, None, None, None, None, None))
+ self.assertEqual(res.col_defs, correct_col_defs,
+ "col_defs should have been %s, was %s" % (repr(correct_col_defs), repr(res.col_defs)))
+
+ correct_row_list = [(4, 'foo'), (5, 'bar')]
+ self.assertEqual(res.row_list, correct_row_list,
+ "rowlist should have been %s, was %s" % (repr(correct_row_list), repr(res.row_list)))
+
+ def CheckResultAttributes(self):
+ res = self.cnx.execute("select NULL, max(4,5)")
+ try:
+ res.foo = 7
+
+ except TypeError:
+ pass
+
+ def CheckSQLiteVersion(self):
+ try:
+ ver = _sqlite.sqlite_version()
+ except:
+ self.fail('sqlite_version() failed')
+ pat = re.compile(r'\d*\.\d*\.\d*')
+ if not re.match(pat,ver):
+ self.fail('Incorrect sqlite_version() format, '
+ 'should be digits.digits.digits, was %s'%ver)
+
+
+def suite():
+ return unittest.makeSuite(lowlevelTestCases, "Check")
+
+def main():
+ runner = unittest.TextTestRunner()
+ runner.run(suite())
+
+if __name__ == "__main__":
+ main()
diff --git a/test/pgresultset_tests.py b/test/pgresultset_tests.py
new file mode 100644
index 0000000..81f1d46
--- /dev/null
+++ b/test/pgresultset_tests.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+import testsupport
+import os, unittest, sys
+import sqlite
+
+class PgResultSetTests(unittest.TestCase, testsupport.TestSupport):
+ def setUp(self):
+ self.filename = self.getfilename()
+ self.cnx = sqlite.connect(self.filename)
+ self.cur = self.cnx.cursor()
+
+ def tearDown(self):
+ try:
+ self.cnx.close()
+ self.removefile()
+ except AttributeError:
+ pass
+ except sqlite.ProgrammingError:
+ pass
+
+ def getResult(self):
+ try:
+ self.cur.execute("DROP TABLE TEST")
+ except sqlite.DatabaseError, reason:
+ pass
+
+ self.cur.execute("CREATE TABLE TEST (id, name, age)")
+ self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+ (5, 'Alice', 29))
+ self.cur.execute("-- types int, str, int")
+ self.cur.execute("SELECT id, name, age FROM TEST")
+ return self.cur.fetchone()
+
+ def CheckAttributeAccess(self):
+ res = self.getResult()
+ if not hasattr(res, "id"):
+ self.fail("Resultset doesn't have attribute 'id'")
+ if not hasattr(res, "ID"):
+ self.fail("Resultset doesn't have attribute 'ID'")
+
+ def CheckAttributeValue(self):
+ res = self.getResult()
+ if res.id != 5:
+ self.fail("id should be 5, is %i" % res.id)
+ if res.ID != 5:
+ self.fail("ID should be 5, is %i" % res.ID)
+
+ def CheckKeyAccess(self):
+ res = self.getResult()
+ if not "id" in res:
+ self.fail("Resultset doesn't have item 'id'")
+ if not "ID" in res:
+ self.fail("Resultset doesn't have item 'ID'")
+
+ def CheckKeyValue(self):
+ res = self.getResult()
+ if res["id"] != 5:
+ self.fail("id should be 5, is %i" % res.id)
+ if res["ID"] != 5:
+ self.fail("ID should be 5, is %i" % res.ID)
+
+ def CheckIndexValue(self):
+ res = self.getResult()
+ if res[0] != 5:
+ self.fail("item 0 should be 5, is %i" % res.id)
+
+ def Check_haskey(self):
+ res = self.getResult()
+ if not res.has_key("id"):
+ self.fail("resultset should have key 'id'")
+ if not res.has_key("ID"):
+ self.fail("resultset should have key 'ID'")
+ if not res.has_key("Id"):
+ self.fail("resultset should have key 'Id'")
+
+ def Check_len(self):
+ l = len(self.getResult())
+ if l != 3:
+ self.fail("length of resultset should be 3, is %i", l)
+
+ def Check_keys(self):
+ res = self.getResult()
+ if res.keys() != ["id", "name", "age"]:
+ self.fail("keys() should return %s, returns %s" %
+ (["id", "name", "age"], res.keys()))
+
+ def Check_values(self):
+ val = self.getResult().values()
+ if val != (5, 'Alice', 29):
+ self.fail("Wrong values(): %s" % val)
+
+ def Check_items(self):
+ it = self.getResult().items()
+ if it != [("id", 5), ("name", 'Alice'), ("age", 29)]:
+ self.fail("Wrong items(): %s" % it)
+
+ def Check_get(self):
+ res = self.getResult()
+ v = res.get("id")
+ if v != 5:
+ self.fail("Wrong result for get [1]")
+
+ v = res.get("ID")
+ if v != 5:
+ self.fail("Wrong result for get [2]")
+
+ v = res.get("asdf")
+ if v is not None:
+ self.fail("Wrong result for get [3]")
+
+ v = res.get("asdf", 6)
+ if v != 6:
+ self.fail("Wrong result for get [4]")
+
+class TupleResultTests(unittest.TestCase, testsupport.TestSupport):
+ def setUp(self):
+ self.filename = self.getfilename()
+ self.cnx = sqlite.connect(self.filename)
+ self.cnx.rowclass = tuple
+ self.cur = self.cnx.cursor()
+
+ def tearDown(self):
+ try:
+ self.cnx.close()
+ self.removefile()
+ except AttributeError:
+ pass
+ except sqlite.ProgrammingError:
+ pass
+
+ def getOneResult(self):
+ try:
+ self.cur.execute("DROP TABLE TEST")
+ except sqlite.DatabaseError, reason:
+ pass
+
+ self.cur.execute("CREATE TABLE TEST (id, name, age)")
+ self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+ (5, 'Alice', 29))
+ self.cur.execute("-- types int, str, int")
+ self.cur.execute("SELECT id, name, age FROM TEST")
+ return self.cur.fetchone()
+
+ def getManyResults(self):
+ try:
+ self.cur.execute("DROP TABLE TEST")
+ except sqlite.DatabaseError, reason:
+ pass
+
+ self.cur.execute("CREATE TABLE TEST (id, name, age)")
+ self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+ (5, 'Alice', 29))
+ self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+ (5, 'Alice', 29))
+ self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+ (5, 'Alice', 29))
+ self.cur.execute("-- types int, str, int")
+ self.cur.execute("SELECT id, name, age FROM TEST")
+ return self.cur.fetchmany(2)
+
+ def getAllResults(self):
+ try:
+ self.cur.execute("DROP TABLE TEST")
+ except sqlite.DatabaseError, reason:
+ pass
+
+ self.cur.execute("CREATE TABLE TEST (id, name, age)")
+ self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+ (5, 'Alice', 29))
+ self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+ (5, 'Alice', 29))
+ self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+ (5, 'Alice', 29))
+ self.cur.execute("-- types int, str, int")
+ self.cur.execute("SELECT id, name, age FROM TEST")
+ return self.cur.fetchall()
+
+ def CheckRowTypeIsTupleFetchone(self):
+ res = self.getOneResult()
+ self.failUnless(type(res) is tuple, "Result type of row isn't a tuple")
+
+ def CheckRowTypeIsTupleFetchmany(self):
+ res = self.getManyResults()
+ self.failUnless(type(res[1]) is tuple, "Result type of row isn't a tuple")
+
+ def CheckRowTypeIsTupleFetchall(self):
+ res = self.getAllResults()
+ self.failUnless(type(res[2]) is tuple, "Result type of row isn't a tuple")
+
+def suite():
+ tests = [unittest.makeSuite(PgResultSetTests, "Check"),
+ unittest.makeSuite(PgResultSetTests, "Check")]
+ if sys.version_info >= (2,2):
+ tests.append(unittest.makeSuite(TupleResultTests, "Check"))
+ return unittest.TestSuite(tests)
+
+def main():
+ runner = unittest.TextTestRunner()
+ runner.run(suite())
+
+if __name__ == "__main__":
+ main()
diff --git a/test/testsupport.py b/test/testsupport.py
new file mode 100644
index 0000000..18d9ff4
--- /dev/null
+++ b/test/testsupport.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+import _sqlite
+import os, tempfile, unittest
+
+class TestSupport:
+    """Mixin with helpers for creating and removing the test database."""
+
+    def getfilename(self):
+        # SQLite >= 2.8.2 supports in-memory databases; otherwise fall
+        # back to a temporary file on disk.
+        if _sqlite.sqlite_version_info() >= (2, 8, 2):
+            return ":memory:"
+        else:
+            # NOTE(review): tempfile.mktemp() is race-prone; tolerable in
+            # tests, but mkstemp() would be safer.
+            return tempfile.mktemp()
+
+    def removefile(self):
+        # In-memory databases have no file to remove.
+        if self.filename != ":memory:":
+            os.remove(self.filename)
diff --git a/test/transaction_tests.py b/test/transaction_tests.py
new file mode 100644
index 0000000..57bc70f
--- /dev/null
+++ b/test/transaction_tests.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+import testsupport
+import os, string, sys, types, unittest
+import sqlite
+
class TransactionTests(unittest.TestCase, testsupport.TestSupport):
    """Implicit-transaction behaviour: changes are visible inside the
    transaction, survive commit() and disappear after rollback()."""

    def setUp(self):
        self.filename = self.getfilename()
        self.cnx = sqlite.connect(self.filename)
        self.cur = self.cnx.cursor()

    def tearDown(self):
        # The connection may already be closed or setUp may have failed;
        # both are harmless during cleanup.
        try:
            self.cnx.close()
            self.removefile()
        except AttributeError:
            pass
        except sqlite.InterfaceError:
            pass

    def CheckValueInTransaction(self):
        self.cur.execute("create table test (a)")
        self.cur.execute("insert into test (a) values (%s)", "foo")
        self.cur.execute("-- types int")
        self.cur.execute("select count(a) as count from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(res.count, 1,
                             "Wrong number of rows during transaction.")

    def CheckValueAfterCommit(self):
        self.cur.execute("create table test (a)")
        self.cur.execute("insert into test (a) values (%s)", "foo")
        self.cur.execute("-- types int")
        self.cur.execute("select count(a) as count from test")
        self.cnx.commit()
        res = self.cur.fetchone()
        self.failUnlessEqual(res.count, 1,
                             "Wrong number of rows during transaction.")

    def CheckValueAfterRollback(self):
        self.cur.execute("create table test (a)")
        self.cnx.commit()
        self.cur.execute("insert into test (a) values (%s)", "foo")
        self.cnx.rollback()
        self.cur.execute("-- types int")
        self.cur.execute("select count(a) as count from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(res.count, 0,
                             "Wrong number of rows during transaction.")

    def CheckImmediateCommit(self):
        # commit() before any statement must be a harmless no-op.
        try:
            self.cnx.commit()
        except:
            # Typo fix: "exeption" -> "exception".
            self.fail("Immediate commit raises exception.")

    def CheckImmediateRollback(self):
        # rollback() before any statement must be a harmless no-op.
        try:
            self.cnx.rollback()
        except:
            # Typo fix: "exeption" -> "exception".
            self.fail("Immediate rollback raises exception.")
+
class AutocommitTests(unittest.TestCase, testsupport.TestSupport):
    """With autocommit=1, commit() must be a no-op and rollback() must
    raise ProgrammingError."""

    def setUp(self):
        self.filename = self.getfilename()
        self.cnx = sqlite.connect(self.filename, autocommit=1)
        self.cur = self.cnx.cursor()

    def tearDown(self):
        # Cleanup must tolerate an already-closed connection or a setUp
        # that never completed.
        try:
            self.cnx.close()
            self.removefile()
        except (AttributeError, sqlite.InterfaceError):
            pass

    def CheckCommit(self):
        self.cur.execute("select abs(5)")
        try:
            self.cnx.commit()
        except:
            self.fail(".commit() raised an exception")

    def CheckRollback(self):
        self.cur.execute("select abs(5)")
        self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.rollback)
+
class ChangeAutocommitTests(unittest.TestCase):
    # Placeholder: tests for toggling autocommit on an open connection
    # have not been written yet.
    pass
+
def suite():
    """Collect every transaction-related test case into one suite."""
    case_classes = (TransactionTests, AutocommitTests,
                    ChangeAutocommitTests)
    suites = [unittest.makeSuite(cls, "Check") for cls in case_classes]
    return unittest.TestSuite(suites)
+
def main():
    """Entry point for running this module stand-alone."""
    unittest.TextTestRunner().run(suite())

if __name__ == "__main__":
    main()
diff --git a/test/type_tests.py b/test/type_tests.py
new file mode 100644
index 0000000..a8d2031
--- /dev/null
+++ b/test/type_tests.py
@@ -0,0 +1,342 @@
+#!/usr/bin/env python
+#-*- coding: ISO-8859-1 -*-
+import testsupport
+import os, string, sys, types, unittest
+import sqlite
+import _sqlite
+
+try:
+ from mx.DateTime import Date, Time, DateTime, DateTimeDelta, DateFrom, \
+ TimeFrom, DateTimeDeltaFrom
+ have_datetime = 1
+except ImportError:
+ have_datetime = 0
+
def sqlite_is_at_least(major, minor, micro):
    """Return true if the linked SQLite library is at least the given
    major.minor.micro version.

    Bug fix: map() returns a list, and in Python 2 a list compares as
    less than a tuple regardless of contents, so the original
    'version >= (major, minor, micro)' was always false.  The parsed
    version is converted to a tuple for a real element-wise comparison.
    """
    version = tuple(map(int, _sqlite.sqlite_version().split(".")))
    return version >= (major, minor, micro)
+
class MyType:
    """Sample user-defined type using pysqlite's old-style _quote hook."""

    def __init__(self, val):
        self.val = int(val)

    def _quote(self):
        # Old-style quoting protocol: return the value as an SQL literal.
        return str(self.val)

    def __repr__(self):
        return "MyType(%s)" % self.val

    def __cmp__(self, other):
        # Python 2 three-way comparison; only comparable to MyType.
        assert(isinstance(other, MyType))
        return cmp(self.val, other.val)
+
class MyTypeNew(MyType):
    """Variant of MyType that only exposes the new-style __quote__ hook;
    the inherited old-style _quote is hidden via __getattr__."""

    def __quote__(self):
        return str(self.val)

    def __getattr__(self, key):
        # Forbid access to the old-style _quote method
        if key == "_quote":
            raise AttributeError(key)
        # Bug fix: a missing attribute must raise AttributeError, not the
        # KeyError that self.__dict__[key] produces -- otherwise
        # getattr()/hasattr() misbehave on instances of this class.
        try:
            return self.__dict__[key]
        except KeyError:
            raise AttributeError(key)
+
class ExpectedTypes(unittest.TestCase, testsupport.TestSupport):
    """Tests for pysqlite's "expected types" mechanism: an SQL comment of
    the form '-- types t1, t2, ...' declares which converters to apply to
    the columns of the next SELECT."""

    def setUp(self):
        self.filename = self.getfilename()
        # Register MyType as the converter for the custom "mytype" type.
        self.cnx = sqlite.connect(self.filename, converters={"mytype": MyType})
        self.cur = self.cnx.cursor()

    def tearDown(self):
        # Cleanup tolerates an already-closed connection / failed setUp.
        try:
            self.cnx.close()
            self.removefile()
        except AttributeError:
            pass
        except sqlite.InterfaceError:
            pass

    def CheckExpectedTypesStandardTypes(self):
        # Declared int/float/str converters must yield matching Python types.
        self.cur.execute("create table test (a, b, c)")
        self.cur.execute("insert into test(a, b, c) values (5, 6.3, 'hello')")
        self.cur.execute("-- types int, float, str")
        self.cur.execute("select * from test")
        res = self.cur.fetchone()
        self.failUnless(isinstance(res.a, types.IntType),
                        "The built-in int converter didn't work.")
        self.failUnless(isinstance(res.b, types.FloatType),
                        "The built-in float converter didn't work.")
        self.failUnless(isinstance(res.c, types.StringType),
                        "The built-in string converter didn't work.")

    def CheckExpectedTypesStandardTypesNull(self):
        # NULL must come back as None regardless of the declared converter.
        self.cur.execute("create table test (a, b, c)")
        self.cur.execute("insert into test(a, b, c) values (NULL, NULL, NULL)")
        self.cur.execute("-- types int, float, str")
        self.cur.execute("select * from test")
        res = self.cur.fetchone()
        self.failUnless(res.a == None,
                        "The built-in int converter should have returned None.")
        self.failUnless(res.b == None,
                        "The built-in float converter should have returned None.")
        self.failUnless(res.c == None,
                        "The built-in string converter should have returned None.")

    def CheckExpectedTypesCustomTypes(self):
        # Round-trip a custom type through the registered converter.
        value = MyType(10)
        self.cur.execute("create table test (a)")
        self.cur.execute("insert into test(a) values (%s)", value)
        self.cur.execute("-- types mytype")
        self.cur.execute("select a from test")
        res = self.cur.fetchone()

        self.failUnless(isinstance(res.a, MyType),
                        "The converter did return the wrong type.")
        self.failUnlessEqual(value, res.a,
                             "The returned value and the inserted one are different.")

    def CheckNewQuoteMethod(self):
        # Values exposing only __quote__ (no _quote) must still be quoted.
        value = MyTypeNew(10)
        self.cur.execute("create table test (a integer)")
        self.cur.execute("insert into test(a) values (%s)", value)
        self.cur.execute("select a from test")
        res = self.cur.fetchone()

        self.failUnlessEqual(10, res.a,
                             "The returned value and the inserted one are different.")

    def CheckExpectedTypesCustomTypesNull(self):
        # NULL bypasses the custom converter as well.
        value = None
        self.cur.execute("create table test (a)")
        self.cur.execute("insert into test(a) values (%s)", value)
        self.cur.execute("-- types mytype")
        self.cur.execute("select a from test")
        res = self.cur.fetchone()

        self.failUnless(res.a == None,
                        "The converter should have returned None.")

    def CheckResetExpectedTypes(self):
        # A "-- types" declaration applies only to the next SELECT; the
        # second SELECT here must return the raw string again.
        self.cur.execute("create table test (a)")
        self.cur.execute("insert into test(a) values ('5')")
        self.cur.execute("-- types int")
        self.cur.execute("select a from test")
        self.cur.execute("select a from test")
        res = self.cur.fetchone()
        self.assert_(isinstance(res.a, types.StringType),
                     "'resetting types' didn't succeed.")

    if have_datetime:
        def CheckDateTypes(self):
            # mx.DateTime round-trips for timestamp and interval columns.
            dt = DateTime(2002, 6, 15)
            dtd = DateTimeDelta(0, 0, 0, 1)

            self.cur.execute("create table test (t timestamp)")
            self.cur.execute("insert into test(t) values (%s)", (dt,))
            self.cur.execute("select t from test")
            res = self.cur.fetchone()

            self.failUnlessEqual(dt, res.t,
                                 "DateTime object should have been %s, was %s"
                                 % (repr(dt), repr(res.t)))

            self.cur.execute("drop table test")
            self.cur.execute("create table test(i interval)")
            self.cur.execute("insert into test(i) values (%s)", (dtd,))
            self.cur.execute("select i from test")
            res = self.cur.fetchone()

            self.failUnlessEqual(dtd, res.i,
                                 "DateTimeDelta object should have been %s, was %s"
                                 % (repr(dtd), repr(res.i)))
+
class UnicodeTestsLatin1(unittest.TestCase, testsupport.TestSupport):
    """Round-trip a Latin-1 unicode string through a UNICODE column."""

    def setUp(self):
        self.filename = self.getfilename()
        self.cnx = sqlite.connect(self.filename, encoding=("iso-8859-1",))
        self.cur = self.cnx.cursor()

    def tearDown(self):
        try:
            self.cnx.close()
            self.removefile()
        except AttributeError:
            pass
        except sqlite.InterfaceError:
            pass

    def CheckGetSameBack(self):
        # Bug fix: the non-ASCII first letter of "Österreich" was lost in
        # an earlier re-encoding of this file ("sterreich" remained).  It
        # is restored here as the encoding-safe escape "\xd6" (Latin-1 'Ö').
        test_str = unicode("\xd6sterreich", "latin1")
        self.cur.execute("create table test (a UNICODE)")
        self.cur.execute("insert into test(a) values (%s)", test_str)
        self.cur.execute("select a from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(type(test_str), type(res.a),
                             "Something other than a Unicode string was fetched: %s"
                             % (str(type(res.a))))
        self.failUnlessEqual(test_str, res.a,
                             "Fetching the unicode string doesn't return the inserted one.")
+
class UnicodeTestsUtf8(unittest.TestCase, testsupport.TestSupport):
    """Round-trip a UTF-8 unicode string (Cyrillic + Latin-1 letters)
    through a UNICODE column."""

    def setUp(self):
        self.filename = self.getfilename()
        self.cnx = sqlite.connect(self.filename, encoding="utf-8")
        self.cur = self.cnx.cursor()

    def tearDown(self):
        # Cleanup tolerates an already-closed connection / failed setUp.
        try:
            self.cnx.close()
            self.removefile()
        except AttributeError:
            pass
        except sqlite.InterfaceError:
            pass

    def CheckGetSameBack(self):
        # Transliteration: "PREZIDENT ROSSIJSKOJ FEDERACII Oesterreich"
        test_str = unicode("ПРЕЗИДЕНТ РОССИЙСКОЙ ФЕДЕРАЦИИ Österreich", "utf-8")

        self.cur.execute("create table test (a UNICODE)")
        self.cur.execute("insert into test(a) values (%s)", test_str)
        self.cur.execute("select a from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(type(test_str), type(res.a),
                             "Something other than a Unicode string was fetched: %s"
                             % (str(type(res.a))))
        self.failUnlessEqual(test_str, res.a,
                             "Fetching the unicode string doesn't return the inserted one.")
+
class UnicodeTestsKOI8R(unittest.TestCase, testsupport.TestSupport):
    """Round-trip a KOI8-R-encoded Cyrillic string through a UNICODE
    column."""

    def setUp(self):
        self.filename = self.getfilename()
        self.cnx = sqlite.connect(self.filename, encoding="koi8-r")
        self.cur = self.cnx.cursor()

    def tearDown(self):
        try:
            self.cnx.close()
            self.removefile()
        except AttributeError:
            pass
        except sqlite.InterfaceError:
            pass

    def CheckGetSameBack(self):
        # PREZIDENT ROSSIJSKOJ FEDERACII
        # (President of the Russian Federation)
        # Bug fix: the original koi8-r byte literal was destroyed by an
        # earlier re-encoding (only spaces survived).  The intended text
        # is rebuilt here from encoding-safe unicode escapes.
        test_str = (u"\u041f\u0420\u0415\u0417\u0418\u0414\u0415\u041d\u0422"
                    u" \u0420\u041e\u0421\u0421\u0418\u0419\u0421\u041a\u041e"
                    u"\u0419 \u0424\u0415\u0414\u0415\u0420\u0410\u0426\u0418"
                    u"\u0418")

        self.cur.execute("create table test (a UNICODE)")
        self.cur.execute("insert into test(a) values (%s)", test_str)
        self.cur.execute("select a from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(type(test_str), type(res.a),
                             "Something other than a Unicode string was fetched: %s"
                             % (str(type(res.a))))
        self.failUnlessEqual(test_str, res.a,
                             "Fetching the unicode string doesn't return the inserted one.")
+
class SQLiteBuiltinTypeSupport(unittest.TestCase, testsupport.TestSupport):
    """Round-trip tests for the built-in SQL column types (INTEGER, FLOAT,
    VARCHAR, BINARY and the mx.DateTime-backed date/time types)."""

    def setUp(self):
        self.filename = self.getfilename()
        # NOTE(review): the koi8-r encoding looks copied from the unicode
        # tests above; these tests use only ASCII/binary data -- confirm.
        self.cnx = sqlite.connect(self.filename, encoding="koi8-r")
        self.cur = self.cnx.cursor()

    def tearDown(self):
        # Cleanup tolerates an already-closed connection / failed setUp.
        try:
            self.cnx.close()
            self.removefile()
        except AttributeError:
            pass
        except sqlite.InterfaceError:
            pass

    def CheckInt(self):
        self.cur.execute("create table test (a INTEGER)")
        self.cur.execute("insert into test(a) values (%s)", 5)
        self.cur.execute("select a from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(type(5), type(res.a),
                             "Something other than an INTEGER was fetched: %s"
                             % (str(type(res.a))))

    def CheckFloat(self):
        self.cur.execute("create table test (a FLOAT)")
        self.cur.execute("insert into test(a) values (%s)", 5.7)
        self.cur.execute("select a from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(type(5.7), type(res.a),
                             "Something other than a FLOAT was fetched: %s"
                             % (str(type(res.a))))

    def CheckString(self):
        self.cur.execute("create table test (a VARCHAR(20))")
        self.cur.execute("insert into test(a) values (%s)", "foo")
        self.cur.execute("select a from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(type("foo"), type(res.a),
                             "Something other than a VARCHAR was fetched: %s"
                             % (str(type(res.a))))

    def CheckBinary(self):
        # All 256 byte values must survive the BLOB round-trip.
        bindata = "".join([chr(x) for x in range(256)])
        self.cur.execute("create table test(b BINARY)")
        self.cur.execute("insert into test(b) values (%s)", sqlite.Binary(bindata))
        self.cur.execute("select b from test")
        res = self.cur.fetchone()
        self.failUnlessEqual(bindata, res.b, "Binary roundtrip didn't produce original string")
        self.failUnlessEqual(self.cur.description[0][1], sqlite.BINARY, "Wrong type code")

    if have_datetime:
        def CheckDate(self):
            self.cur.execute("create table test (a DATE)")
            d = DateFrom("2002-05-07")
            self.cur.execute("insert into test(a) values (%s)", d)
            self.cur.execute("select a from test")
            res = self.cur.fetchone()
            if res.a != d:
                self.fail("didn't get back the same DATE")

        def CheckTime(self):
            self.cur.execute("create table test (a TIME)")
            t = TimeFrom("22:15:00")
            self.cur.execute("insert into test(a) values (%s)", t)
            self.cur.execute("select a from test")
            res = self.cur.fetchone()
            if res.a != t:
                self.fail("didn't get back the same TIME")

        def CheckTimestamp(self):
            self.cur.execute("create table test (a TIMESTAMP)")
            # NOTE(review): DateFrom (not DateTimeFrom) is used with a
            # date+time string here -- verify it keeps the time part.
            d = DateFrom("2002-05-07 22:15:00")
            self.cur.execute("insert into test(a) values (%s)", d)
            self.cur.execute("select a from test")
            res = self.cur.fetchone()
            if res.a != d:
                self.fail("didn't get back the same TIMESTAMP")

        def CheckInterval(self):
            self.cur.execute("create table test (a INTERVAL)")
            d = DateTimeDeltaFrom("02:00:00")
            self.cur.execute("insert into test(a) values (%s)", d)
            self.cur.execute("select a from test")
            res = self.cur.fetchone()
            if res.a != d:
                self.fail("didn't get back the same INTERVAL")
+
def suite():
    """Bundle all type-handling test cases into a single suite."""
    case_classes = (ExpectedTypes, UnicodeTestsLatin1, UnicodeTestsUtf8,
                    UnicodeTestsKOI8R, SQLiteBuiltinTypeSupport)
    return unittest.TestSuite([unittest.makeSuite(cls, "Check")
                               for cls in case_classes])
+
def main():
    """Run the type tests with a plain-text runner."""
    unittest.TextTestRunner().run(suite())

if __name__ == "__main__":
    main()
diff --git a/test/userfunction_tests.py b/test/userfunction_tests.py
new file mode 100644
index 0000000..ce70387
--- /dev/null
+++ b/test/userfunction_tests.py
@@ -0,0 +1,259 @@
+#!/usr/bin/env python
+import testsupport
+import os, string, sys, types, unittest
+import sqlite
+
def intreturner(x):
    """Scalar SQL callback: double the integer value of x."""
    doubled = int(x)
    return doubled + doubled
+
def floatreturner(x):
    """Scalar SQL callback: double the float value of x."""
    value = float(x)
    return value + value
+
def stringreturner(x):
    """Scalar SQL callback: wrap x in square brackets."""
    template = "[%s]"
    return template % x
+
def nullreturner(x):
    # Scalar SQL callback: always yield SQL NULL (None), ignoring x.
    return None
+
def exceptionreturner(x):
    """Scalar SQL callback that always raises ZeroDivisionError, used to
    exercise error propagation out of user functions."""
    zero = 0
    return 5 / zero
+
class MySum:
    """SQL aggregate: integer sum of the stepped values."""

    def __init__(self):
        self.reset()

    def reset(self):
        # Shared with finalize() so the instance can be reused.
        self.sum = 0

    def step(self, x):
        self.sum = self.sum + int(x)

    def finalize(self):
        result = self.sum
        self.reset()
        return result
+
class MySumFloat:
    """SQL aggregate: floating-point sum of the stepped values."""

    def __init__(self):
        self.reset()

    def reset(self):
        # Shared with finalize() so the instance can be reused.
        self.sum = 0.0

    def step(self, x):
        self.sum = self.sum + float(x)

    def finalize(self):
        result = self.sum
        self.reset()
        return result
+
class MySumReturnNull:
    """SQL aggregate that accumulates like MySum but always finalizes to
    None (SQL NULL)."""

    def __init__(self):
        self.reset()

    def reset(self):
        self.sum = 0

    def step(self, x):
        self.sum = self.sum + int(x)

    def finalize(self):
        return None
+
class MySumStepExeption:
    """SQL aggregate whose step() always raises ZeroDivisionError.

    The misspelled class name is kept as-is: it is referenced by the
    create_aggregate() call in the test setup.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        self.sum = 0

    def step(self, x):
        # Intentional ZeroDivisionError to exercise error handling.
        self.sum = self.sum + int(x) / 0

    def finalize(self):
        result = self.sum
        self.reset()
        return result
+
class MySumFinalizeExeption:
    """SQL aggregate whose finalize() always raises ZeroDivisionError.

    The misspelled class name is kept as-is: it is referenced by the
    create_aggregate() call in the test setup.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        self.sum = 0

    def step(self, x):
        self.sum = self.sum + int(x)

    def finalize(self):
        # Intentional ZeroDivisionError during finalize.
        result = self.sum / 0
        self.reset()
        return result
+
+class UserFunctions(unittest.TestCase, testsupport.TestSupport):
+ def setUp(self):
+ self.filename = self.getfilename()
+ self.cnx = sqlite.connect(self.filename)
+
+ sqlite._sqlite.enable_callback_debugging(0)
+
+ self.cnx.create_function("intreturner", 1, intreturner)
+ self.cnx.create_function("floatreturner", 1, floatreturner)
+ self.cnx.create_function("stringreturner", 1, stringreturner)
+ self.cnx.create_function("nullreturner", 1, nullreturner)
+ self.cnx.create_function("exceptionreturner", 1, exceptionreturner)
+
+ self.cnx.create_aggregate("mysum", 1, MySum)
+ self.cnx.create_aggregate("mysumfloat", 1, MySumFloat)
+ self.cnx.create_aggregate("mysumreturnnull", 1, MySumReturnNull )
+ self.cnx.create_aggregate("mysumstepexception", 1, MySumStepExeption)
+ self.cnx.create_aggregate("mysumfinalizeexception", 1, MySumFinalizeExeption)
+ self.cur = self.cnx.cursor()
+
+ def tearDown(self):
+ try:
+ self.cnx.close()
+ self.removefile()
+ except AttributeError:
+ pass
+ except sqlite.InterfaceError:
+ pass
+
+ def CheckIntFunction(self):
+ self.cur.execute("create table test (a)")
+ self.cur.execute("insert into test(a) values (%s)", 5)
+ self.cur.execute("-- types int")
+ self.cur.execute("select intreturner(a) as a from test")
+ res = self.cur.fetchone()
+ self.failUnless(isinstance(res.a, types.IntType),
+ "The result should have been an int.")
+ self.failUnlessEqual(res.a, 10,
+ "The function returned the wrong result.")
+
+ def CheckFloatFunction(self):
+ self.cur.execute("create table test (a)")
+ self.cur.execute("insert into test(a) values (%s)", 5.0)
+ self.cur.execute("-- types float")
+ self.cur.execute("select floatreturner(a) as a from test")
+ res = self.cur.fetchone()
+ self.failUnless(isinstance(res.a, types.FloatType),
+ "The result should have been a float.")
+ self.failUnlessEqual(res.a, 5.0 * 2.0,
+ "The function returned the wrong result.")
+
+ def CheckStringFunction(self):
+ mystr = "test"
+ self.cur.execute("create table test (a)")
+ self.cur.execute("insert into test(a) values (%s)", mystr)
+ self.cur.execute("-- types str")
+ self.cur.execute("select stringreturner(a) as a from test")
+ res = self.cur.fetchone()
+ self.failUnless(isinstance(res.a, types.StringType),
+ "The result should have been a string.")
+ self.failUnlessEqual(res.a, "[%s]" % mystr,
+ "The function returned the wrong result.")
+
+ def CheckNullFunction(self):
+ mystr = "test"
+ self.cur.execute("create table test (a)")
+ self.cur.execute("insert into test(a) values (%s)", mystr)
+ self.cur.execute("-- types str")
+ self.cur.execute("select nullreturner(a) as a from test")
+ res = self.cur.fetchone()
+ self.failUnlessEqual(res.a, None,
+ "The result should have been None.")
+
+ def CheckFunctionWithNullArgument(self):
+ mystr = "test"
+ self.cur.execute("-- types str")
+ self.cur.execute("select nullreturner(NULL) as a")
+ res = self.cur.fetchone()
+ self.failUnlessEqual(res.a, None,
+ "The result should have been None.")
+
+
+ def CheckExceptionFunction(self):
+ mystr = "test"
+ self.cur.execute("create table test (a)")
+ self.cur.execute("insert into test(a) values (%s)", mystr)
+ self.cur.execute("-- types str")
+ try:
+ self.cur.execute("select exceptionreturner(a) as a from test")
+ except sqlite.DatabaseError, reason:
+ pass
+ except Exception, reason:
+ self.fail("Wrong exception raised: %s", sys.exc_info()[0])
+
+ def CheckAggregateBasic(self):
+ self.cur.execute("create table test (a)")
+ self.cur.executemany("insert into test(a) values (%s)", [(10,), (20,), (30,)])
+ self.cur.execute("-- types int")
+ self.cur.execute("select mysum(a) as sum from test")
+ res = self.cur.fetchone()
+ self.failUnless(isinstance(res.sum, types.IntType),
+ "The result should have been an int.")
+ self.failUnlessEqual(res.sum, 60,
+ "The function returned the wrong result.")
+
+ def CheckAggregateFloat(self):
+ self.cur.execute("create table test (a)")
+ self.cur.executemany("insert into test(a) values (%s)", [(10.0,), (20.0,), (30.0,)])
+ self.cur.execute("-- types float")
+ self.cur.execute("select mysumfloat(a) as sum from test")
+ res = self.cur.fetchone()
+ self.failUnless(isinstance(res.sum, types.FloatType),
+ "The result should have been an float.")
+ if res.sum <= 59.9 or res.sum >= 60.1:
+ self.fail("The function returned the wrong result.")
+
+ def CheckAggregateReturnNull(self):
+ self.cur.execute("create table test (a)")
+ self.cur.executemany("insert into test(a) values (%s)", [(10,), (20,), (30,)])
+ self.cur.execute("-- types int")
+ self.cur.execute("select mysumreturnnull(a) as sum from test")
+ res = self.cur.fetchone()
+ self.failUnlessEqual(res.sum, None,
+ "The result should have been None.")
+
+ def CheckAggregateStepException(self):
+ self.cur.execute("create table test (a)")
+ self.cur.executemany("insert into test(a) values (%s)", [(10,), (20,), (30,)])
+ self.cur.execute("-- types int")
+ try:
+ self.cur.execute("select mysumstepexception(a) as sum from test")
+ except sqlite.DatabaseError, reason:
+ pass
+ except Exception, reason:
+ self.fail("Wrong exception raised: %s" % sys.exc_info()[0])
+
+ def CheckAggregateFinalizeException(self):
+ self.cur.execute("create table test (a)")
+ self.cur.executemany("insert into test(a) values (%s)", [(10,), (20,), (30,)])
+ self.cur.execute("-- types int")
+ try:
+ self.cur.execute("select mysumfinalizeexception(a) as sum from test")
+ except sqlite.DatabaseError, reason:
+ pass
+ except Exception, reason:
+ self.fail("Wrong exception raised: %s", sys.exc_info()[0])
+
+ def CheckAggregateStepNullArgument(self):
+ self.cur.execute("-- types int")
+ self.cur.execute("select mysum(NULL) as a")
+ res = self.cur.fetchone()
+ self.failUnlessEqual(res.a, 0,
+ "The result should have been 0.")
+
+
def suite():
    """Suite of user-defined function/aggregate tests."""
    user_function_suite = unittest.makeSuite(UserFunctions, "Check")
    return unittest.TestSuite((user_function_suite,))
+
def main():
    """Run the user-function tests with a plain-text runner."""
    unittest.TextTestRunner().run(suite())

if __name__ == "__main__":
    main()