While basic queries are straightforward, advanced users often need to manage transactions, parameterized queries, error handling, and connection pooling to ensure security, performance, and reliability in production environments.
This example demonstrates parameterized queries, manual transaction control, structured error logging, and exception chaining:
import pyodbc
import logging
# Configure logging: database errors are appended to db_errors.log with a
# timestamp and severity level for later troubleshooting.
logging.basicConfig(filename='db_errors.log', level=logging.ERROR, format='%(asctime)s - %(levelname)s - %(message)s')
# Connection parameters (adjust to your environment)
# NOTE(review): credentials are hard-coded for demonstration only — in
# production, load UID/PWD from environment variables or a secrets store
# rather than committing them to source control.
conn_str = (
    "DRIVER={ODBC Driver 17 for SQL Server};"
    "SERVER=localhost\\SQLEXPRESS;"
    "DATABASE=MyDatabase;"
    "UID=my_user;"
    "PWD=my_password"
)
def fetch_user_by_email(email):
    """Look up a single user by exact email address.

    Args:
        email: The email address to search for.

    Returns:
        A dict with keys "id", "name", and "email" for the first matching
        row, or None when no user has that email.

    Raises:
        RuntimeError: On any database failure. The underlying pyodbc.Error
            is logged to db_errors.log and chained as the cause so the
            original stack trace is preserved.
    """
    try:
        # pyodbc's connection context manager commits/rolls back on exit
        # but does NOT close the connection, so use explicit try/finally
        # to close both cursor and connection and avoid leaking them.
        conn = pyodbc.connect(conn_str)
        try:
            conn.autocommit = False  # manual transaction control
            cursor = conn.cursor()
            try:
                # Parameterized query to prevent SQL injection
                query = "SELECT id, name, email FROM users WHERE email = ?"
                cursor.execute(query, (email,))
                row = cursor.fetchone()
                if row:
                    return {"id": row.id, "name": row.name, "email": row.email}
                return None
            finally:
                cursor.close()
        finally:
            conn.close()
    except pyodbc.Error as e:
        logging.error(f"Database operation failed: {e}")
        raise RuntimeError("Database error occurred.") from e
# --- Usage Example ---
try:
    # fetch_user_by_email returns a populated dict, or None for no match.
    result = fetch_user_by_email("alice@example.com")
    if result is None:
        print("User not found.")
    else:
        print("User found:", result)
except Exception as err:
    # Any database failure surfaces here as a chained RuntimeError.
    print("Handled error:", err)
Key points in this example:

- Passing user input to `cursor.execute()` through `?` placeholders keeps it out of the SQL text, preventing SQL injection.
- `conn.autocommit = False` ensures changes are not committed unless explicitly requested — essential when doing inserts, updates, or deletes.
- Errors are logged to `db_errors.log`, and a user-friendly message is raised while preserving the original stack trace using `raise ... from e`.
- Columns can be accessed by name (`row.id`, `row.name`, etc.) by ensuring your query retrieves named columns.
- For large result sets, use `fetchmany()` or a generator pattern to avoid loading everything into memory.