patx/pickledb
Introduce SQLite as an optional backend for a safer, more thread-safe environment
Commit 0a94a42 · patx · 2025-12-21T01:30:53-05:00
Comments
No comments yet.
Diff
diff --git a/docs/index.html b/docs/index.html
index 378a560..bc15ec7 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -605,6 +605,100 @@
letter-spacing: -0.01em;
}
+ /* API Reference */
+ .api-section {
+ margin: 32px 0;
+ display: flex;
+ flex-direction: column;
+ gap: 28px;
+ }
+
+ .api-class {
+ background: var(--bg-card);
+ border-radius: 18px;
+ border: 2px solid var(--border);
+ padding: 28px 30px;
+ box-shadow: 0 4px 16px var(--shadow);
+ }
+
+ .api-class-header {
+ display: flex;
+ flex-wrap: wrap;
+ align-items: baseline;
+ gap: 12px;
+ margin-bottom: 8px;
+ }
+
+ .api-class-name {
+ font-family: var(--mono);
+ font-size: 1.05rem;
+ font-weight: 600;
+ color: var(--accent-forest);
+ }
+
+ .api-class-note {
+ font-size: 0.95rem;
+ color: var(--text-muted);
+ }
+
+ .api-method {
+ margin-top: 16px;
+ padding: 14px 16px;
+ border-radius: 12px;
+ background: var(--bg-secondary);
+ border: 1px solid var(--border);
+ }
+
+ .api-signature {
+ display: flex;
+ flex-wrap: wrap;
+ align-items: center;
+ gap: 12px;
+ margin-bottom: 8px;
+ }
+
+ .api-signature code {
+ background: rgba(255,255,255,0.8);
+ border-radius: 8px;
+ padding: 4px 8px;
+ border: 1px solid var(--border);
+ font-size: 0.9rem;
+ }
+
+ .api-tags {
+ display: inline-flex;
+ gap: 6px;
+ flex-wrap: wrap;
+ }
+
+ .api-tag {
+ font-size: 0.75rem;
+ text-transform: uppercase;
+ letter-spacing: 0.08em;
+ padding: 2px 8px;
+ border-radius: 999px;
+ border: 1px solid rgba(0,0,0,0.06);
+ background: rgba(255,255,255,0.7);
+ color: var(--text-muted);
+ }
+
+ .api-desc {
+ font-size: 0.95rem;
+ color: var(--text-secondary);
+ margin-bottom: 6px;
+ }
+
+ .api-usage {
+ font-size: 0.9rem;
+ color: var(--text-secondary);
+ margin: 6px 0 0 0;
+ padding-left: 20px;
+ }
+
+ .api-usage li {
+ margin-bottom: 4px;
+ }
+
/* When Not To Use Section */
.warning-list {
background: #fefce8;
@@ -670,6 +764,10 @@
.quote-block::before {
font-size: 4rem;
}
+
+ .api-class {
+ padding: 22px 20px;
+ }
}
/* Smooth scrolling */
@@ -738,6 +836,10 @@
<div class="install-banner">
<h3>Get Started in Seconds</h3>
<pre><code>pip install pickledb</code></pre>
+ <p class="license-note" style="margin-top: 16px; position: relative; z-index: 1;">
+ Need the SQLite backend? Install the extra:
+ <code>pip install "pickledb[sqlite]"</code>
+ </p>
</div>
</section>
@@ -750,7 +852,8 @@
<pre><code><span class="keyword">from</span> pickledb <span class="keyword">import</span> PickleDB
<span class="comment"># Bind to a JSON file; no I/O yet</span>
-db = PickleDB(<span class="string">"data.json"</span>).load()
+db = PickleDB(<span class="string">"data.json"</span>)
+db.load()
db.set(<span class="string">"username"</span>, <span class="string">"alice"</span>)
db.set(<span class="string">"theme"</span>, {
@@ -769,7 +872,8 @@ db.save() <span class="comment"># atomically write to disk</span></code></pre>
<span class="keyword">from</span> pickledb <span class="keyword">import</span> PickleDB
<span class="keyword">async def</span> <span class="function">main</span>():
- db = <span class="keyword">await</span> PickleDB(<span class="string">"data.json"</span>).load()
+ db = PickleDB(<span class="string">"data.json"</span>)
+ <span class="keyword">await</span> db.load()
<span class="keyword">await</span> db.set(<span class="string">"score"</span>, <span class="number">42</span>)
value = <span class="keyword">await</span> db.get(<span class="string">"score"</span>)
@@ -796,7 +900,12 @@ asyncio.run(main())</code></pre>
<li><strong>Unified sync/async:</strong> Every core method works in both worlds via the same name</li>
<li><strong>Atomic disk writes:</strong> Uses a temp file and <code>os.replace</code> to avoid partial writes</li>
<li><strong>No hidden autosave:</strong> Nothing is written to disk unless you call <code>save()</code> or exit a context manager cleanly</li>
- <li><strong>Single file simplicity:</strong> Because the database is saved to a single file, pickleDB is not thread-safe or process-safe. If you need this, check out <a href="https://github.com/patx/mongokv">mongoKV</a></li>
+ <li>
+ <strong>Single file simplicity:</strong> Because the JSON database is a single file, <code>PickleDB</code> is
+ <strong>not</strong> thread-safe or process-safe. For safer multi-thread/multi-process access on a single host,
+ use <code>PickleDBSQLite</code> (SQLite handles locking and ACID transactions). For networked or multi-host workloads,
+ consider <a href="https://github.com/patx/mongokv">mongoKV</a>.
+ </li>
</ul>
</div>
</section>
@@ -835,63 +944,275 @@ asyncio.run(main())</code></pre>
</section>
<section class="section fade-in">
- <h2>Core Methods</h2>
- <p>These are the only methods you need to know. Every method works the same in sync and async code — just add <code>await</code> when you're in an async function.</p>
+ <h2>API Reference</h2>
+ <p>pickleDB exposes two primary classes: <code>PickleDB</code> for JSON-on-disk, and <code>PickleDBSQLite</code> for an optional SQLite backend. All core methods share the same names in synchronous and asynchronous code — just add <code>await</code> when you’re inside an <code>async</code> function.</p>
+
+ <div class="api-section">
+ <div class="api-class">
+ <div class="api-class-header">
+ <div class="api-class-name">class PickleDB(location: str)</div>
+ <div class="api-class-note">In-memory JSON database backed by a single file on disk.</div>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>load() -> bool</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Load (or reload) the JSON file at <code>location</code> into memory. If the file is missing or empty, the in-memory database becomes an empty dict.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>db.load()</code></li>
+ <li><strong>Async:</strong> <code>await db.load()</code></li>
+ <li>Returns <code>True</code> on success.</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>save() -> bool</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Atomically write the in-memory database to disk at <code>location</code> using a temporary file + <code>os.replace()</code>.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>db.save()</code></li>
+ <li><strong>Async:</strong> <code>await db.save()</code></li>
+ <li>Returns <code>True</code> on success.</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>set(key: Any, value: Any) -> bool</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Store <code>value</code> under <code>key</code> in the in-memory database. Keys are coerced to <code>str</code>, and values must be JSON-serializable (via <code>orjson</code>).
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>db.set("name", "alice")</code></li>
+ <li><strong>Async:</strong> <code>await db.set("name", "alice")</code></li>
+ <li>Returns <code>True</code> after updating the in-memory store (does not write to disk until <code>save()</code>).</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>get(key: Any, default: Any | None = None) -> Any | None</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Retrieve the value stored under <code>key</code>, or <code>default</code> if it doesn’t exist.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>db.get("name")</code> or <code>db.get("name", default="guest")</code></li>
+ <li><strong>Async:</strong> <code>await db.get("name")</code></li>
+ <li>Returns the stored value, or <code>default</code> (default is <code>None</code>).</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>remove(key: Any) -> bool</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Remove <code>key</code> from the in-memory database.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>db.remove("name")</code></li>
+ <li><strong>Async:</strong> <code>await db.remove("name")</code></li>
+ <li>Returns <code>True</code> if the key existed and was removed, <code>False</code> otherwise.</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>all() -> list[str]</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Return a list of all keys currently stored in memory.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>db.all()</code></li>
+ <li><strong>Async:</strong> <code>await db.all()</code></li>
+ <li>Returns a list of <code>str</code> keys. Order is not guaranteed.</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>purge() -> bool</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Clear the in-memory database (after this call, <code>db.all()</code> returns an empty list). Does not touch disk until you call <code>save()</code>.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>db.purge()</code></li>
+ <li><strong>Async:</strong> <code>await db.purge()</code></li>
+ <li>Returns <code>True</code>.</li>
+ </ul>
+ </div>
+ </div>
- <table>
- <thead>
- <tr>
- <th>Method</th>
- <th>Sync Usage</th>
- <th>Async Usage</th>
- <th>Description</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td><code>load()</code></td>
- <td><code>db.load()</code></td>
- <td><code>await db.load()</code></td>
- <td>Load (or reload) the JSON file into memory. Returns the same instance for chaining.</td>
- </tr>
- <tr>
- <td><code>save()</code></td>
- <td><code>db.save()</code></td>
- <td><code>await db.save()</code></td>
- <td>Atomically save the in-memory database back to disk.</td>
- </tr>
- <tr>
- <td><code>set(key, value)</code></td>
- <td><code>db.set(k, v)</code></td>
- <td><code>await db.set(k, v)</code></td>
- <td>Store a value under <code>key</code>. Keys are coerced to <code>str</code>; values must be JSON-serializable.</td>
- </tr>
- <tr>
- <td><code>get(key, default=None)</code></td>
- <td><code>db.get(k, d)</code></td>
- <td><code>await db.get(k, d)</code></td>
- <td>Retrieve the stored value, or <code>default</code> if the key doesn't exist.</td>
- </tr>
- <tr>
- <td><code>remove(key)</code></td>
- <td><code>db.remove(k)</code></td>
- <td><code>await db.remove(k)</code></td>
- <td>Delete a key. Returns <code>True</code> if it existed and was removed.</td>
- </tr>
- <tr>
- <td><code>all()</code></td>
- <td><code>db.all()</code></td>
- <td><code>await db.all()</code></td>
- <td>Return a list of all keys in the in-memory database.</td>
- </tr>
- <tr>
- <td><code>purge()</code></td>
- <td><code>db.purge()</code></td>
- <td><code>await db.purge()</code></td>
- <td>Clear the in-memory database. Returns <code>True</code>.</td>
- </tr>
- </tbody>
- </table>
+ <div class="api-class">
+ <div class="api-class-header">
+ <div class="api-class-name">
+ class PickleDBSQLite(sqlite_path: str = "pickledb.sqlite3", table_name: str = "kv")
+ </div>
+ <div class="api-class-note">
+ Optional SQLite-backed key-value store using the same sync/async method names.
+ </div>
+ </div>
+
+ <p class="api-class-note" style="margin-top: 4px;">
+ <strong>Installation:</strong>
+ <code>PickleDBSQLite</code> lives behind an optional dependency. Install
+ <code>aiosqlite</code> yourself or use:
+ <code>pip install "pickledb[sqlite]"</code>.
+ </p>
+ <p class="api-class-note" style="margin-top: 4px;">
+ <strong>Why use it?</strong> SQLite adds file-level locking and ACID transactions, so concurrent readers and
+ serialized writers across multiple threads/processes on the <em>same machine</em> are handled for you. It’s still
+ not a distributed database, but it’s much safer than multiple processes writing to a plain JSON file.
+ </p>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>set(key: str | None, value: Any) -> str</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Store <code>value</code> in a SQLite table as an orjson-encoded BLOB. If <code>key</code> is
+ <code>None</code>, a new UUID key is generated and returned.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>key = kv.set(None, {"foo": "bar"})</code></li>
+ <li><strong>Async:</strong> <code>key = await kv.set(None, {"foo": "bar"})</code></li>
+ <li>Returns the key used (either the provided string or the generated UUID).</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>get(key: str, default: Any = MISSING) -> Any</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Look up a key in SQLite and deserialize it from orjson. If the key does not exist and no <code>default</code> is
+ provided, a <code>KeyError</code> is raised.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>kv.get(key)</code> or <code>kv.get("missing", default=None)</code></li>
+ <li><strong>Async:</strong> <code>await kv.get(key)</code></li>
+ <li>Returns the stored value, <code>default</code>, or raises <code>KeyError</code>.</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>remove(key: str) -> bool</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Delete <code>key</code> from the SQLite table.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>kv.remove("key")</code></li>
+ <li><strong>Async:</strong> <code>await kv.remove("key")</code></li>
+ <li>Returns <code>True</code> if a row was deleted, <code>False</code> otherwise.</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>all() -> list[str]</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Return a list of all keys stored in the SQLite table.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>kv.all()</code></li>
+ <li><strong>Async:</strong> <code>await kv.all()</code></li>
+ <li>Returns a list of key strings sorted by key.</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>purge() -> bool</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Remove all rows from the SQLite table.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>kv.purge()</code></li>
+ <li><strong>Async:</strong> <code>await kv.purge()</code></li>
+ <li>Returns <code>True</code>.</li>
+ </ul>
+ </div>
+
+ <div class="api-method">
+ <div class="api-signature">
+ <code>close() -> None</code>
+ <div class="api-tags">
+ <span class="api-tag">sync</span>
+ <span class="api-tag">async</span>
+ </div>
+ </div>
+ <p class="api-desc">
+ Close the underlying synchronous SQLite connection. In async code, returns a coroutine you should <code>await</code>.
+ </p>
+ <ul class="api-usage">
+ <li><strong>Sync:</strong> <code>kv.close()</code></li>
+ <li><strong>Async:</strong> <code>await kv.close()</code></li>
+ </ul>
+ </div>
+ </div>
+ </div>
<div class="callout-warning callout">
<p><strong>Important:</strong> pickleDB is intentionally <strong>method-based only</strong>. Dict-style access like <code>db["key"]</code> or <code>db["key"] = value</code> is not supported.</p>
@@ -946,7 +1267,7 @@ db.set(<span class="string">"user:2"</span>, {<span class="string">"name"</span>
data = db.get(key)
<span class="keyword">if not</span> data:
<span class="keyword">return</span> <span class="keyword">None</span>
- <span class="keyword">if</span> time.time() < data.get(<span class="string">"expires_at"</span>, <span class="number">0</span>):
+ <span class="keyword">if</span> time.time() < data.get(<span class="string">"expires_at"</span>, <span class="number">0</span>):
<span class="keyword">return</span> data[<span class="string">"value"</span>]
db.remove(key)
<span class="keyword">return</span> <span class="keyword">None</span>
@@ -1003,7 +1324,7 @@ time.sleep(<span class="number">3</span>)
<h3>When Not to Use pickleDB</h3>
<ul>
<li>You need multi-process or multi-host concurrency</li>
- <li>ASGI web servers or frameworks <small>(unless you're using a single worker)</small></li>
+ <li>ASGI web servers or frameworks <small>(unless you're using a single worker, or the SQLite backend with appropriate care)</small></li>
<li>Your dataset is too large to comfortably fit in memory</li>
<li>You need rich querying, indexing, or joins</li>
</ul>
@@ -1021,3 +1342,4 @@ time.sleep(<span class="number">3</span>)
</body>
</html>
+
diff --git a/pickledb.py b/pickledb.py
index 45220a2..e3022c0 100644
--- a/pickledb.py
+++ b/pickledb.py
@@ -6,12 +6,24 @@ Licensed - BSD 3 Clause (see LICENSE)
import asyncio
import os
-import aiofiles
+from typing import Any
+import uuid
+
import orjson
+import aiofiles
+try:
+ import sqlite3
+ import aiosqlite
+ sqlite_enable = True
+except ImportError:
+ sqlite_enable = False
+
+MISSING = object()
-def in_async():
- """Check if running in an event loop."""
+
+def in_async() -> bool:
+ """Return True if we're currently running inside an event loop."""
try:
asyncio.get_running_loop()
return True
@@ -20,7 +32,12 @@ def in_async():
def dualmethod(func):
- """Allows async methods to also be called synchronously."""
+ """
+ Decorator that lets an async method be called in both sync and async code.
+
+ - In async code: returns the coroutine (you must `await` it).
+ - In sync code: runs the coroutine with asyncio.run() and returns the result.
+ """
def wrapper(self, *args, **kwargs):
coro = func(self, *args, **kwargs)
if in_async():
@@ -32,11 +49,14 @@ def dualmethod(func):
class PickleDB:
"""
A unified async/sync key-value store using orjson + aiofiles.
+
+ All data is kept in-memory in `self.db` and serialized to disk as a single
+ orjson-encoded file at `self.location`.
"""
def __init__(self, location: str):
self.location = os.path.expanduser(location)
- self.db = {}
+ self.db: dict[str, Any] = {}
self._lock = asyncio.Lock()
def __enter__(self):
@@ -57,7 +77,11 @@ class PickleDB:
@dualmethod
async def load(self) -> bool:
- """Load JSON database from disk into memory."""
+ """
+ Load JSON database from disk into memory.
+
+ Returns True on success (or if the file did not exist / was empty).
+ """
if os.path.exists(self.location) and os.path.getsize(self.location) > 0:
async with aiofiles.open(self.location, "rb") as f:
data = await f.read()
@@ -67,11 +91,16 @@ class PickleDB:
async with self._lock:
self.db = new_db
- return self
+ return True
@dualmethod
async def save(self) -> bool:
- """Atomically save database to disk."""
+ """
+ Atomically save database to disk.
+
+ Writes to `<location>.tmp` and then os.replace() over the original file.
+ Returns True on success.
+ """
temp = f"{self.location}.tmp"
async with self._lock:
async with aiofiles.open(temp, "wb") as f:
@@ -81,20 +110,20 @@ class PickleDB:
@dualmethod
async def set(self, key, value) -> bool:
- """Set a key-value pair."""
+ """Set a key-value pair. Always returns True."""
async with self._lock:
self.db[str(key)] = value
return True
@dualmethod
async def get(self, key, default=None):
- """Get a key's value."""
+ """Get a key's value, or `default` if missing."""
async with self._lock:
return self.db.get(str(key), default)
@dualmethod
async def remove(self, key) -> bool:
- """Remove a key-value pair."""
+ """Remove a key-value pair. Returns True if it existed, False otherwise."""
async with self._lock:
return self.db.pop(str(key), None) is not None
@@ -106,8 +135,255 @@ class PickleDB:
@dualmethod
async def purge(self) -> bool:
- """Remove all key-value pairs from database."""
+ """Remove all key-value pairs from the database. Always returns True."""
async with self._lock:
self.db.clear()
return True
+if sqlite_enable:
+ class PickleDBSQLite:
+ """
+ A unified async/sync key-value store backed by SQLite.
+
+ Each key is stored as a row:
+
+ CREATE TABLE kv (
+ key TEXT PRIMARY KEY,
+ value BLOB NOT NULL
+ )
+
+ Values are stored as JSON-encoded bytes via orjson.
+ """
+
+ def __init__(
+ self,
+ sqlite_path: str = "pickledb.sqlite3",
+ table_name: str = "kv",
+ ) -> None:
+ self.sqlite_path = sqlite_path
+ self.table_name = table_name
+
+ self._conn = sqlite3.connect(self.sqlite_path, check_same_thread=False)
+ self._conn.row_factory = sqlite3.Row
+
+ self._conn.execute(
+ f"""
+ CREATE TABLE IF NOT EXISTS {self.table_name} (
+ key TEXT PRIMARY KEY,
+ value BLOB NOT NULL
+ )
+ """
+ )
+ self._conn.commit()
+
+ def _dumps(self, value: Any) -> bytes:
+ """Serialize a Python object to orjson-encoded bytes."""
+ return orjson.dumps(value)
+
+ def _loads(self, data: bytes) -> Any:
+ """Deserialize orjson-encoded bytes back into a Python object."""
+ return orjson.loads(data)
+
+ def set(self, key: str | None, value: Any) -> str:
+ """
+ Set a key-value pair.
+
+ If key is None, generate a new random UUID key and return it.
+
+ In async code, returns a coroutine you must `await`.
+ In sync code, returns the key string directly.
+ """
+ if in_async():
+ async def _aset() -> str:
+ async with aiosqlite.connect(self.sqlite_path) as db:
+ db.row_factory = sqlite3.Row
+ payload = self._dumps(value)
+
+ if key is None:
+ new_key = str(uuid.uuid4())
+ await db.execute(
+ f"INSERT INTO {self.table_name} (key, value) VALUES (?, ?)",
+ (new_key, payload),
+ )
+ await db.commit()
+ return new_key
+
+ await db.execute(
+ f"""
+ INSERT INTO {self.table_name} (key, value)
+ VALUES (?, ?)
+ ON CONFLICT(key) DO UPDATE SET value=excluded.value
+ """,
+ (str(key), payload),
+ )
+ await db.commit()
+ return str(key)
+
+ return _aset()
+
+ payload = self._dumps(value)
+ if key is None:
+ new_key = str(uuid.uuid4())
+ self._conn.execute(
+ f"INSERT INTO {self.table_name} (key, value) VALUES (?, ?)",
+ (new_key, payload),
+ )
+ self._conn.commit()
+ return new_key
+
+ self._conn.execute(
+ f"""
+ INSERT INTO {self.table_name} (key, value)
+ VALUES (?, ?)
+ ON CONFLICT(key) DO UPDATE SET value=excluded.value
+ """,
+ (str(key), payload),
+ )
+ self._conn.commit()
+ return str(key)
+
+ def get(self, key: str, default: Any = MISSING) -> Any:
+ """
+ Get the value for a key.
+
+ If the key does not exist:
+ - If default is MISSING, raises KeyError.
+ - Otherwise returns default.
+
+ In async code, returns a coroutine you must `await`.
+ In sync code, returns the value directly.
+ """
+ if in_async():
+ async def _aget() -> Any:
+ async with aiosqlite.connect(self.sqlite_path) as db:
+ db.row_factory = sqlite3.Row
+ cursor = await db.execute(
+ f"SELECT value FROM {self.table_name} WHERE key = ?",
+ (str(key),),
+ )
+ row = await cursor.fetchone()
+ await cursor.close()
+
+ if row is None:
+ if default is MISSING:
+ raise KeyError(key)
+ return default
+
+ return self._loads(row["value"])
+
+ return _aget()
+
+ cursor = self._conn.execute(
+ f"SELECT value FROM {self.table_name} WHERE key = ?",
+ (str(key),),
+ )
+ row = cursor.fetchone()
+ if row is None:
+ if default is MISSING:
+ raise KeyError(key)
+ return default
+ return self._loads(row["value"])
+
+ def remove(self, key: str) -> bool:
+ """
+ Remove a key-value pair.
+
+ Returns True if a row was deleted, False otherwise.
+
+ In async code, returns a coroutine you must `await`.
+ In sync code, returns a bool directly.
+ """
+ if in_async():
+ async def _aremove() -> bool:
+ async with aiosqlite.connect(self.sqlite_path) as db:
+ cursor = await db.execute(
+ f"DELETE FROM {self.table_name} WHERE key = ?",
+ (str(key),),
+ )
+ await db.commit()
+ return cursor.rowcount > 0
+
+ return _aremove()
+
+ cursor = self._conn.execute(
+ f"DELETE FROM {self.table_name} WHERE key = ?",
+ (str(key),),
+ )
+ self._conn.commit()
+ return cursor.rowcount > 0
+
+ def all(self) -> list[str]:
+ """
+ Return a list of all keys in the database.
+
+ In async code, returns a coroutine you must `await`.
+ In sync code, returns the list directly.
+ """
+ if in_async():
+ async def _aall() -> list[str]:
+ async with aiosqlite.connect(self.sqlite_path) as db:
+ db.row_factory = sqlite3.Row
+ cursor = await db.execute(
+ f"SELECT key FROM {self.table_name} ORDER BY key"
+ )
+ rows = await cursor.fetchall()
+ await cursor.close()
+ return [row["key"] for row in rows]
+
+ return _aall()
+
+ cursor = self._conn.execute(
+ f"SELECT key FROM {self.table_name} ORDER BY key"
+ )
+ return [row["key"] for row in cursor.fetchall()]
+
+ def purge(self) -> bool:
+ """
+ Remove all key-value pairs from the database.
+
+ Always returns True.
+
+ In async code, returns a coroutine you must `await`.
+ In sync code, returns True directly.
+ """
+ if in_async():
+ async def _apurge() -> bool:
+ async with aiosqlite.connect(self.sqlite_path) as db:
+ await db.execute(f"DELETE FROM {self.table_name}")
+ await db.commit()
+ return True
+
+ return _apurge()
+
+ self._conn.execute(f"DELETE FROM {self.table_name}")
+ self._conn.commit()
+ return True
+
+ def close(self) -> None:
+ """
+ Close the underlying sync SQLite connection.
+
+ In async code, returns a coroutine you must `await`.
+ In sync code, closes immediately.
+ """
+ if in_async():
+ async def _aclose() -> None:
+ self._conn.close()
+ return _aclose()
+
+ self._conn.close()
+
+
+else:
+ class PickleDBSQLite:
+ """
+ This class is only usable if `aiosqlite` is installed, e.g.:
+ pip install "pickledb[sqlite]"
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ raise RuntimeError(
+ "PickleDBSQLite requires `aiosqlite`. "
+ "Install it via `pip install \"pickledb[sqlite]\"`."
+ )
+
diff --git a/pyproject.toml b/pyproject.toml
index 1dd09ba..b246641 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,13 +4,13 @@ build-backend = "flit_core.buildapi"
[project]
name = "pickledb"
-version = "1.4.1.1"
+version = "1.5"
description = "An in-memory key-value store using orjson."
-keywords = ["pickle", "database", "json", "redis", "asyncio"]
+keywords = ["pickle", "database", "json", "redis", "asyncio", "sqlite"]
readme = "README.md"
authors = [{ name = "Harrison Erd", email = "[email protected]" }]
license = {file = "LICENSE"}
-dependencies = ["orjson>=3.11.5", "aiofiles>=25.1.0"]
+dependencies = ["orjson", "aiofiles"]
requires-python = ">=3.10"
classifiers = [
"Framework :: AsyncIO",
@@ -22,6 +22,9 @@ classifiers = [
"Topic :: Database"
]
+[project.optional-dependencies]
+sqlite = ["aiosqlite"]
+
[project.urls]
Homepage = "https://patx.github.io/pickledb"
Repository = "https://github.com/patx/pickledb"
diff --git a/test_pickledb.py b/test_pickledb.py
index e086098..a3c6346 100644
--- a/test_pickledb.py
+++ b/test_pickledb.py
@@ -2,12 +2,11 @@
import os
import json
import asyncio
-import tempfile
from pathlib import Path
import pytest
-from pickledb import PickleDB # adjust this import if your module name is different
+from pickledb import PickleDB, PickleDBSQLite # adjust this import if your module name is different
# ---------------------------------------------------------------------------
@@ -20,7 +19,7 @@ def make_tmp_path(tmp_path, name="db.json") -> Path:
# ---------------------------------------------------------------------------
-# Basic sync usage
+# Basic sync usage (PickleDB)
# ---------------------------------------------------------------------------
def test_sync_set_get_save_and_reload(tmp_path):
@@ -95,7 +94,7 @@ def test_sync_purge_clears_database(tmp_path):
# ---------------------------------------------------------------------------
-# Basic async usage
+# Basic async usage (PickleDB)
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
@@ -151,7 +150,7 @@ async def test_async_remove_all_purge(tmp_path):
# ---------------------------------------------------------------------------
-# Context managers
+# Context managers (PickleDB)
# ---------------------------------------------------------------------------
def test_sync_context_manager_saves_on_success(tmp_path):
@@ -216,7 +215,7 @@ async def test_async_context_manager_does_not_save_on_exception(tmp_path):
# ---------------------------------------------------------------------------
-# Edge cases: missing file, empty file, atomic save
+# Edge cases: missing file, empty file, atomic save (PickleDB)
# ---------------------------------------------------------------------------
def test_load_on_missing_file_returns_empty_db(tmp_path):
@@ -240,7 +239,7 @@ def test_load_on_empty_file_returns_empty_db(tmp_path):
db_path.write_text("") # existing but empty file
db = PickleDB(str(db_path))
- # Your load() treats 0-byte file as empty db
+ # load() treats 0-byte file as empty db
db.load()
assert db.all() == []
@@ -256,14 +255,14 @@ def test_save_uses_temp_file_and_is_atomic(tmp_path):
# temp file should not be left behind
assert not os.path.exists(tmp_name)
- # file should be valid JSON
+ # file should be valid JSON (orjson output is still standard JSON)
raw = db_path.read_bytes()
data = json.loads(raw.decode("utf-8"))
assert data == {"a": 1}
# ---------------------------------------------------------------------------
-# Concurrency tests
+# Concurrency tests (PickleDB)
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
@@ -313,10 +312,11 @@ async def test_async_concurrent_gets_and_sets(tmp_path):
# ---------------------------------------------------------------------------
-# Stress test: 1,000,000 key-value pairs
+# Stress test: 1,000,000 key-value pairs (PickleDB)
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
[email protected]
async def test_stress_one_million_entries(tmp_path):
"""
Stress test inserting and retrieving 1,000,000 key-value pairs.
@@ -341,3 +341,50 @@ async def test_stress_one_million_entries(tmp_path):
await db.save()
assert db_path.exists()
+
+# ---------------------------------------------------------------------------
+# Basic tests for PickleDBSQLite
+# ---------------------------------------------------------------------------
+
+def test_sqlite_sync_set_get_and_all(tmp_path):
+ sqlite_path = tmp_path / "kv.sqlite3"
+ kv = PickleDBSQLite(str(sqlite_path))
+
+ key1 = kv.set(None, {"foo": "bar"})
+ key2 = kv.set("explicit", [1, 2, 3])
+
+ assert isinstance(key1, str)
+ assert key2 == "explicit"
+
+ assert kv.get(key1) == {"foo": "bar"}
+ assert kv.get("explicit") == [1, 2, 3]
+
+ keys = set(kv.all())
+ assert key1 in keys
+ assert "explicit" in keys
+
+ kv.close()
+
+
[email protected]
+async def test_sqlite_async_set_get_and_purge(tmp_path):
+ sqlite_path = tmp_path / "kv_async.sqlite3"
+ kv = PickleDBSQLite(str(sqlite_path))
+
+ key = await kv.set(None, {"async": True})
+ assert await kv.get(key) == {"async": True}
+
+ await kv.set("x", 1)
+ await kv.set("y", 2)
+ keys = set(await kv.all())
+ assert key in keys
+ assert {"x", "y"} <= keys
+
+ assert await kv.remove("x") is True
+ assert await kv.remove("x") is False
+
+ assert await kv.purge() is True
+ assert await kv.all() == []
+
+ await kv.close()
+