Skip to content

Commit

Permalink
Merge pull request #3 from ErikKalkoken/update-docs
Browse files Browse the repository at this point in the history
Update docs
  • Loading branch information
ErikKalkoken authored Jun 5, 2023
2 parents 2dca4ff + 4e8a076 commit 8fc6948
Show file tree
Hide file tree
Showing 11 changed files with 159 additions and 107 deletions.
20 changes: 20 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Change Log

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

## [Unreleased] - yyyy-mm-dd

### Added

### Changed

### Fixed

## [0.1.0] - 2023-06-05

### Added

- Initial release
5 changes: 5 additions & 0 deletions docs/_static/custom.css
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
.logo {
overflow-wrap: normal;
}

div.sphinxsidebar {
max-height: 100%;
overflow-y: auto;
}
6 changes: 1 addition & 5 deletions docs/api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
API Reference
===============

Complete reference of the public API.

Queues
======
Expand All @@ -14,8 +15,3 @@ Exceptions
==========

.. automodule:: aiodiskqueue.exceptions

Storage Engines
===============

.. automodule:: aiodiskqueue.engines
20 changes: 20 additions & 0 deletions docs/extensions.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
.. currentmodule:: aiodiskqueue

===============
Customization
===============

Storage Engines
===============

aiodiskqueue uses the DbmEngine by default, but you can also select a different storage engine.

Or you can create your own storage engine by inheriting from :class:`.FifoStorageEngine`.

.. automodule:: aiodiskqueue.engines.dbm

.. automodule:: aiodiskqueue.engines.simple

.. automodule:: aiodiskqueue.engines.sqlite

.. automodule:: aiodiskqueue.engines.base
13 changes: 10 additions & 3 deletions docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,17 @@

.. toctree::
:hidden:
:maxdepth: 2
:maxdepth: 4

api
extensions


Contents
-------

.. toctree::
:hidden:
:maxdepth: 1
:maxdepth: 2

api
extensions
2 changes: 1 addition & 1 deletion src/aiodiskqueue/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from aiodiskqueue.exceptions import QueueEmpty, QueueFull
from aiodiskqueue.queues import Queue

__version__ = "0.1.0b8"
__version__ = "0.1.0"


__all__ = ["engines", "Queue", "QueueEmpty", "QueueFull"]
18 changes: 14 additions & 4 deletions src/aiodiskqueue/engines/base.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Engines for storing the queues on disk."""
"""Base class for storage engines."""

import logging
from abc import ABC, abstractmethod
Expand All @@ -8,19 +8,25 @@
logger = logging.getLogger("aiodiskqueue")


class _FifoStorageEngine(ABC):
class FifoStorageEngine(ABC):
"""Base class for all storage engines implementing a FIFO queue."""

def __init__(self, data_path: Path) -> None:
self._data_path = data_path

@abstractmethod
async def initialize(self) -> List[Any]:
"""Initialize data file."""
"""Initialize data file.
:meta private:
"""

@abstractmethod
async def fetch_all(self) -> List[Any]:
"""Return all items in data file."""
"""Return all items in data file.
:meta private:
"""

@abstractmethod
async def add_item(self, item: Any):
Expand All @@ -29,6 +35,8 @@ async def add_item(self, item: Any):
Args:
item: Item to be appended
items: All items including the one to be appended
:meta private:
"""

@abstractmethod
Expand All @@ -38,4 +46,6 @@ async def remove_item(self):
Args:
item: Item to be removed
items: All items not including the one to be removed
:meta private:
"""
164 changes: 79 additions & 85 deletions src/aiodiskqueue/engines/dbm.py
Original file line number Diff line number Diff line change
@@ -1,102 +1,96 @@
"""Engines for storing the queues with DBM."""

import dbm
import logging
import pickle
from pathlib import Path
from typing import Any, List, Optional, Union

import aiodbm

from .base import FifoStorageEngine

logger = logging.getLogger("aiodiskqueue")


class DbmEngine(FifoStorageEngine):
    """A queue storage engine using DBM.

    Items are stored as individual pickled records keyed ``item-<id>``,
    with two bookkeeping records (``head_id`` / ``tail_id``) holding the
    ids of the oldest and newest item, giving FIFO order.
    """

    def __init__(self, data_path: Path) -> None:
        """Initialize the engine.

        Args:
            data_path: Location of the DBM data file.
        """
        super().__init__(data_path)
        # aiodbm.open() expects a plain string path, not a Path object.
        self._data_path_2 = str(data_path.absolute())

    # Keys of the bookkeeping records inside the DBM file.
    _HEAD_ID_KEY = "head_id"
    _TAIL_ID_KEY = "tail_id"

    async def initialize(self):
        """Create the data file by opening it in "c" mode.

        A dummy key is written and removed again, because some DBM
        backends only materialize the file once it is written to.
        """
        async with aiodbm.open(self._data_path_2, "c") as db:
            await db.set("dummy", "test")
            await db.delete("dummy")

    async def fetch_all(self) -> List[Any]:
        """Return all items currently in the data file in FIFO order.

        Returns an empty list when the file does not exist yet or the
        queue holds no items.
        """
        try:
            async with aiodbm.open(self._data_path_2, "r") as db:
                head_id = await self._get_obj(db, self._HEAD_ID_KEY)
                tail_id = await self._get_obj(db, self._TAIL_ID_KEY)
                if not head_id or not tail_id:
                    return []  # bookkeeping keys absent -> queue is empty

                items = []
                for item_id in range(head_id, tail_id + 1):
                    item_key = self._make_item_key(item_id)
                    item = await self._get_obj(db, item_key)
                    items.append(item)
        except dbm.error:
            # e.g. data file does not exist yet
            items = []

        return items

    async def add_item(self, item: Any):
        """Append one item to the tail of the queue.

        Args:
            item: Item to be appended. Must be picklable.
        """
        async with aiodbm.open(self._data_path_2, "w") as db:
            tail_id = await self._get_obj(db, self._TAIL_ID_KEY)
            if tail_id:
                item_id = tail_id + 1
                is_first = False
            else:
                # No tail recorded: the queue is currently empty.
                item_id = 1
                is_first = True

            await self._set_obj(db, self._make_item_key(item_id), item)
            await self._set_obj(db, self._TAIL_ID_KEY, item_id)

            if is_first:
                # First item also becomes the head of the queue.
                await self._set_obj(db, self._HEAD_ID_KEY, item_id)

    async def remove_item(self):
        """Remove the item at the head of the queue.

        Raises:
            ValueError: If the queue is empty.
        """
        async with aiodbm.open(self._data_path_2, "w") as db:
            head_id = await self._get_obj(db, self._HEAD_ID_KEY)
            tail_id = await self._get_obj(db, self._TAIL_ID_KEY)
            if not head_id or not tail_id:
                raise ValueError("Nothing to remove from an empty database")
            item_key = self._make_item_key(head_id)
            await db.delete(item_key)

            if head_id != tail_id:
                # there are items left
                await self._set_obj(db, self._HEAD_ID_KEY, head_id + 1)
            else:
                # was last item: clear bookkeeping so the queue reads empty
                await db.delete(self._HEAD_ID_KEY)
                await db.delete(self._TAIL_ID_KEY)

    @staticmethod
    def _make_item_key(item_id: int) -> str:
        """Return the DBM key for the item with the given id."""
        return f"item-{item_id}"

    @staticmethod
    async def _get_obj(db, key: Union[str, bytes]) -> Optional[Any]:
        """Fetch and unpickle the object stored under key, or None if absent."""
        data = await db.get(key)
        if not data:
            return None
        return pickle.loads(data)

    @staticmethod
    async def _set_obj(db, key: Union[str, bytes], item: Any):
        """Pickle item and store it under key."""
        data = pickle.dumps(item)
        await db.set(key, data)
8 changes: 4 additions & 4 deletions src/aiodiskqueue/engines/simple.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Engines for storing the queues on disk."""
"""Engines for storing the queues in flat files."""

import io
import logging
Expand All @@ -8,12 +8,12 @@
import aiofiles
import aiofiles.os

from .base import _FifoStorageEngine
from .base import FifoStorageEngine

logger = logging.getLogger("aiodiskqueue")


class PickledList(_FifoStorageEngine):
class PickledList(FifoStorageEngine):
"""This engine stores items as one singular pickled list of items."""

async def initialize(self):
Expand Down Expand Up @@ -54,7 +54,7 @@ async def _save_all_items(self, items: List[Any]):
logger.debug("Wrote queue with %d items: %s", len(items), self._data_path)


class PickleSequence(_FifoStorageEngine):
class PickleSequence(FifoStorageEngine):
"""This engine stores items as a sequence of single pickles."""

async def initialize(self):
Expand Down
4 changes: 2 additions & 2 deletions src/aiodiskqueue/engines/sqlite.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@
else:
has_aiosqlite = True

from .base import _FifoStorageEngine
from .base import FifoStorageEngine

logger = logging.getLogger("aiodiskqueue")

if has_aiosqlite:

class SqliteEngine(_FifoStorageEngine):
class SqliteEngine(FifoStorageEngine):
"""A queue storage engine using Sqlite."""

async def initialize(self):
Expand Down
Loading

0 comments on commit 8fc6948

Please sign in to comment.