Merge pull request #600 from JWCook/dynamodb

DynamoDB: Add TTL support, and store responses as JSON documents instead of serialized binaries
diff --git a/HISTORY.md b/HISTORY.md
index f174881..bc97907 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -28,8 +28,11 @@
 * MongoDB:
   * Store responses in plain (human-readable) document format instead of fully serialized binary
   * Add optional integration with MongoDB TTL to improve performance for removing expired responses
+    * Disabled by default. See 'Backends: MongoDB' docs for details.
 * DynamoDB:
   * Create default table in on-demand mode instead of provisioned
+  * Add optional integration with DynamoDB TTL to improve performance for removing expired responses
+    * This is enabled by default, but may be disabled
 * SQLite, Redis, MongoDB, and GridFS: Close open database connections when `CachedSession` is used as a contextmanager, or if `CachedSession.close()` is called
 
 **Type hints:**
@@ -98,7 +101,8 @@
 
 **Backends:**
 * Filesystem and SQLite backends: Add better error message if parent path exists but isn't a directory
-* Redis: Improve performance by using native Redis TTL for cache expiration
+* Redis: Add optional integration with Redis TTL to improve performance for removing expired responses
+  * This is enabled by default, but may be disabled
 
 **Other features:**
 * Support `expire_after` param for `CachedSession.send()`
diff --git a/docs/_static/dynamodb_items.png b/docs/_static/dynamodb_items.png
new file mode 100644
index 0000000..3ab4531
--- /dev/null
+++ b/docs/_static/dynamodb_items.png
Binary files differ
diff --git a/docs/_static/dynamodb_response.png b/docs/_static/dynamodb_response.png
new file mode 100644
index 0000000..9e2bae0
--- /dev/null
+++ b/docs/_static/dynamodb_response.png
Binary files differ
diff --git a/docs/user_guide/backends/dynamodb.md b/docs/user_guide/backends/dynamodb.md
index f807a77..0758c1c 100644
--- a/docs/user_guide/backends/dynamodb.md
+++ b/docs/user_guide/backends/dynamodb.md
@@ -36,7 +36,50 @@
 >>> session = CachedSession(backend=backend)
 ```
 
-## Table
+## Viewing Responses
+By default, responses are only partially serialized so they can be saved as plain DynamoDB
+documents. Response data can then be easily viewed via the
+[AWS Console](https://aws.amazon.com/console/).
+
+Here is an example of responses listed under **DynamoDB > Tables > Explore Items:**
+:::{admonition} Screenshot
+:class: toggle
+```{image} ../../_static/dynamodb_items.png
+```
+:::
+
+And here is an example response:
+:::{admonition} Screenshot
+:class: toggle
+```{image} ../../_static/dynamodb_response.png
+```
+:::
+
+It is also possible to query these responses with the [AWS CLI](https://aws.amazon.com/cli), for example:
+```bash
+aws dynamodb query \
+    --table-name http_cache \
+    --key-condition-expression "namespace = :n1" \
+    --expression-attribute-values '{":n1": {"S": "responses"}}' \
+    > responses.json
+```
+
+## Expiration
+DynamoDB natively supports TTL on a per-item basis, and can automatically remove expired responses from
+the cache. This will be set by default, according to normal {ref}`expiration settings <expiration>`.
+
+```{warning}
+DynamoDB does not remove expired items immediately. See
+[How It Works: DynamoDB Time to Live](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/howitworks-ttl.html)
+for more details.
+```
+
+If needed, you can disable this behavior with the `ttl` argument:
+```python
+>>> backend = DynamoDbCache(ttl=False)
+```
+
+## Creating a Table
 A table will be automatically created if one doesn't already exist. This is convienient if you just
 want to quickly test out DynamoDB as a cache backend, but in a production environment you will
 likely want to create the tables yourself, for example with
@@ -60,7 +103,7 @@
 ```
 :::
 
-Then deploy with:
+To deploy with the [AWS CLI](https://aws.amazon.com/cli):
 ```
 aws cloudformation deploy \
     --stack-name requests-cache \
diff --git a/docs/user_guide/backends/mongodb.md b/docs/user_guide/backends/mongodb.md
index 11201b2..4b504cc 100644
--- a/docs/user_guide/backends/mongodb.md
+++ b/docs/user_guide/backends/mongodb.md
@@ -33,19 +33,16 @@
 ```
 
 ## Viewing Responses
-Unlike most of the other backends, response data can be easily viewed via the
+By default, responses are only partially serialized so they can be saved as plain MongoDB documents.
+Response data can be easily viewed via the
 [MongoDB shell](https://www.mongodb.com/docs/mongodb-shell/#mongodb-binary-bin.mongosh),
-[Compass](https://www.mongodb.com/products/compass), or any other interface for MongoDB. This is
-possible because its internal document format ([BSON](https://www.mongodb.com/json-and-bson))
-supports all the types needed to store a response as a plain document rather than a fully serialized
-blob.
+[Compass](https://www.mongodb.com/products/compass), or any other interface for MongoDB.
 
 Here is an example response viewed in
 [MongoDB for VSCode](https://code.visualstudio.com/docs/azure/mongodb):
 
 :::{admonition} Screenshot
 :class: toggle
-
 ```{image} ../../_static/mongodb_vscode.png
 ```
 :::
diff --git a/examples/cloudformation.yml b/examples/cloudformation.yml
index ef5d749..e88e0ab 100644
--- a/examples/cloudformation.yml
+++ b/examples/cloudformation.yml
@@ -24,9 +24,15 @@
           KeyType: HASH
         - AttributeName: key
           KeyType: RANGE
-      BillingMode: PAY_PER_REQUEST
-      # Uncomment for provisioned throughput instead of on-demand
-      # BillingMode: PROVISIONED
-      # ProvisionedThroughput:
-      #     WriteCapacityUnits: 2
-      #     ReadCapacityUnits: 2
+      # BillingMode: PAY_PER_REQUEST
+
+      # Optional: Use provisioned throughput instead of on-demand
+      BillingMode: PROVISIONED
+      ProvisionedThroughput:
+          WriteCapacityUnits: 2
+          ReadCapacityUnits: 2
+
+      # Optional: Enable DynamoDB's TTL feature
+      TimeToLiveSpecification:
+        AttributeName: ttl
+        Enabled: true
diff --git a/requests_cache/backends/__init__.py b/requests_cache/backends/__init__.py
index 250be7e..7695b8f 100644
--- a/requests_cache/backends/__init__.py
+++ b/requests_cache/backends/__init__.py
@@ -15,17 +15,17 @@
 
 # Import all backend classes for which dependencies are installed
 try:
-    from .dynamodb import DynamoDbCache, DynamoDbDict
+    from .dynamodb import DynamoDbCache, DynamoDbDict, DynamoDbDocumentDict
 except ImportError as e:
-    DynamoDbCache = DynamoDbDict = get_placeholder_class(e)  # type: ignore
+    DynamoDbCache = DynamoDbDict = DynamoDbDocumentDict = get_placeholder_class(e)  # type: ignore
 try:
     from .gridfs import GridFSCache, GridFSPickleDict
 except ImportError as e:
     GridFSCache = GridFSPickleDict = get_placeholder_class(e)  # type: ignore
 try:
-    from .mongodb import MongoCache, MongoDict, MongoPickleDict
+    from .mongodb import MongoCache, MongoDict, MongoDocumentDict
 except ImportError as e:
-    MongoCache = MongoDict = MongoPickleDict = get_placeholder_class(e)  # type: ignore
+    MongoCache = MongoDict = MongoDocumentDict = get_placeholder_class(e)  # type: ignore
 try:
     from .redis import RedisCache, RedisDict, RedisHashDict
 except ImportError as e:
diff --git a/requests_cache/backends/dynamodb.py b/requests_cache/backends/dynamodb.py
index 17721f6..50ef82d 100644
--- a/requests_cache/backends/dynamodb.py
+++ b/requests_cache/backends/dynamodb.py
@@ -4,6 +4,7 @@
    :classes-only:
    :nosignatures:
 """
+from time import time
 from typing import Dict, Iterable
 
 import boto3
@@ -12,6 +13,7 @@
 from botocore.exceptions import ClientError
 
 from .._utils import get_valid_kwargs
+from ..serializers import dynamodb_document_serializer
 from . import BaseCache, BaseStorage
 
 
@@ -23,38 +25,43 @@
         namespace: Name of DynamoDB hash map
         connection: :boto3:`DynamoDB Resource <services/dynamodb.html#DynamoDB.ServiceResource>`
             object to use instead of creating a new one
+        ttl: Use DynamoDB TTL to automatically remove expired items
         kwargs: Additional keyword arguments for :py:meth:`~boto3.session.Session.resource`
     """
 
     def __init__(
-        self, table_name: str = 'http_cache', connection: ServiceResource = None, **kwargs
+        self,
+        table_name: str = 'http_cache',
+        ttl: bool = True,
+        connection: ServiceResource = None,
+        **kwargs,
     ):
         super().__init__(cache_name=table_name, **kwargs)
-        self.responses = DynamoDbDict(table_name, 'responses', connection=connection, **kwargs)
+        self.responses = DynamoDbDocumentDict(
+            table_name, 'responses', ttl=ttl, connection=connection, **kwargs
+        )
         self.redirects = DynamoDbDict(
-            table_name, 'redirects', connection=self.responses.connection, **kwargs
+            table_name, 'redirects', ttl=False, connection=self.responses.connection, **kwargs
         )
 
 
 class DynamoDbDict(BaseStorage):
-    """A dictionary-like interface for DynamoDB key-value store
-
-    **Notes:**
-        * The actual table name on the Dynamodb server will be ``namespace:table_name``
-        * In order to deal with how DynamoDB stores data, all values are serialized.
+    """A dictionary-like interface for DynamoDB table
 
     Args:
         table_name: DynamoDB table name
         namespace: Name of DynamoDB hash map
         connection: :boto3:`DynamoDB Resource <services/dynamodb.html#DynamoDB.ServiceResource>`
             object to use instead of creating a new one
+        ttl: Use DynamoDB TTL to automatically remove expired items
         kwargs: Additional keyword arguments for :py:meth:`~boto3.session.Session.resource`
     """
 
     def __init__(
         self,
         table_name: str,
-        namespace: str = 'http_cache',
+        namespace: str,
+        ttl: bool = True,
         connection: ServiceResource = None,
         **kwargs,
     ):
@@ -62,36 +69,48 @@
         connection_kwargs = get_valid_kwargs(boto3.Session, kwargs, extras=['endpoint_url'])
         self.connection = connection or boto3.resource('dynamodb', **connection_kwargs)
         self.namespace = namespace
+        self.table_name = table_name
+        self.ttl = ttl
 
-        self._create_table(table_name)
-        self._table = self.connection.Table(table_name)
-        self._table.wait_until_exists()
+        self._table = self.connection.Table(self.table_name)
+        self._create_table()
+        if ttl:
+            self._enable_ttl()
 
-    def _create_table(self, table_name: str):
+    def _create_table(self):
         """Create a default table if one does not already exist"""
         try:
             self.connection.create_table(
                 AttributeDefinitions=[
-                    {
-                        'AttributeName': 'namespace',
-                        'AttributeType': 'S',
-                    },
-                    {
-                        'AttributeName': 'key',
-                        'AttributeType': 'S',
-                    },
+                    {'AttributeName': 'namespace', 'AttributeType': 'S'},
+                    {'AttributeName': 'key', 'AttributeType': 'S'},
                 ],
-                TableName=table_name,
+                TableName=self.table_name,
                 KeySchema=[
                     {'AttributeName': 'namespace', 'KeyType': 'HASH'},
                     {'AttributeName': 'key', 'KeyType': 'RANGE'},
                 ],
-                BillingMode="PAY_PER_REQUEST",
+                BillingMode='PAY_PER_REQUEST',
             )
-        except ClientError:
-            pass
+            self._table.wait_until_exists()
+        # Ignore error if table already exists
+        except ClientError as e:
+            if e.response['Error']['Code'] != 'ResourceInUseException':
+                raise
 
-    def composite_key(self, key: str) -> Dict[str, str]:
+    def _enable_ttl(self):
+        """Enable TTL, if not already enabled"""
+        try:
+            self.connection.meta.client.update_time_to_live(
+                TableName=self.table_name,
+                TimeToLiveSpecification={'AttributeName': 'ttl', 'Enabled': True},
+            )
+        # Ignore error if TTL is already enabled
+        except ClientError as e:
+            if e.response['Error']['Code'] != 'ValidationException':
+                raise
+
+    def _composite_key(self, key: str) -> Dict[str, str]:
         return {'namespace': self.namespace, 'key': str(key)}
 
     def _scan(self):
@@ -105,22 +124,25 @@
         )
 
     def __getitem__(self, key):
-        result = self._table.get_item(Key=self.composite_key(key))
+        result = self._table.get_item(Key=self._composite_key(key))
         if 'Item' not in result:
             raise KeyError
 
-        # Depending on the serializer, the value may be either a string or Binary object
+        # With a custom serializer, the value may be a Binary object
         raw_value = result['Item']['value']
-        return self.serializer.loads(
-            raw_value.value if isinstance(raw_value, Binary) else raw_value
-        )
+        return raw_value.value if isinstance(raw_value, Binary) else raw_value
 
     def __setitem__(self, key, value):
-        item = {**self.composite_key(key), 'value': self.serializer.dumps(value)}
+        item = {**self._composite_key(key), 'value': value}
+
+        # If enabled, set TTL value as a timestamp in unix format
+        if self.ttl and getattr(value, 'ttl', None):
+            item['ttl'] = int(time() + value.ttl)
+
         self._table.put_item(Item=item)
 
     def __delitem__(self, key):
-        response = self._table.delete_item(Key=self.composite_key(key), ReturnValues='ALL_OLD')
+        response = self._table.delete_item(Key=self._composite_key(key), ReturnValues='ALL_OLD')
         if 'Attributes' not in response:
             raise KeyError
 
@@ -141,7 +163,23 @@
         """Delete multiple keys from the cache. Does not raise errors for missing keys."""
         with self._table.batch_writer() as batch:
             for key in keys:
-                batch.delete_item(Key=self.composite_key(key))
+                batch.delete_item(Key=self._composite_key(key))
 
     def clear(self):
         self.bulk_delete((k for k in self))
+
+
+class DynamoDbDocumentDict(DynamoDbDict):
+    """Same as :class:`DynamoDbDict`, but serializes values before saving.
+
+    By default, responses are only partially serialized into a DynamoDB-compatible document format.
+    """
+
+    def __init__(self, *args, serializer=None, **kwargs):
+        super().__init__(*args, serializer=serializer or dynamodb_document_serializer, **kwargs)
+
+    def __getitem__(self, key):
+        return self.serializer.loads(super().__getitem__(key))
+
+    def __setitem__(self, key, item):
+        super().__setitem__(key, self.serializer.dumps(item))
diff --git a/requests_cache/backends/mongodb.py b/requests_cache/backends/mongodb.py
index b48f2da..056d658 100644
--- a/requests_cache/backends/mongodb.py
+++ b/requests_cache/backends/mongodb.py
@@ -30,7 +30,7 @@
 
     def __init__(self, db_name: str = 'http_cache', connection: MongoClient = None, **kwargs):
         super().__init__(cache_name=db_name, **kwargs)
-        self.responses: MongoDict = MongoPickleDict(
+        self.responses: MongoDict = MongoDocumentDict(
             db_name,
             collection_name='responses',
             connection=connection,
@@ -140,10 +140,10 @@
         self.connection.close()
 
 
-class MongoPickleDict(MongoDict):
+class MongoDocumentDict(MongoDict):
     """Same as :class:`MongoDict`, but serializes values before saving.
 
-    By default, responses are only partially serialized into a MongoDB-compatible document mapping.
+    By default, responses are only partially serialized into a MongoDB-compatible document format.
     """
 
     def __init__(self, *args, serializer=None, **kwargs):
diff --git a/requests_cache/serializers/__init__.py b/requests_cache/serializers/__init__.py
index 72420b7..d49545a 100644
--- a/requests_cache/serializers/__init__.py
+++ b/requests_cache/serializers/__init__.py
@@ -7,6 +7,7 @@
     bson_document_serializer,
     bson_serializer,
     dict_serializer,
+    dynamodb_document_serializer,
     json_serializer,
     pickle_serializer,
     safe_pickle_serializer,
@@ -21,6 +22,7 @@
     'Stage',
     'bson_serializer',
     'bson_document_serializer',
+    'dynamodb_document_serializer',
     'dict_serializer',
     'json_serializer',
     'pickle_serializer',
diff --git a/requests_cache/serializers/cattrs.py b/requests_cache/serializers/cattrs.py
index 0aa62c2..3de7079 100644
--- a/requests_cache/serializers/cattrs.py
+++ b/requests_cache/serializers/cattrs.py
@@ -12,6 +12,7 @@
    :nosignatures:
 """
 from datetime import datetime, timedelta
+from decimal import Decimal
 from typing import Callable, Dict, ForwardRef, MutableMapping
 
 from cattr import GenConverter
@@ -42,7 +43,11 @@
         return self.converter.structure(value, cl=CachedResponse)
 
 
-def init_converter(factory: Callable[..., GenConverter] = None, convert_datetime: bool = True):
+def init_converter(
+    factory: Callable[..., GenConverter] = None,
+    convert_datetime: bool = True,
+    convert_timedelta: bool = True,
+) -> GenConverter:
     """Make a converter to structure and unstructure nested objects within a
     :py:class:`.CachedResponse`
 
@@ -56,15 +61,18 @@
 
     # Convert datetimes to and from iso-formatted strings
     if convert_datetime:
-        converter.register_unstructure_hook(datetime, lambda obj: obj.isoformat() if obj else None)  # type: ignore
+        converter.register_unstructure_hook(datetime, lambda obj: obj.isoformat() if obj else None)
         converter.register_structure_hook(datetime, _to_datetime)
 
     # Convert timedeltas to and from float values in seconds
-    converter.register_unstructure_hook(timedelta, lambda obj: obj.total_seconds() if obj else None)  # type: ignore
-    converter.register_structure_hook(timedelta, _to_timedelta)
+    if convert_timedelta:
+        converter.register_unstructure_hook(
+            timedelta, lambda obj: obj.total_seconds() if obj else None
+        )
+        converter.register_structure_hook(timedelta, _to_timedelta)
 
     # Convert dict-like objects to and from plain dicts
-    converter.register_unstructure_hook(RequestsCookieJar, lambda obj: dict(obj.items()))  # type: ignore
+    converter.register_unstructure_hook(RequestsCookieJar, lambda obj: dict(obj.items()))
     converter.register_structure_hook(RequestsCookieJar, lambda obj, cls: cookiejar_from_dict(obj))
     converter.register_unstructure_hook(CaseInsensitiveDict, dict)
     converter.register_structure_hook(
@@ -85,6 +93,16 @@
     return converter
 
 
+def make_decimal_timedelta_converter(**kwargs) -> GenConverter:
+    """Make a converter that uses Decimals instead of floats to represent timedelta objects"""
+    converter = GenConverter(**kwargs)
+    converter.register_unstructure_hook(
+        timedelta, lambda obj: Decimal(str(obj.total_seconds())) if obj else None
+    )
+    converter.register_structure_hook(timedelta, _to_timedelta)
+    return converter
+
+
 def _to_datetime(obj, cls) -> datetime:
     if isinstance(obj, str):
         obj = datetime.fromisoformat(obj)
@@ -94,4 +112,6 @@
 def _to_timedelta(obj, cls) -> timedelta:
     if isinstance(obj, (int, float)):
         obj = timedelta(seconds=obj)
+    elif isinstance(obj, Decimal):
+        obj = timedelta(seconds=float(obj))
     return obj
diff --git a/requests_cache/serializers/preconf.py b/requests_cache/serializers/preconf.py
index 557c1a4..1cf6816 100644
--- a/requests_cache/serializers/preconf.py
+++ b/requests_cache/serializers/preconf.py
@@ -4,7 +4,7 @@
 required for specific serialization formats.
 
 This module wraps those converters as serializer :py:class:`.Stage` objects. These are then used as
-a stage in a :py:class:`.SerializerPipeline`, which runs after the base converter and before the
+stages in a :py:class:`.SerializerPipeline`, which runs after the base converter and before the
 format's ``dumps()`` (or equivalent) method.
 
 For any optional libraries that aren't installed, the corresponding serializer will be a placeholder
@@ -14,11 +14,15 @@
    :nosignatures:
 """
 import pickle
+from datetime import timedelta
+from decimal import Decimal
 from functools import partial
 from importlib import import_module
 
+from cattr import GenConverter
+
 from .._utils import get_placeholder_class
-from .cattrs import CattrStage
+from .cattrs import CattrStage, make_decimal_timedelta_converter
 from .pipeline import SerializerPipeline, Stage
 
 
@@ -144,3 +148,13 @@
     )  #: Complete YAML serializer
 except ImportError as e:
     yaml_serializer = get_placeholder_class(e)
+
+
+dynamodb_preconf_stage = CattrStage(
+    factory=make_decimal_timedelta_converter, convert_timedelta=False
+)
+dynamodb_document_serializer = SerializerPipeline(
+    [dynamodb_preconf_stage],
+    name='dynamodb_document',
+    is_binary=False,
+)
diff --git a/tests/conftest.py b/tests/conftest.py
index dc77368..96c35a7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -25,7 +25,16 @@
 
 from requests_cache import ALL_METHODS, CachedSession, install_cache, uninstall_cache
 
-CACHE_NAME = 'pytest_cache'
+# Configure logging to show log output when tests fail (or with pytest -s)
+basicConfig(
+    level='INFO',
+    format='%(message)s',
+    datefmt='[%m-%d %H:%M:%S]',
+    handlers=[RichHandler(rich_tracebacks=True, markup=True)],
+)
+# getLogger('requests_cache').setLevel('DEBUG')
+logger = getLogger(__name__)
+
 
 # Allow running longer stress tests with an environment variable
 STRESS_TEST_MULTIPLIER = int(os.getenv('STRESS_TEST_MULTIPLIER', '1'))
@@ -49,7 +58,6 @@
     'robots.txt',
     'xml',
 ]
-
 HTTPDATE_STR = 'Fri, 16 APR 2021 21:13:00 GMT'
 HTTPDATE_DATETIME = datetime(2021, 4, 16, 21, 13)
 EXPIRED_DT = datetime.now() - timedelta(1)
@@ -66,28 +74,11 @@
 MOCKED_URL_500 = 'http+mock://requests-cache.com/answer?q=this-statement-is-false'
 MOCK_PROTOCOLS = ['mock://', 'http+mock://', 'https+mock://']
 
+CACHE_NAME = 'pytest_cache'
 PROJECT_DIR = Path(__file__).parent.parent.absolute()
 SAMPLE_DATA_DIR = PROJECT_DIR / 'tests' / 'sample_data'
 SAMPLE_CACHE_FILES = list(SAMPLE_DATA_DIR.glob('sample.db.*'))
 
-AWS_OPTIONS = {
-    'endpoint_url': 'http://localhost:8000',
-    'region_name': 'us-east-1',
-    'aws_access_key_id': 'placeholder',
-    'aws_secret_access_key': 'placeholder',
-}
-
-
-# Configure logging to show log output when tests fail (or with pytest -s)
-basicConfig(
-    level='INFO',
-    format='%(message)s',
-    datefmt='[%m-%d %H:%M:%S]',
-    handlers=[RichHandler(rich_tracebacks=True, markup=True)],
-)
-# getLogger('requests_cache').setLevel('DEBUG')
-logger = getLogger(__name__)
-
 
 def httpbin(path):
     """Get the url for either a local or remote httpbin instance"""
diff --git a/tests/integration/base_cache_test.py b/tests/integration/base_cache_test.py
index 0ec1d51..081b5a1 100644
--- a/tests/integration/base_cache_test.py
+++ b/tests/integration/base_cache_test.py
@@ -21,7 +21,6 @@
     SERIALIZERS,
     SerializerPipeline,
     Stage,
-    dict_serializer,
     safe_pickle_serializer,
 )
 from tests.conftest import (
@@ -56,7 +55,6 @@
     """Base class for testing cache backend classes"""
 
     backend_class: Type[BaseCache] = None
-    document_support: bool = False
     init_kwargs: Dict = {}
 
     def init_session(self, cache_name=CACHE_NAME, clear=True, **kwargs) -> CachedSession:
@@ -83,8 +81,6 @@
         """
         if not isinstance(serializer, (SerializerPipeline, Stage)):
             pytest.skip(f'Dependencies not installed for {serializer}')
-        if serializer is dict_serializer and not self.document_support:
-            return
 
         url = httpbin(method.lower())
         session = self.init_session(serializer=serializer)
@@ -98,8 +94,6 @@
         """Test all relevant combinations of (response formats X serializers)"""
         if not isinstance(serializer, SerializerPipeline):
             pytest.skip(f'Dependencies not installed for {serializer}')
-        if serializer is dict_serializer and not self.document_support:
-            return
 
         session = self.init_session(serializer=serializer)
         # Workaround for this issue: https://github.com/kevin1024/pytest-httpbin/issues/60
diff --git a/tests/integration/base_storage_test.py b/tests/integration/base_storage_test.py
index 776d494..7d38643 100644
--- a/tests/integration/base_storage_test.py
+++ b/tests/integration/base_storage_test.py
@@ -18,8 +18,9 @@
     num_instances: int = 10  # Max number of cache instances to test
 
     def init_cache(self, cache_name=CACHE_NAME, index=0, clear=True, **kwargs):
+        kwargs = {**self.init_kwargs, **kwargs}
         kwargs.setdefault('serializer', 'pickle')
-        cache = self.storage_class(cache_name, f'table_{index}', **self.init_kwargs, **kwargs)
+        cache = self.storage_class(cache_name, f'table_{index}', **kwargs)
         if clear:
             cache.clear()
         return cache
@@ -98,7 +99,7 @@
 
     def test_picklable_dict(self):
         if self.picklable:
-            cache = self.init_cache()
+            cache = self.init_cache(serializer='pickle')
             original_obj = BasicDataclass(
                 bool_attr=True,
                 datetime_attr=datetime(2022, 2, 2),
diff --git a/tests/integration/test_dynamodb.py b/tests/integration/test_dynamodb.py
index 52cb24f..84c1008 100644
--- a/tests/integration/test_dynamodb.py
+++ b/tests/integration/test_dynamodb.py
@@ -1,12 +1,30 @@
+from collections import OrderedDict
+from decimal import Decimal
 from unittest.mock import patch
 
 import pytest
+from botocore.exceptions import ClientError
 
-from requests_cache.backends import DynamoDbCache, DynamoDbDict
-from tests.conftest import AWS_OPTIONS, fail_if_no_connection
-from tests.integration.base_cache_test import BaseCacheTest
+from requests_cache.backends import DynamoDbCache, DynamoDbDict, DynamoDbDocumentDict
+from requests_cache.serializers import dynamodb_document_serializer
+from tests.conftest import HTTPBIN_FORMATS, HTTPBIN_METHODS, fail_if_no_connection
+from tests.integration.base_cache_test import TEST_SERIALIZERS, BaseCacheTest
 from tests.integration.base_storage_test import BaseStorageTest
 
+AWS_OPTIONS = {
+    'endpoint_url': 'http://localhost:8000',
+    'region_name': 'us-east-1',
+    'aws_access_key_id': 'placeholder',
+    'aws_secret_access_key': 'placeholder',
+}
+DYNAMODB_OPTIONS = {
+    **AWS_OPTIONS,
+    'serializer': None,  # Use class default serializer
+}
+
+# Add extra DynamoDB-specific format to list of serializers to test against
+DYNAMODB_SERIALIZERS = [dynamodb_document_serializer] + list(TEST_SERIALIZERS.values())
+
 
 @pytest.fixture(scope='module', autouse=True)
 @fail_if_no_connection(connect_timeout=5)
@@ -20,17 +38,71 @@
 
 class TestDynamoDbDict(BaseStorageTest):
     storage_class = DynamoDbDict
-    init_kwargs = AWS_OPTIONS
-    picklable = True
+    init_kwargs = DYNAMODB_OPTIONS
 
     @patch('requests_cache.backends.dynamodb.boto3.resource')
     def test_connection_kwargs(self, mock_resource):
         """A spot check to make sure optional connection kwargs gets passed to connection"""
-        DynamoDbDict('test', region_name='us-east-2', invalid_kwarg='???')
+        DynamoDbDict('test_table', 'namespace', region_name='us-east-2', invalid_kwarg='???')
         mock_resource.assert_called_with('dynamodb', region_name='us-east-2')
 
+    def test_create_table_error(self):
+        """An error other than 'table already exists' should be reraised"""
+        cache = self.init_cache()
+        error = ClientError({'Error': {'Code': 'NullPointerException'}}, 'CreateTable')
+        with patch.object(cache.connection, 'create_table', side_effect=error):
+            with pytest.raises(ClientError):
+                cache._create_table()
+
+    def test_enable_ttl_error(self):
+        """An error other than 'ttl already enabled' should be reraised"""
+        cache = self.init_cache()
+        error = ClientError({'Error': {'Code': 'NullPointerException'}}, 'UpdateTimeToLive')
+        with patch.object(cache.connection.meta.client, 'update_time_to_live', side_effect=error):
+            with pytest.raises(ClientError):
+                cache._enable_ttl()
+
+    @pytest.mark.parametrize('ttl_enabled', [True, False])
+    def test_ttl(self, ttl_enabled):
+        """DynamoDB's TTL removal process can take up to 48 hours to run, so just test if the
+        'ttl' attribute is set correctly if enabled, and not set if disabled.
+        """
+        cache = self.init_cache(ttl=ttl_enabled)
+        item = OrderedDict(foo='bar')
+        item.ttl = 60
+        cache['key'] = item
+
+        # 'ttl' is a reserved word, so to retrieve it we need to alias it
+        item = cache._table.get_item(
+            Key=cache._composite_key('key'),
+            ProjectionExpression='#t',
+            ExpressionAttributeNames={'#t': 'ttl'},
+        )
+        ttl_value = item['Item'].get('ttl')
+
+        if ttl_enabled:
+            assert isinstance(ttl_value, Decimal)
+        else:
+            assert ttl_value is None
+
+
+class TestDynamoDbDocumentDict(BaseStorageTest):
+    storage_class = DynamoDbDocumentDict
+    init_kwargs = DYNAMODB_OPTIONS
+    picklable = True
+
 
 class TestDynamoDbCache(BaseCacheTest):
     backend_class = DynamoDbCache
-    # document_support = True
-    init_kwargs = AWS_OPTIONS
+    init_kwargs = DYNAMODB_OPTIONS
+
+    @pytest.mark.parametrize('serializer', DYNAMODB_SERIALIZERS)
+    @pytest.mark.parametrize('method', HTTPBIN_METHODS)
+    @pytest.mark.parametrize('field', ['params', 'data', 'json'])
+    def test_all_methods(self, field, method, serializer):
+        super().test_all_methods(field, method, serializer)
+
+    @pytest.mark.parametrize('serializer', DYNAMODB_SERIALIZERS)
+    @pytest.mark.parametrize('response_format', HTTPBIN_FORMATS)
+    def test_all_response_formats(self, response_format, serializer):
+        super().test_all_response_formats(response_format, serializer)
diff --git a/tests/integration/test_filesystem.py b/tests/integration/test_filesystem.py
index 1b81dff..4181690 100644
--- a/tests/integration/test_filesystem.py
+++ b/tests/integration/test_filesystem.py
@@ -19,7 +19,8 @@
         rmtree(CACHE_NAME, ignore_errors=True)
 
     def init_cache(self, index=0, clear=True, **kwargs):
-        cache = FileDict(f'{CACHE_NAME}_{index}', serializer='pickle', use_temp=True, **kwargs)
+        kwargs.setdefault('serializer', 'pickle')
+        cache = FileDict(f'{CACHE_NAME}_{index}', use_temp=True, **kwargs)
         if clear:
             cache.clear()
         return cache
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
index 67e7d75..7db3f8c 100644
--- a/tests/integration/test_mongodb.py
+++ b/tests/integration/test_mongodb.py
@@ -13,7 +13,7 @@
     GridFSPickleDict,
     MongoCache,
     MongoDict,
-    MongoPickleDict,
+    MongoDocumentDict,
 )
 from requests_cache.policy.expiration import NEVER_EXPIRE
 from requests_cache.serializers import bson_document_serializer
@@ -41,7 +41,7 @@
 
 
 class TestMongoPickleDict(BaseStorageTest):
-    storage_class = MongoPickleDict
+    storage_class = MongoDocumentDict
     picklable = True
 
     @patch('requests_cache.backends.mongodb.MongoClient')
@@ -57,7 +57,6 @@
 
 class TestMongoCache(BaseCacheTest):
     backend_class = MongoCache
-    document_support = True
 
     init_kwargs = {'serializer': None}  # Use class default serializer instead of pickle
 
@@ -146,4 +145,3 @@
 
 class TestGridFSCache(BaseCacheTest):
     backend_class = GridFSCache
-    document_support = False