Add serializer name to cache key to avoid errors due to switching serializers
diff --git a/requests_cache/backends/base.py b/requests_cache/backends/base.py
index 7f44af4..a8b1be3 100644
--- a/requests_cache/backends/base.py
+++ b/requests_cache/backends/base.py
@@ -109,6 +109,7 @@
             request=request,
             ignored_parameters=self._settings.ignored_parameters,
             match_headers=self._settings.match_headers,
+            serializer=self.responses.serializer,
             **kwargs,
         )
 
@@ -290,6 +291,7 @@
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self._serializer = None
+        self.serializer = None
 
     def __getitem__(self, key):
         """An additional step is needed here for response data. Since the original response object
diff --git a/requests_cache/backends/mongodb.py b/requests_cache/backends/mongodb.py
index cd39f49..12d11d9 100644
--- a/requests_cache/backends/mongodb.py
+++ b/requests_cache/backends/mongodb.py
@@ -104,16 +104,12 @@
 
 from .._utils import get_valid_kwargs
 from ..expiration import NEVER_EXPIRE, get_expiration_seconds
-from ..serializers import SerializerPipeline
-from ..serializers.preconf import bson_preconf_stage
+from ..serializers import bson_document_serializer
 from . import BaseCache, BaseStorage
 
-document_serializer = SerializerPipeline([bson_preconf_stage], is_binary=False)
 logger = getLogger(__name__)
 
 
-# TODO: Is there any reason to support custom serializers here?
-# TODO: Save items with different cache keys to avoid conflicts with old serialization format?
 class MongoCache(BaseCache):
     """MongoDB cache backend
 
@@ -239,7 +235,7 @@
     """
 
     def __init__(self, *args, serializer=None, **kwargs):
-        super().__init__(*args, serializer=serializer or document_serializer, **kwargs)
+        super().__init__(*args, serializer=serializer or bson_document_serializer, **kwargs)
 
     def __getitem__(self, key):
         return self.serializer.loads(super().__getitem__(key))
diff --git a/requests_cache/cache_keys.py b/requests_cache/cache_keys.py
index 99e7904..3d6f63a 100644
--- a/requests_cache/cache_keys.py
+++ b/requests_cache/cache_keys.py
@@ -9,7 +9,7 @@
 import json
 from hashlib import blake2b
 from logging import getLogger
-from typing import TYPE_CHECKING, Dict, Iterable, List, Mapping, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Mapping, Optional, Union
 from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
 
 from requests import Request, Session
@@ -35,6 +35,7 @@
     request: AnyRequest = None,
     ignored_parameters: ParamList = None,
     match_headers: Union[ParamList, bool] = False,
+    serializer: Any = None,
     **request_kwargs,
 ) -> str:
     """Create a normalized cache key from either a request object or :py:class:`~requests.Request`
@@ -58,6 +59,7 @@
         request.body or '',
         request_kwargs.get('verify', True),
         *get_matched_headers(request.headers, match_headers),
+        str(serializer),
     ]
 
     # Generate a hash based on this info
diff --git a/requests_cache/serializers/__init__.py b/requests_cache/serializers/__init__.py
index 91c5e4c..dec86ef 100644
--- a/requests_cache/serializers/__init__.py
+++ b/requests_cache/serializers/__init__.py
@@ -4,6 +4,7 @@
 from .cattrs import CattrStage
 from .pipeline import SerializerPipeline, Stage
 from .preconf import (
+    bson_document_serializer,
     bson_serializer,
     dict_serializer,
     json_serializer,
@@ -19,6 +20,7 @@
     'SerializerPipeline',
     'Stage',
     'bson_serializer',
+    'bson_document_serializer',
     'dict_serializer',
     'json_serializer',
     'pickle_serializer',
diff --git a/requests_cache/serializers/pipeline.py b/requests_cache/serializers/pipeline.py
index 2a22761..cf10714 100644
--- a/requests_cache/serializers/pipeline.py
+++ b/requests_cache/serializers/pipeline.py
@@ -37,11 +37,12 @@
         is_binary: Indicates whether the serialized content is binary
     """
 
-    def __init__(self, stages: Sequence, is_binary: bool = False):
+    def __init__(self, stages: Sequence, name: str = None, is_binary: bool = False):
         self.is_binary = is_binary
         self.stages = stages
         self.dump_stages = [stage.dumps for stage in stages]
         self.load_stages = [stage.loads for stage in reversed(stages)]
+        self.name = name
 
     def dumps(self, value) -> Union[str, bytes]:
         for step in self.dump_stages:
@@ -52,3 +53,6 @@
         for step in self.load_stages:
             value = step(value)
         return value
+
+    def __str__(self) -> str:
+        return f'SerializerPipeline(name={self.name}, n_stages={len(self.dump_stages)})'
diff --git a/requests_cache/serializers/preconf.py b/requests_cache/serializers/preconf.py
index b0ad069..557c1a4 100644
--- a/requests_cache/serializers/preconf.py
+++ b/requests_cache/serializers/preconf.py
@@ -46,9 +46,11 @@
 
 # Basic serializers with no additional dependencies
 dict_serializer = SerializerPipeline(
-    [base_stage], is_binary=False
+    [base_stage], name='dict', is_binary=False
 )  #: Partial serializer that unstructures responses into dicts
-pickle_serializer = SerializerPipeline([base_stage, pickle], is_binary=True)  #: Pickle serializer
+pickle_serializer = SerializerPipeline(
+    [base_stage, pickle], name='pickle', is_binary=True
+)  #: Pickle serializer
 
 
 # Safe pickle serializer
@@ -59,14 +61,22 @@
     """
     from itsdangerous import Signer
 
-    return Stage(Signer(secret_key=secret_key, salt=salt), dumps='sign', loads='unsign')
+    return Stage(
+        Signer(secret_key=secret_key, salt=salt),
+        dumps='sign',
+        loads='unsign',
+    )
 
 
 def safe_pickle_serializer(secret_key=None, salt='requests-cache', **kwargs) -> SerializerPipeline:
     """Create a serializer that uses ``pickle`` + ``itsdangerous`` to add a signature to
     responses on write, and validate that signature with a secret key on read.
     """
-    return SerializerPipeline([base_stage, pickle, signer_stage(secret_key, salt)], is_binary=True)
+    return SerializerPipeline(
+        [base_stage, pickle, signer_stage(secret_key, salt)],
+        name='safe_pickle',
+        is_binary=True,
+    )
 
 
 try:
@@ -91,8 +101,16 @@
     import bson
 
     bson_serializer = SerializerPipeline(
-        [bson_preconf_stage, Stage(bson, **_get_bson_functions())], is_binary=True
+        [bson_preconf_stage, Stage(bson, **_get_bson_functions())],
+        name='bson',
+        is_binary=True,
     )  #: Complete BSON serializer; uses pymongo's ``bson`` if installed, otherwise standalone ``bson`` codec
+    bson_document_serializer = SerializerPipeline(
+        [bson_preconf_stage],
+        name='bson_document',
+        is_binary=False,
+    )  #: BSON partial serializer that produces a MongoDB-compatible document
 except ImportError as e:
     bson_serializer = get_placeholder_class(e)
+    bson_document_serializer = get_placeholder_class(e)
 
@@ -109,7 +126,9 @@
 
 _json_stage = Stage(dumps=partial(json.dumps, indent=2), loads=json.loads)
 json_serializer = SerializerPipeline(
-    [_json_preconf_stage, _json_stage], is_binary=False
+    [_json_preconf_stage, _json_stage],
+    name='json',
+    is_binary=False,
 )  #: Complete JSON serializer; uses ultrajson if available
 
 
@@ -117,11 +136,10 @@
 try:
     import yaml
 
+    _yaml_stage = Stage(yaml, loads='safe_load', dumps='safe_dump')
     yaml_serializer = SerializerPipeline(
-        [
-            yaml_preconf_stage,
-            Stage(yaml, loads='safe_load', dumps='safe_dump'),
-        ],
+        [yaml_preconf_stage, _yaml_stage],
+        name='yaml',
         is_binary=False,
     )  #: Complete YAML serializer
 except ImportError as e:
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
index 100b40d..ecec6c6 100644
--- a/tests/integration/test_mongodb.py
+++ b/tests/integration/test_mongodb.py
@@ -15,14 +15,14 @@
     MongoDict,
     MongoPickleDict,
 )
-from requests_cache.backends.mongodb import document_serializer
 from requests_cache.expiration import NEVER_EXPIRE
+from requests_cache.serializers import bson_document_serializer
 from tests.conftest import HTTPBIN_FORMATS, HTTPBIN_METHODS, fail_if_no_connection, httpbin
 from tests.integration.base_cache_test import TEST_SERIALIZERS, BaseCacheTest
 from tests.integration.base_storage_test import BaseStorageTest
 
 # Add extra MongoDB-specific format to list of serializers to test against
-MONGODB_SERIALIZERS = [document_serializer] + list(TEST_SERIALIZERS.values())
+MONGODB_SERIALIZERS = [bson_document_serializer] + list(TEST_SERIALIZERS.values())
 logger = getLogger(__name__)
 
 
diff --git a/tests/unit/test_cache_keys.py b/tests/unit/test_cache_keys.py
index dd79bf6..3b29147 100644
--- a/tests/unit/test_cache_keys.py
+++ b/tests/unit/test_cache_keys.py
@@ -8,7 +8,7 @@
 
 from requests_cache.cache_keys import MAX_NORM_BODY_SIZE, create_key, normalize_request
 
-CACHE_KEY = 'e8cb526891875e37'
+CACHE_KEY = 'e25f7e6326966e82'
 
 
 @pytest.mark.parametrize(
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 208df90..27af9f5 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -17,7 +17,6 @@
 from requests_cache._utils import get_placeholder_class
 from requests_cache.backends import BACKEND_CLASSES, BaseCache, SQLiteDict, SQLitePickleDict
 from requests_cache.backends.base import DESERIALIZE_ERRORS
-from requests_cache.cache_keys import create_key
 from requests_cache.expiration import DO_NOT_CACHE, EXPIRE_IMMEDIATELY, NEVER_EXPIRE
 from tests.conftest import (
     MOCKED_URL,
@@ -84,7 +83,7 @@
     response_1 = mock_session.get(MOCKED_URL)
     response_2 = mock_session.get(MOCKED_URL)
     response_3 = mock_session.get(MOCKED_URL)
-    cache_key = 'd7fa9fb7317b7412'
+    cache_key = '29de1c4491126e0b'
 
     assert response_1.cache_key == cache_key
     assert isinstance(response_1.created_at, datetime)
@@ -487,11 +486,11 @@
 
 
 def test_key_fn(mock_session):
-    def create_key(request, **kwargs):
+    def create_custom_key(request, **kwargs):
         """Create a key based on only the request URL (without params)"""
         return request.url.split('?')[0]
 
-    mock_session.settings.key_fn = create_key
+    mock_session.settings.key_fn = create_custom_key
     mock_session.get(MOCKED_URL)
     response = mock_session.get(MOCKED_URL, params={'k': 'v'})
     assert response.from_cache is True
@@ -617,13 +616,13 @@
 
 def test_remove_expired_responses__error(mock_session):
     # Start with two cached responses, one of which will raise an error
-    mock_session.get(MOCKED_URL)
-    mock_session.get(MOCKED_URL_JSON)
+    response_1 = mock_session.get(MOCKED_URL)
+    response_2 = mock_session.get(MOCKED_URL_JSON)
 
     def error_on_key(key):
-        if key == create_key(method='GET', url=MOCKED_URL_JSON):
+        if key == response_2.cache_key:
             raise PickleError
-        return mock_session.get(MOCKED_URL_JSON)
+        return response_1
 
     with patch.object(SQLitePickleDict, '__getitem__', side_effect=error_on_key):
         BaseCache.remove_expired_responses(mock_session.cache)