From 005a57306de7911553d628615befbd755b7792af Mon Sep 17 00:00:00 2001
From: Judit Novak <judit.novak@canonical.com>
Date: Mon, 16 Oct 2023 15:05:01 +0200
Subject: [PATCH] [DPE-2651] Juju3 pipelines / Testing secrets (#333)

* Juju3 pipelines

* New ci + tox.ini env vars to indicate Juju version

* pyproject.toml, poetry.lock -- adding pytest-mock, ops -> integration group, etc.

* Fixtures to enforce correct Juju version recognition for secrets

* Test changes adapted to Juju2+3

* Tests: Juju2 vs Juju3 (secrets vs databag)

* New unittests for internal secrets

* NO databag usage!

* data_platform_libs/data_interfaces on v19

* Updating CONTRIBUTING doc

* Defaults set to Juju3

* Free up disk space on runner
---
 .github/workflows/ci.yaml                     |  51 +++-
 CONTRIBUTING.md                               |   7 +
 .../data_platform_libs/v0/data_interfaces.py  | 283 +++++++++++++-----
 poetry.lock                                   | 101 +++++--
 pyproject.toml                                |   5 +-
 src/relations/mysql_provider.py               |   4 +-
 tests/conftest.py                             |  43 +++
 tests/integration/helpers.py                  |  12 +-
 tests/integration/relations/test_database.py  |  25 ++
 tests/integration/test_backups.py             |  14 +-
 tests/unit/test_backups.py                    |  11 +-
 tests/unit/test_charm.py                      |  68 ++++-
 tests/unit/test_relation_mysql_legacy.py      |  62 ++++
 tox.ini                                       |   4 +
 14 files changed, 555 insertions(+), 135 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 54607c5ab..94a5fba09 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -19,6 +19,9 @@ jobs:
     uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v4.2.3
 
   unit-test:
+    strategy:
+      matrix:
+        juju-version: ["2.9", "3.1"]
     name: Unit test charm
     runs-on: ubuntu-latest
     timeout-minutes: 5
@@ -31,6 +34,10 @@ jobs:
           pipx install poetry
       - name: Run tests
         run: tox run -e unit
+        env:
+          # This env var is only to indicate Juju version to "simulate" in the unit tests
+          # No libjuju is being actually used in unit testing
+          LIBJUJU_VERSION_SPECIFIER: ${{ matrix.juju-version }}
       - name: Upload Coverage to Codecov
         uses: codecov/codecov-action@v3
 
@@ -98,7 +105,35 @@ jobs:
           # Update whenever charmcraft.yaml is changed
           - series: jammy
             bases-index: 0
-    name: (GH hosted) ${{ matrix.groups.job_name }} | ${{ matrix.ubuntu-versions.series }}
+        juju-snap-channel: ["2.9/stable", "3.1/stable"]
+        include:
+          - juju-snap-channel: "3.1/stable"
+            agent-version: "3.1.6"
+            libjuju-version: "3.2.0.1"
+          - juju-snap-channel: "2.9/stable"
+            agent-version: "2.9.45"
+            libjuju-version: "2.9.44.1"
+        exclude:
+          # Disabling HA tests while we keep the Juju 3 pipeline limited
+          - juju-snap-channel: "3.1/stable"
+            groups:
+              job_name: "high_availability/test_replication.py | group 1"
+          - juju-snap-channel: "3.1/stable"
+            groups:
+              job_name: "high_availability/test_replication.py | group 2"
+          - juju-snap-channel: "3.1/stable"
+            groups:
+              job_name: "high_availability/test_replication.py | group 3"
+          - juju-snap-channel: "3.1/stable"
+            groups:
+              job_name: "high_availability/test_self_healing.py | group 1"
+          - juju-snap-channel: "3.1/stable"
+            groups:
+              job_name: "high_availability/test_upgrade.py | group 1"
+          - juju-snap-channel: "3.1/stable"
+            groups:
+              job_name: "high_availability/test_upgrade_from_stable.py | group 1"
+    name: ${{ matrix.juju-snap-channel }} - (GH hosted) ${{ matrix.groups.job_name }} | ${{ matrix.ubuntu-versions.series }}
     needs:
       - lint
       - unit-test
@@ -117,11 +152,22 @@ jobs:
         uses: charmed-kubernetes/actions-operator@main
         with:
           provider: lxd
-          bootstrap-options: "--agent-version 2.9.43"
+          bootstrap-options: "--agent-version ${{ matrix.agent-version }}"
+          juju-channel: ${{ matrix.juju-snap-channel }}
+      - name: Update python-libjuju version
+        if: ${{ matrix.libjuju-version == '2.9.44.1' }}
+        run: poetry add --lock --group integration juju@'${{ matrix.libjuju-version }}'
       - name: Download packed charm(s)
         uses: actions/download-artifact@v3
         with:
           name: ${{ needs.build.outputs.artifact-name }}
+      - name: Free up disk space
+        run: |
+          # From https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf /usr/local/share/boost
+          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
       - name: Select test stability level
         id: select-test-stability
         run: |
@@ -136,6 +182,7 @@ jobs:
       - name: Run integration tests
         run: tox run -e integration -- "${{ matrix.groups.path_to_test_file }}" --group="${{ matrix.groups.group_number }}" -m '${{ steps.select-test-stability.outputs.mark_expression }}' --mysql-charm-series="${{ matrix.ubuntu-versions.series }}" --mysql-charm-bases-index="${{ matrix.ubuntu-versions.bases-index }}"
         env:
+          LIBJUJU_VERSION_SPECIFIER: ${{ matrix.libjuju-version }}
           SECRETS_FROM_GITHUB: |
             {
               "AWS_ACCESS_KEY": "${{ secrets.AWS_ACCESS_KEY }}",
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8a2425090..eac96d58c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -44,6 +44,13 @@ tox run -e integration   # integration tests
 tox                      # runs 'lint' and 'unit' environments
 ```
 
+By default, tests are run against Juju 3. To run tests against Juju 2, the following
+environment variable should be defined with a valid `juju` Python library version:
+
+```
+export LIBJUJU_VERSION_SPECIFIER=2.9.44.1
+```
+
 ## Build charm
 
 Build the charm in this git repository using:
diff --git a/lib/charms/data_platform_libs/v0/data_interfaces.py b/lib/charms/data_platform_libs/v0/data_interfaces.py
index 9fa0021ec..2624dd4d6 100644
--- a/lib/charms/data_platform_libs/v0/data_interfaces.py
+++ b/lib/charms/data_platform_libs/v0/data_interfaces.py
@@ -320,7 +320,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent):
 
 # Increment this PATCH version before using `charmcraft publish-lib` or reset
 # to 0 if you are raising the major API version
-LIBPATCH = 18
+LIBPATCH = 19
 
 PYDEPS = ["ops>=2.0.0"]
 
@@ -377,12 +377,19 @@ class SecretsIllegalUpdateError(SecretError):
     """Secrets aren't yet available for Juju version used."""
 
 
-def get_encoded_field(relation, member, field) -> Dict[str, str]:
+def get_encoded_field(
+    relation: Relation, member: Union[Unit, Application], field: str
+) -> Union[str, List[str], Dict[str, str]]:
     """Retrieve and decode an encoded field from relation data."""
     return json.loads(relation.data[member].get(field, "{}"))
 
 
-def set_encoded_field(relation, member, field, value) -> None:
+def set_encoded_field(
+    relation: Relation,
+    member: Union[Unit, Application],
+    field: str,
+    value: Union[str, list, Dict[str, str]],
+) -> None:
     """Set an encoded field from relation data."""
     relation.data[member].update({field: json.dumps(value)})
 
@@ -400,6 +407,15 @@ def diff(event: RelationChangedEvent, bucket: Union[Unit, Application]) -> Diff:
     """
     # Retrieve the old data from the data key in the application relation databag.
     old_data = get_encoded_field(event.relation, bucket, "data")
+
+    if not old_data:
+        old_data = {}
+
+    if not isinstance(old_data, dict):
+        # We should never get here, added to re-assure pyright
+        logger.error("Previous databag diff is of a wrong type.")
+        old_data = {}
+
     # Retrieve the new data from the event relation databag.
     new_data = (
         {key: value for key, value in event.relation.data[event.app].items() if key != "data"}
@@ -408,12 +424,16 @@ def diff(event: RelationChangedEvent, bucket: Union[Unit, Application]) -> Diff:
     )
 
     # These are the keys that were added to the databag and triggered this event.
-    added = new_data.keys() - old_data.keys()
+    added = new_data.keys() - old_data.keys()  # pyright: ignore [reportGeneralTypeIssues]
     # These are the keys that were removed from the databag and triggered this event.
-    deleted = old_data.keys() - new_data.keys()
+    deleted = old_data.keys() - new_data.keys()  # pyright: ignore [reportGeneralTypeIssues]
     # These are the keys that already existed in the databag,
     # but had their values changed.
-    changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]}
+    changed = {
+        key
+        for key in old_data.keys() & new_data.keys()  # pyright: ignore [reportGeneralTypeIssues]
+        if old_data[key] != new_data[key]  # pyright: ignore [reportGeneralTypeIssues]
+    }
     # Convert the new_data to a serializable format and save it for a next diff check.
     set_encoded_field(event.relation, bucket, "data", new_data)
 
@@ -426,6 +446,9 @@ def leader_only(f):
 
     def wrapper(self, *args, **kwargs):
         if not self.local_unit.is_leader():
+            logger.error(
+                "This operation (%s()) can only be performed by the leader unit", f.__name__
+            )
             return
         return f(self, *args, **kwargs)
 
@@ -587,11 +610,18 @@ def _get_relation_secret(
 
     @abstractmethod
     def _fetch_specific_relation_data(
-        self, relation, fields: Optional[List[str]]
+        self, relation: Relation, fields: Optional[List[str]]
     ) -> Dict[str, str]:
         """Fetch data available (directily or indirectly -- i.e. secrets) from the relation."""
         raise NotImplementedError
 
+    @abstractmethod
+    def _fetch_my_specific_relation_data(
+        self, relation: Relation, fields: Optional[List[str]]
+    ) -> Dict[str, str]:
+        """Fetch data available (directly or indirectly -- i.e. secrets) from the relation for owner/this_app."""
+        raise NotImplementedError
+
     # Internal helper methods
 
     @staticmethod
@@ -658,6 +688,22 @@ def _group_secret_fields(secret_fields: List[str]) -> Dict[SecretGroup, List[str
                 secret_fieldnames_grouped.setdefault(SecretGroup.EXTRA, []).append(key)
         return secret_fieldnames_grouped
 
+    def _retrieve_group_secret_contents(
+        self,
+        relation_id: int,
+        group: SecretGroup,
+        secret_fields: Optional[Union[Set[str], List[str]]] = None,
+    ) -> Dict[str, str]:
+        """Helper function to retrieve collective, requested contents of a secret."""
+        if not secret_fields:
+            secret_fields = []
+
+        if (secret := self._get_relation_secret(relation_id, group)) and (
+            secret_data := secret.get_content()
+        ):
+            return {k: v for k, v in secret_data.items() if k in secret_fields}
+        return {}
+
     @juju_secrets_only
     def _get_relation_secret_data(
         self, relation_id: int, group_mapping: SecretGroup, relation_name: Optional[str] = None
@@ -667,6 +713,72 @@ def _get_relation_secret_data(
         if secret:
             return secret.get_content()
 
+    def _fetch_relation_data_without_secrets(
+        self, app: Application, relation: Relation, fields: Optional[List[str]]
+    ) -> Dict[str, str]:
+        """Fetching databag contents when no secrets are involved.
+
+        Since the Provider's databag is the only one holding secrets, we can apply
+        a simplified workflow to read the Requires side's databag.
+        This is used typically when the Provides side wants to read the Requires side's data,
+        or when the Requires side may want to read its own data.
+        """
+        if fields:
+            return {k: relation.data[app][k] for k in fields if k in relation.data[app]}
+        else:
+            return dict(relation.data[app])
+
+    def _fetch_relation_data_with_secrets(
+        self,
+        app: Application,
+        req_secret_fields: Optional[List[str]],
+        relation: Relation,
+        fields: Optional[List[str]] = None,
+    ) -> Dict[str, str]:
+        """Fetching databag contents when secrets may be involved.
+
+        This function has internal logic to resolve if a requested field may be "hidden"
+        within a Relation Secret, or directly available as a databag field. Typically
+        used to read the Provides side's databag (either by the Requires side, or by
+        Provides side itself).
+        """
+        result = {}
+
+        normal_fields = fields
+        if not normal_fields:
+            normal_fields = list(relation.data[app].keys())
+
+        if req_secret_fields and self.secrets_enabled:
+            if fields:
+                # Processing from what was requested
+                normal_fields = set(fields) - set(req_secret_fields)
+                secret_fields = set(fields) - set(normal_fields)
+
+                secret_fieldnames_grouped = self._group_secret_fields(list(secret_fields))
+
+                for group in secret_fieldnames_grouped:
+                    if contents := self._retrieve_group_secret_contents(
+                        relation.id, group, secret_fields
+                    ):
+                        result.update(contents)
+                    else:
+                        # If it wasn't found as a secret, let's give it a 2nd chance as "normal" field
+                        normal_fields |= set(secret_fieldnames_grouped[group])
+            else:
+                # Processing from what is given, i.e. retrieving all
+                normal_fields = [
+                    f for f in relation.data[app].keys() if not self._is_secret_field(f)
+                ]
+                secret_fields = [f for f in relation.data[app].keys() if self._is_secret_field(f)]
+                for group in SecretGroup:
+                    result.update(
+                        self._retrieve_group_secret_contents(relation.id, group, req_secret_fields)
+                    )
+
+        # Processing "normal" fields. May include leftover from what we couldn't retrieve as a secret.
+        result.update({k: relation.data[app][k] for k in normal_fields if k in relation.data[app]})
+        return result
+
     # Public methods
 
     def get_relation(self, relation_name, relation_id) -> Relation:
@@ -716,6 +828,57 @@ def fetch_relation_data(
                 data[relation.id] = self._fetch_specific_relation_data(relation, fields)
         return data
 
+    def fetch_relation_field(
+        self, relation_id: int, field: str, relation_name: Optional[str] = None
+    ) -> Optional[str]:
+        """Get a single field from the relation data."""
+        return (
+            self.fetch_relation_data([relation_id], [field], relation_name)
+            .get(relation_id, {})
+            .get(field)
+        )
+
+    @leader_only
+    def fetch_my_relation_data(
+        self,
+        relation_ids: Optional[List[int]] = None,
+        fields: Optional[List[str]] = None,
+        relation_name: Optional[str] = None,
+    ) -> Optional[Dict[int, Dict[str, str]]]:
+        """Fetch data of the 'owner' (or 'this app') side of the relation.
+
+        NOTE: Since only the leader can read the relation's 'this_app'-side
+        Application databag, the functionality is limited to leaders
+        """
+        if not relation_name:
+            relation_name = self.relation_name
+
+        relations = []
+        if relation_ids:
+            relations = [
+                self.get_relation(relation_name, relation_id) for relation_id in relation_ids
+            ]
+        else:
+            relations = self.relations
+
+        data = {}
+        for relation in relations:
+            if not relation_ids or relation.id in relation_ids:
+                data[relation.id] = self._fetch_my_specific_relation_data(relation, fields)
+        return data
+
+    @leader_only
+    def fetch_my_relation_field(
+        self, relation_id: int, field: str, relation_name: Optional[str] = None
+    ) -> Optional[str]:
+        """Get a single field from the relation data -- owner side.
+
+        NOTE: Since only the leader can read the relation's 'this_app'-side
+        Application databag, the functionality is limited to leaders
+        """
+        if relation_data := self.fetch_my_relation_data([relation_id], [field], relation_name):
+            return relation_data.get(relation_id, {}).get(field)
+
     # Public methods - mandatory override
 
     @abstractmethod
@@ -823,18 +986,32 @@ def _get_relation_secret(
         if secret_uri := relation.data[self.local_app].get(secret_field):
             return self.secrets.get(label, secret_uri)
 
-    def _fetch_specific_relation_data(self, relation, fields: Optional[List[str]]) -> dict:
+    def _fetch_specific_relation_data(
+        self, relation: Relation, fields: Optional[List[str]]
+    ) -> Dict[str, str]:
         """Fetching relation data for Provides.
 
-        NOTE: Since all secret fields are in the Requires side of the databag, we don't need to worry about that
+        NOTE: Since all secret fields are in the Provides side of the databag, we don't need to worry about that
         """
         if not relation.app:
             return {}
 
-        if fields:
-            return {k: relation.data[relation.app].get(k) for k in fields}
-        else:
-            return relation.data[relation.app]
+        return self._fetch_relation_data_without_secrets(relation.app, relation, fields)
+
+    def _fetch_my_specific_relation_data(
+        self, relation: Relation, fields: Optional[List[str]]
+    ) -> dict:
+        """Fetching our own relation data."""
+        secret_fields = None
+        if relation.app:
+            secret_fields = get_encoded_field(relation, relation.app, REQ_SECRET_FIELDS)
+
+        return self._fetch_relation_data_with_secrets(
+            self.local_app,
+            secret_fields if isinstance(secret_fields, list) else None,
+            relation,
+            fields,
+        )
 
     # Public methods -- mandatory overrides
 
@@ -843,7 +1020,10 @@ def update_relation_data(self, relation_id: int, fields: Dict[str, str]) -> None
         """Set values for fields not caring whether it's a secret or not."""
         relation = self.get_relation(self.relation_name, relation_id)
 
-        relation_secret_fields = get_encoded_field(relation, relation.app, REQ_SECRET_FIELDS)
+        if relation.app:
+            relation_secret_fields = get_encoded_field(relation, relation.app, REQ_SECRET_FIELDS)
+        else:
+            relation_secret_fields = []
 
         normal_fields = list(fields)
         if relation_secret_fields and self.secrets_enabled:
@@ -1021,22 +1201,6 @@ def is_resource_created(self, relation_id: Optional[int] = None) -> bool:
                 else False
             )
 
-    def _retrieve_group_secret_contents(
-        self,
-        relation_id,
-        group: SecretGroup,
-        secret_fields: Optional[Union[Set[str], List[str]]] = None,
-    ) -> Dict[str, str]:
-        """Helper function to retrieve collective, requested contents of a secret."""
-        if not secret_fields:
-            secret_fields = []
-
-        if (secret := self._get_relation_secret(relation_id, group)) and (
-            secret_data := secret.get_content()
-        ):
-            return {k: v for k, v in secret_data.items() if k in secret_fields}
-        return {}
-
     # Event handlers
 
     def _on_relation_created_event(self, event: RelationCreatedEvent) -> None:
@@ -1070,49 +1234,16 @@ def _get_relation_secret(
     def _fetch_specific_relation_data(
         self, relation, fields: Optional[List[str]] = None
     ) -> Dict[str, str]:
+        """Fetching Requires data -- that may include secrets."""
         if not relation.app:
             return {}
+        return self._fetch_relation_data_with_secrets(
+            relation.app, self.secret_fields, relation, fields
+        )
 
-        result = {}
-
-        normal_fields = fields
-        if not normal_fields:
-            normal_fields = list(relation.data[relation.app].keys())
-
-        if self.secret_fields and self.secrets_enabled:
-            if fields:
-                # Processing from what was requested
-                normal_fields = set(fields) - set(self.secret_fields)
-                secret_fields = set(fields) - set(normal_fields)
-
-                secret_fieldnames_grouped = self._group_secret_fields(list(secret_fields))
-
-                for group in secret_fieldnames_grouped:
-                    if contents := self._retrieve_group_secret_contents(
-                        relation.id, group, secret_fields
-                    ):
-                        result.update(contents)
-                    else:
-                        # If it wasn't found as a secret, let's give it a 2nd chance as "normal" field
-                        normal_fields |= set(secret_fieldnames_grouped[group])
-            else:
-                # Processing from what is given, i.e. retrieving all
-                normal_fields = [
-                    f for f in relation.data[relation.app].keys() if not self._is_secret_field(f)
-                ]
-                secret_fields = [
-                    f for f in relation.data[relation.app].keys() if self._is_secret_field(f)
-                ]
-                for group in SecretGroup:
-                    result.update(
-                        self._retrieve_group_secret_contents(
-                            relation.id, group, self.secret_fields
-                        )
-                    )
-
-        # Processing "normal" fields. May include leftover from what we couldn't retrieve as a secret.
-        result.update({k: relation.data[relation.app].get(k) for k in normal_fields})
-        return result
+    def _fetch_my_specific_relation_data(self, relation, fields: Optional[List[str]]) -> dict:
+        """Fetching our own relation data."""
+        return self._fetch_relation_data_without_secrets(self.local_app, relation, fields)
 
     # Public methods -- mandatory overrides
 
@@ -1135,18 +1266,6 @@ def update_relation_data(self, relation_id: int, data: dict) -> None:
         if relation:
             relation.data[self.local_app].update(data)
 
-    # "Native" public methods
-
-    def fetch_relation_field(
-        self, relation_id: int, field: str, relation_name: Optional[str] = None
-    ) -> Optional[str]:
-        """Get a single field from the relation data."""
-        return (
-            self.fetch_relation_data([relation_id], [field], relation_name)
-            .get(relation_id, {})
-            .get(field)
-        )
-
 
 # General events
 
diff --git a/poetry.lock b/poetry.lock
index 0d5fb3a10..f5d7bccc5 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -660,6 +660,21 @@ pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
 reauth = ["pyu2f (>=0.1.5)"]
 requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
 
+[[package]]
+name = "hvac"
+version = "1.2.1"
+description = "HashiCorp Vault API client"
+optional = false
+python-versions = ">=3.6.2,<4.0.0"
+files = [
+    {file = "hvac-1.2.1-py3-none-any.whl", hash = "sha256:cb87f5724be8fd5f57507f5d5a94e6c42d2675128b460bf3186f966e07d4db78"},
+    {file = "hvac-1.2.1.tar.gz", hash = "sha256:c786e3dfa1f35239810e5317cccadbe358f49b8c9001a1f2f68b79a250b9f8a1"},
+]
+
+[package.dependencies]
+pyhcl = ">=0.4.4,<0.5.0"
+requests = ">=2.27.1,<3.0.0"
+
 [[package]]
 name = "idna"
 version = "3.4"
@@ -837,38 +852,25 @@ referencing = ">=0.28.0"
 
 [[package]]
 name = "juju"
-version = "2.9.44.0"
+version = "3.2.2"
 description = "Python library for Juju"
 optional = false
 python-versions = "*"
 files = [
-    {file = "juju-2.9.44.0.tar.gz", hash = "sha256:bc71fe0c8fd59ee00f0c3b03066682cd2273f299c36135451abb1a81289e68f9"},
+    {file = "juju-3.2.2.tar.gz", hash = "sha256:b6f51c62b605bc8bd56842892d31cdb91d26879e49641380cd67c423f69fb1bb"},
 ]
 
 [package.dependencies]
+hvac = "*"
 kubernetes = ">=12.0.1"
 macaroonbakery = ">=1.1,<2.0"
 paramiko = ">=2.4.0,<3.0.0"
 pyasn1 = ">=0.4.4"
 pyRFC3339 = ">=1.0,<2.0"
 pyyaml = ">=5.1.2"
-theblues = ">=0.5.1,<1.0"
 toposort = ">=1.5,<2"
 typing_inspect = ">=0.6.0"
-websockets = {version = ">=9.0", markers = "python_version > \"3.9\""}
-
-[[package]]
-name = "jujubundlelib"
-version = "0.5.7"
-description = "A python library for working with Juju bundles"
-optional = false
-python-versions = "*"
-files = [
-    {file = "jujubundlelib-0.5.7.tar.gz", hash = "sha256:7e2b1a679faab13c4d56256e31e0cc616d55841abd32598951735bf395ca47e3"},
-]
-
-[package.dependencies]
-PyYAML = ">=3.11"
+websockets = {version = ">=10.0", markers = "python_version > \"3.9\""}
 
 [[package]]
 name = "kubernetes"
@@ -941,6 +943,16 @@ files = [
     {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
     {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
     {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+    {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
     {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
     {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
     {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -1433,6 +1445,17 @@ files = [
 [package.extras]
 plugins = ["importlib-metadata"]
 
+[[package]]
+name = "pyhcl"
+version = "0.4.5"
+description = "HCL configuration parser for python"
+optional = false
+python-versions = "*"
+files = [
+    {file = "pyhcl-0.4.5-py3-none-any.whl", hash = "sha256:30ee337d330d1f90c9f5ed8f49c468f66c8e6e43192bdc7c6ece1420beb3070c"},
+    {file = "pyhcl-0.4.5.tar.gz", hash = "sha256:c47293a51ccdd25e18bb5c8c0ab0ffe355b37c87f8d6f9d3280dc41efd4740bc"},
+]
+
 [[package]]
 name = "pymacaroons"
 version = "0.13.0"
@@ -1577,6 +1600,23 @@ reference = "v5.0.0"
 resolved_reference = "3ffc460ad9d3a1280e29e32a4419456f4772c385"
 subdirectory = "python/pytest_plugins/github_secrets"
 
+[[package]]
+name = "pytest-mock"
+version = "3.11.1"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"},
+    {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"},
+]
+
+[package.dependencies]
+pytest = ">=5.0"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
 [[package]]
 name = "pytest-operator"
 version = "0.28.0"
@@ -1671,6 +1711,7 @@ files = [
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
     {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
     {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
     {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1678,8 +1719,15 @@ files = [
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
     {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
     {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
     {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
     {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
     {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -1696,6 +1744,7 @@ files = [
     {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
     {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
     {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+    {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
     {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
     {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
     {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -1703,6 +1752,7 @@ files = [
     {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
     {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
     {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+    {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
     {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
     {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
     {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -1983,21 +2033,6 @@ files = [
 [package.extras]
 doc = ["reno", "sphinx", "tornado (>=4.5)"]
 
-[[package]]
-name = "theblues"
-version = "0.5.2"
-description = "Python library for using the juju charm store API."
-optional = false
-python-versions = "*"
-files = [
-    {file = "theblues-0.5.2.tar.gz", hash = "sha256:a9aded6b151c67d83eb9adcbcb38640872d9f29db985053259afd2fc012e5ed9"},
-]
-
-[package.dependencies]
-jujubundlelib = ">=0.5.1"
-macaroonbakery = ">=0.0.6"
-requests = ">=2.18.4"
-
 [[package]]
 name = "tomli"
 version = "2.0.1"
@@ -2186,4 +2221,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "48f21199e8d41f4fd624329a3ef02cffb9afe9eb953b7a9d9bccf4bfbe796b15"
+content-hash = "66256d13f4d0a9a1165886231a1c4de39ccb0ce67b384673e347498f56dee50b"
diff --git a/pyproject.toml b/pyproject.toml
index 1888271e7..ca20fed5f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -56,6 +56,7 @@ shellcheck-py = "^0.9.0.5"
 
 [tool.poetry.group.unit.dependencies]
 pytest = "^7.4.0"
+pytest-mock = "^3.11.1"
 coverage = {extras = ["toml"], version = "^7.2.7"}
 
 [tool.poetry.group.integration.dependencies]
@@ -64,7 +65,9 @@ pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workf
 pytest-operator = "^0.28.0"
 pytest-operator-cache = {git = "https://github.com/canonical/data-platform-workflows", tag = "v5.0.0", subdirectory = "python/pytest_plugins/pytest_operator_cache"}
 pytest-operator-groups = {git = "https://github.com/canonical/data-platform-workflows", tag = "v5.0.0", subdirectory = "python/pytest_plugins/pytest_operator_groups"}
-juju = "^2.9.44.0"
+juju = "^3.2.2"
+ops = "^2.5.0"
+pytest-mock = "^3.11.1"
 mysql-connector-python = "~8.0.33"
 tenacity = "^8.2.2"
 boto3 = "^1.28.11"
diff --git a/src/relations/mysql_provider.py b/src/relations/mysql_provider.py
index 6b87d4329..5a5b01ae0 100644
--- a/src/relations/mysql_provider.py
+++ b/src/relations/mysql_provider.py
@@ -187,10 +187,10 @@ def _get_or_set_password(self, relation) -> str:
         Returns:
             str: The password.
         """
-        if password := relation.data[self.charm.app].get("password"):
+        if password := self.database.fetch_my_relation_field(relation.id, "password"):
             return password
         password = generate_random_password(PASSWORD_LENGTH)
-        relation.data[self.charm.app]["password"] = password
+        self.database.update_relation_data(relation.id, {"password": password})
         return password
 
     def _on_database_requested(self, event: DatabaseRequestedEvent):
diff --git a/tests/conftest.py b/tests/conftest.py
index 7e438b377..8e5962646 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -2,6 +2,49 @@
 # See LICENSE file for licensing details.
 
 import argparse
+import os
+from unittest.mock import PropertyMock
+
+import pytest
+from ops import JujuVersion
+from pytest_mock import MockerFixture
+
+
+@pytest.fixture(autouse=True)
+def juju_has_secrets(mocker: MockerFixture):
+    """This fixture will force the usage of secrets whenever run on Juju 3.x.
+
+    NOTE: This is needed, as normally JujuVersion is set to 0.0.0 in tests
+    (i.e. not the real juju version)
+    """
+    juju_version = os.environ["LIBJUJU_VERSION_SPECIFIER"].split("/")[0]
+    if juju_version < "3":
+        mocker.patch.object(
+            JujuVersion, "has_secrets", new_callable=PropertyMock
+        ).return_value = False
+        return False
+    else:
+        mocker.patch.object(
+            JujuVersion, "has_secrets", new_callable=PropertyMock
+        ).return_value = True
+        return True
+
+
+@pytest.fixture
+def only_with_juju_secrets(juju_has_secrets):
+    """Pretty way to restrict a test to Juju 3 (skipped when Juju secrets are unavailable)."""
+    if not juju_has_secrets:
+        pytest.skip("Secrets test only applies on Juju 3.x")
+
+
+@pytest.fixture
+def only_without_juju_secrets(juju_has_secrets):
+    """Pretty way to restrict a test to Juju 2 (skipped when Juju secrets are available).
+
+    Typically used to save CI time, when the same check was already executed in a Juju 3-specific way.
+    """
+    if juju_has_secrets:
+        pytest.skip("Skipping legacy secrets tests")
 
 
 def pytest_addoption(parser):
diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py
index d3dce10bc..fe276057a 100644
--- a/tests/integration/helpers.py
+++ b/tests/integration/helpers.py
@@ -18,6 +18,7 @@
     OperationalError,
     ProgrammingError,
 )
+from ops import JujuVersion
 from pytest_operator.plugin import OpsTest
 from tenacity import RetryError, Retrying, retry, stop_after_attempt, wait_fixed
 
@@ -42,8 +43,15 @@ async def run_command_on_unit(unit, command: str) -> Optional[str]:
         command execution output or none if
         the command produces no output.
     """
-    action = await unit.run(command)
-    return action.results.get("Stdout", None)
+    juju_version = JujuVersion.from_environ()
+
+    # Syntax changed across Juju major versions
+    if juju_version.has_secrets:
+        action = await unit.run(command, block=True)
+        return action.results.get("stdout", None)
+    else:
+        action = await unit.run(command)
+        return action.results.get("Stdout", None)
 
 
 def generate_random_string(length: int) -> str:
diff --git a/tests/integration/relations/test_database.py b/tests/integration/relations/test_database.py
index 7478fd90a..2b3ab7a2b 100644
--- a/tests/integration/relations/test_database.py
+++ b/tests/integration/relations/test_database.py
@@ -204,6 +204,27 @@ async def test_password_rotation_root_user_implicit(ops_test: OpsTest):
 
 @pytest.mark.group(1)
 @pytest.mark.abort_on_fail
+@pytest.mark.usefixtures("only_without_juju_secrets")
+async def test_relation_creation_databag(ops_test: OpsTest):
+    """Relate charms and wait for the expected changes in status."""
+    await ops_test.model.relate(
+        f"{APPLICATION_APP_NAME}:{ENDPOINT}", f"{DATABASE_APP_NAME}:{ENDPOINT}"
+    )
+
+    async with ops_test.fast_forward("60s"):
+        await ops_test.model.block_until(
+            lambda: is_relation_joined(ops_test, ENDPOINT, ENDPOINT) == True  # noqa: E712
+        )
+
+        await ops_test.model.wait_for_idle(apps=APPS, status="active")
+
+    relation_data = await get_relation_data(ops_test, APPLICATION_APP_NAME, "database")
+    assert set(["password", "username"]) <= set(relation_data[0]["application-data"])
+
+
+@pytest.mark.group(1)
+@pytest.mark.abort_on_fail
+@pytest.mark.usefixtures("only_with_juju_secrets")
 async def test_relation_creation(ops_test: OpsTest):
     """Relate charms and wait for the expected changes in status."""
     await ops_test.model.relate(
@@ -217,6 +238,10 @@ async def test_relation_creation(ops_test: OpsTest):
 
         await ops_test.model.wait_for_idle(apps=APPS, status="active")
 
+    relation_data = await get_relation_data(ops_test, APPLICATION_APP_NAME, "database")
+    assert not set(["password", "username"]) <= set(relation_data[0]["application-data"])
+    assert "secret-user" in relation_data[0]["application-data"]
+
 
 @pytest.mark.group(1)
 @pytest.mark.abort_on_fail
diff --git a/tests/integration/test_backups.py b/tests/integration/test_backups.py
index d8d17630f..33ba622ad 100644
--- a/tests/integration/test_backups.py
+++ b/tests/integration/test_backups.py
@@ -7,6 +7,7 @@
 
 import boto3
 import pytest
+from ops import JujuVersion
 from pytest_operator.plugin import OpsTest
 
 from .helpers import (
@@ -246,7 +247,12 @@ async def test_restore_on_same_cluster(
             action_name="restore", **{"backup-id": backups_by_cloud[cloud_name]}
         )
         result = await action.wait()
-        assert result.results.get("Code") == "0"
+
+        # Syntax changed across Juju major versions
+        if JujuVersion.from_environ().has_secrets:
+            assert result.results.get("return-code") == 0
+        else:
+            assert result.results.get("Code") == "0"
 
         # ensure the correct inserted values exist
         logger.info(
@@ -367,7 +373,11 @@ async def test_restore_on_new_cluster(
             action_name="restore", **{"backup-id": backups_by_cloud[cloud_name]}
         )
         result = await action.wait()
-        assert result.results.get("Code") == "0"
+
+        if JujuVersion.from_environ().has_secrets:
+            assert result.results.get("return-code") == 0
+        else:
+            assert result.results.get("Code") == "0"
 
         # ensure the correct inserted values exist
         logger.info(
diff --git a/tests/unit/test_backups.py b/tests/unit/test_backups.py
index 68d393107..0f58af6a3 100644
--- a/tests/unit/test_backups.py
+++ b/tests/unit/test_backups.py
@@ -151,7 +151,6 @@ def test_on_list_backups_failure(self, _list_backups_in_s3_path, _retrieve_s3_pa
     @patch(
         "charms.mysql.v0.backups.MySQLBackups._can_unit_perform_backup", return_value=(True, None)
     )
-    @patch("ops.jujuversion.JujuVersion.from_environ", return_value=MagicMock())
     @patch("charms.mysql.v0.backups.upload_content_to_s3")
     @patch("charms.mysql.v0.backups.MySQLBackups._pre_backup", return_value=(True, None))
     @patch("charms.mysql.v0.backups.MySQLBackups._backup", return_value=(True, None))
@@ -164,21 +163,18 @@ def test_on_create_backup(
         _backup,
         _pre_backup,
         _upload_content_to_s3,
-        _from_environ,
         _can_unit_perform_backup,
         _retrieve_s3_parameters,
         _datetime,
     ):
         """Test _on_create_backup()."""
-        _from_environ.return_value.__str__.return_value = "test-juju-version"
-
         _datetime.now.return_value.strftime.return_value = "2023-03-07%13:43:15Z"
 
         expected_metadata = f"""Date Backup Requested: 2023-03-07%13:43:15Z
 Model Name: {self.charm.model.name}
 Application Name: {self.charm.model.app.name}
 Unit Name: {self.charm.unit.name}
-Juju Version: test-juju-version
+Juju Version: 0.0.0
 """
         expected_backup_path = "/path/2023-03-07%13:43:15Z"
         expected_s3_params = {"path": "/path"}
@@ -189,7 +185,6 @@ def test_on_create_backup(
 
         _retrieve_s3_parameters.assert_called_once()
         _can_unit_perform_backup.assert_called_once()
-        _from_environ.assert_called()
         _upload_content_to_s3.assert_called_once_with(
             expected_metadata, f"{expected_backup_path}.metadata", expected_s3_params
         )
@@ -209,7 +204,6 @@ def test_on_create_backup(
     @patch(
         "charms.mysql.v0.backups.MySQLBackups._can_unit_perform_backup", return_value=(True, None)
     )
-    @patch("ops.jujuversion.JujuVersion.from_environ", return_value=MagicMock())
     @patch("charms.mysql.v0.backups.upload_content_to_s3")
     @patch("charms.mysql.v0.backups.MySQLBackups._pre_backup", return_value=(True, None))
     @patch("charms.mysql.v0.backups.MySQLBackups._backup", return_value=(True, None))
@@ -222,14 +216,11 @@ def test_on_create_backup_failure(
         _backup,
         _pre_backup,
         _upload_content_to_s3,
-        _from_environ,
         _can_unit_perform_backup,
         _retrieve_s3_parameters,
         _datetime,
     ):
         """Test failure of _on_create_backup()."""
-        _from_environ.return_value.__str__.return_value = "test-juju-version"
-
         _datetime.now.return_value.strftime.return_value = "2023-03-07%13:43:15Z"
 
         # test failure with _post_backup
diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py
index 63c9264e3..7de67d2a4 100644
--- a/tests/unit/test_charm.py
+++ b/tests/unit/test_charm.py
@@ -4,6 +4,7 @@
 import unittest
 from unittest.mock import patch
 
+import pytest
 from charms.mysql.v0.mysql import (
     MySQLConfigureInstanceError,
     MySQLConfigureMySQLUsersError,
@@ -69,6 +70,7 @@ def test_on_install_exception(
 
         self.assertTrue(isinstance(self.harness.model.unit.status, BlockedStatus))
 
+    @pytest.mark.usefixtures("only_without_juju_secrets")
     def test_on_leader_elected_sets_mysql_passwords_in_peer_databag(self):
         # ensure that the peer relation databag is empty
         peer_relation_databag = self.harness.get_relation_data(
@@ -96,6 +98,33 @@ def test_on_leader_elected_sets_mysql_passwords_in_peer_databag(self):
             sorted(peer_relation_databag.keys()), sorted(expected_peer_relation_databag_keys)
         )
 
+    @pytest.mark.usefixtures("only_with_juju_secrets")
+    def test_on_leader_elected_sets_mysql_passwords_secret(self):
+        # ensure that the peer relation databag is empty
+        peer_relation_databag = self.harness.get_relation_data(
+            self.peer_relation_id, self.harness.charm.app
+        )
+        self.assertEqual(peer_relation_databag, {})
+
+        # trigger the leader_elected event
+        self.harness.set_leader(True)
+
+        # ensure passwords set in the peer relation databag
+        secret_id = self.harness.get_relation_data(self.peer_relation_id, self.harness.charm.app)[
+            "secret-id"
+        ]
+
+        expected_peer_relation_databag_keys = [
+            "root-password",
+            "server-config-password",
+            "cluster-admin-password",
+            "monitoring-password",
+            "backups-password",
+        ]
+
+        secret_data = self.harness.model.get_secret(id=secret_id).get_content()
+        self.assertEqual(sorted(secret_data.keys()), sorted(expected_peer_relation_databag_keys))
+
     @patch_network_get(private_address="1.1.1.1")
     def test_on_leader_elected_sets_config_cluster_name_in_peer_databag(self):
         # ensure that the peer relation databag is empty
@@ -281,9 +310,10 @@ def test_get_secret(self, _):
         )
         assert self.charm.get_secret("unit", "password") == "test-password"
 
+    @pytest.mark.usefixtures("only_without_juju_secrets")
     @patch_network_get(private_address="1.1.1.1")
     @patch("charm.MySQLOperatorCharm._on_leader_elected")
-    def test_set_secret(self, _):
+    def test_set_secret_databag(self, _):
         self.harness.set_leader()
 
         # Test application scope.
@@ -306,6 +336,42 @@ def test_set_secret(self, _):
             == "test-password"
         )
 
+    @pytest.mark.usefixtures("only_with_juju_secrets")
+    @patch_network_get(private_address="1.1.1.1")
+    @patch("charm.MySQLOperatorCharm._on_leader_elected")
+    def test_set_secret(self, _):
+        self.harness.set_leader()
+
+        # Test application scope.
+        assert "password" not in self.harness.get_relation_data(
+            self.peer_relation_id, self.charm.app.name
+        )
+        self.charm.set_secret("app", "password", "test-password")
+        secret_id = self.harness.get_relation_data(self.peer_relation_id, self.charm.app.name)[
+            "secret-id"
+        ]
+        secret_data = self.harness.model.get_secret(id=secret_id).get_content()
+        assert secret_data["password"] == "test-password"
+
+        assert "password" not in self.harness.get_relation_data(
+            self.peer_relation_id, self.charm.app.name
+        )
+
+        # Test unit scope.
+        assert "password" not in self.harness.get_relation_data(
+            self.peer_relation_id, self.charm.unit.name
+        )
+        self.charm.set_secret("unit", "password", "test-password")
+        secret_id = self.harness.get_relation_data(self.peer_relation_id, self.charm.unit.name)[
+            "secret-id"
+        ]
+        secret_data = self.harness.model.get_secret(id=secret_id).get_content()
+        assert secret_data["password"] == "test-password"
+
+        assert "password" not in self.harness.get_relation_data(
+            self.peer_relation_id, self.charm.unit.name
+        )
+
     @patch_network_get(private_address="1.1.1.1")
     @patch("mysql_vm_helpers.MySQL.get_cluster_node_count", return_value=1)
     @patch("mysql_vm_helpers.MySQL.get_member_state")
diff --git a/tests/unit/test_relation_mysql_legacy.py b/tests/unit/test_relation_mysql_legacy.py
index 48192692b..54913ee44 100644
--- a/tests/unit/test_relation_mysql_legacy.py
+++ b/tests/unit/test_relation_mysql_legacy.py
@@ -4,6 +4,7 @@
 import unittest
 from unittest.mock import patch
 
+import pytest
 from ops.testing import Harness
 
 from charm import MySQLOperatorCharm
@@ -21,6 +22,7 @@ def setUp(self):
         self.harness.add_relation_unit(self.peer_relation_id, "mysql/1")
         self.charm = self.harness.charm
 
+    @pytest.mark.usefixtures("only_without_juju_secrets")
     @patch_network_get(private_address="1.1.1.1")
     @patch("mysql_vm_helpers.MySQL.does_mysql_user_exist", return_value=False)
     @patch("mysql_vm_helpers.MySQL.get_cluster_primary_address", return_value="1.1.1.1:3306")
@@ -76,6 +78,66 @@ def test_maria_db_relation_created(
             },
         )
 
+    @pytest.mark.usefixtures("only_with_juju_secrets")
+    @patch_network_get(private_address="1.1.1.1")
+    @patch("mysql_vm_helpers.MySQL.does_mysql_user_exist", return_value=False)
+    @patch("mysql_vm_helpers.MySQL.get_cluster_primary_address", return_value="1.1.1.1:3306")
+    @patch(
+        "relations.mysql.MySQLRelation._get_or_set_password_in_peer_secrets",
+        return_value="super_secure_password",
+    )
+    @patch("mysql_vm_helpers.MySQL.create_application_database_and_scoped_user")
+    def test_maria_db_relation_created_with_secrets(
+        self,
+        _create_application_database_and_scoped_user,
+        _get_or_set_password_in_peer_secrets,
+        _get_cluster_primary_address,
+        _does_mysql_user_exist,
+    ):
+        # run start-up events to enable usage of the helper class
+        self.harness.set_leader(True)
+        self.charm.on.config_changed.emit()
+        self.charm.unit_peer_data["unit-initialized"] = "True"
+        self.harness.update_config(
+            {"mysql-interface-user": "mysql", "mysql-interface-database": "default_database"}
+        )
+
+        # Relate to emit relation created event
+        self.maria_db_relation_id = self.harness.add_relation(LEGACY_MYSQL, "other-app")
+        self.harness.add_relation_unit(self.maria_db_relation_id, "other-app/0")
+
+        self.assertEqual(_get_or_set_password_in_peer_secrets.call_count, 1)
+        _create_application_database_and_scoped_user.assert_called_once_with(
+            "default_database",
+            "mysql",
+            "super_secure_password",
+            "%",
+            unit_name="mysql-legacy-relation",
+        )
+
+        _get_cluster_primary_address.assert_called_once()
+        _does_mysql_user_exist.assert_called_once_with("mysql", "%")
+
+        maria_db_relation = self.charm.model.get_relation(LEGACY_MYSQL)
+        peer_relation = self.charm.model.get_relation(PEER)
+        secret_id = self.harness.get_relation_data(peer_relation.id, self.harness.charm.app.name)[
+            "secret-id"
+        ]
+        root_pw = self.harness.model.get_secret(id=secret_id).get_content()["root-password"]
+
+        # confirm that the relation databag is populated
+        self.assertEqual(
+            maria_db_relation.data.get(self.charm.unit),
+            {
+                "database": "default_database",
+                "host": "1.1.1.1",
+                "password": "super_secure_password",
+                "port": "3306",
+                "root_password": root_pw,
+                "user": "mysql",
+            },
+        )
+
     @patch_network_get(private_address="1.1.1.1")
     @patch("mysql_vm_helpers.MySQL.does_mysql_user_exist", return_value=False)
     @patch("mysql_vm_helpers.MySQL.get_cluster_primary_address", return_value="1.1.1.1:3306")
diff --git a/tox.ini b/tox.ini
index ed74e208c..4e9d9dd6c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -60,6 +60,9 @@ commands =
 
 [testenv:unit]
 description = Run unit tests
+set_env =
+    {[testenv]set_env}
+    LIBJUJU_VERSION_SPECIFIER = {env:LIBJUJU_VERSION_SPECIFIER:3.2.2}
 commands_pre =
     poetry install --only main,charm-libs,unit
 commands =
@@ -74,6 +77,7 @@ set_env =
     {[testenv]set_env}
     # Workaround for https://github.com/python-poetry/poetry/issues/6958
     POETRY_INSTALLER_PARALLEL = false
+    LIBJUJU_VERSION_SPECIFIER = {env:LIBJUJU_VERSION_SPECIFIER:3.2.2}
 pass_env =
     CI
     GITHUB_OUTPUT