Fix dbuf_assign_arcbuf() with BRT clone and O_TRUNC #15139

13 changes: 12 additions & 1 deletion module/zfs/dbuf.c
@@ -2945,7 +2945,8 @@ dbuf_assign_arcbuf(dmu_buf_impl_t *db, arc_buf_t *buf, dmu_tx_t *tx)
while (db->db_state == DB_READ || db->db_state == DB_FILL)
cv_wait(&db->db_changed, &db->db_mtx);

ASSERT(db->db_state == DB_CACHED || db->db_state == DB_UNCACHED);
ASSERT(db->db_state == DB_CACHED || db->db_state == DB_UNCACHED ||
db->db_state == DB_NOFILL);

if (db->db_state == DB_CACHED &&
zfs_refcount_count(&db->db_holds) - 1 > db->db_dirtycnt) {
@@ -2982,7 +2983,17 @@ dbuf_assign_arcbuf(dmu_buf_impl_t *db, arc_buf_t *buf, dmu_tx_t *tx)
arc_buf_destroy(db->db_buf, db);
}
db->db_buf = NULL;
} else if (db->db_state == DB_NOFILL) {
/*
* We will be completely replacing the cloned block. In case
* it was cloned in this transaction group, let's undirty the
* pending clone and mark the block as uncached. This will be
* as if the clone was never done.
*/
VERIFY(!dbuf_undirty(db, tx));
db->db_state = DB_UNCACHED;
}

ASSERT(db->db_buf == NULL);
dbuf_set_data(db, buf);
db->db_state = DB_FILL;
3 changes: 2 additions & 1 deletion tests/runfiles/linux.run
@@ -44,7 +44,8 @@ tests = ['block_cloning_copyfilerange', 'block_cloning_copyfilerange_partial',
'block_cloning_copyfilerange_cross_dataset',
'block_cloning_cross_enc_dataset',
'block_cloning_copyfilerange_fallback_same_txg',
'block_cloning_replay', 'block_cloning_replay_encrypted']
'block_cloning_replay', 'block_cloning_replay_encrypted',
'block_cloning_ficlone_and_write']
tags = ['functional', 'block_cloning']

[tests/functional/chattr:Linux]
2 changes: 2 additions & 0 deletions tests/test-runner/bin/zts-report.py.in
@@ -311,6 +311,8 @@ elif sys.platform.startswith('linux'):
['SKIP', cfr_cross_reason],
'block_cloning/block_cloning_cross_enc_dataset':
['SKIP', cfr_cross_reason],
'block_cloning/block_cloning_ficlone_and_write':
['SKIP', cfr_reason],
})

# Not all Github actions runners have scsi_debug module, so we may skip
1 change: 1 addition & 0 deletions tests/zfs-tests/tests/Makefile.am
@@ -454,6 +454,7 @@ nobase_dist_datadir_zfs_tests_tests_SCRIPTS += \
functional/block_cloning/block_cloning_cross_enc_dataset.ksh \
functional/block_cloning/block_cloning_replay.ksh \
functional/block_cloning/block_cloning_replay_encrypted.ksh \
functional/block_cloning/block_cloning_ficlone_and_write.ksh \
functional/bootfs/bootfs_001_pos.ksh \
functional/bootfs/bootfs_002_neg.ksh \
functional/bootfs/bootfs_003_pos.ksh \
80 changes: 80 additions & 0 deletions tests/zfs-tests/tests/functional/block_cloning/block_cloning_ficlone_and_write.ksh
@@ -0,0 +1,80 @@
#!/bin/ksh -p
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or https://opensource.org/licenses/CDDL-1.0.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#

#
# Copyright (c) 2023, Kay Pedersen <mail@mkwg.de>
#

. $STF_SUITE/include/libtest.shlib
. $STF_SUITE/tests/functional/block_cloning/block_cloning.kshlib

verify_runnable "global"

if [[ $(linux_version) -lt $(linux_version "4.5") ]]; then
log_unsupported "copy_file_range not available before Linux 4.5"
fi

claim="O_TRUNC, writing and FICLONE to a large (>4G) file shouldn't fail"

log_assert $claim

NO_LOOP_BREAK=true

function cleanup
{
datasetexists $TESTPOOL && destroy_pool $TESTPOOL
}

function loop
{
while $NO_LOOP_BREAK; do clonefile -c -t -q /$TESTPOOL/file /$TESTPOOL/clone; done
Contributor:
In the CI environment it appears this loop can completely starve out the dd's below. Can we both reduce the size of this file and cap the number of iterations here and still have this test be useful?
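One way the loop could be capped (a sketch only; the 100-iteration limit below is an illustrative value, not part of this PR):

function loop
{
	typeset -i i=0
	# Stop either when the writer finishes or after a fixed number of
	# clone attempts, so this loop cannot starve the concurrent dd's.
	while $NO_LOOP_BREAK && (( i < 100 )); do
		clonefile -c -t -q /$TESTPOOL/file /$TESTPOOL/clone
		(( i++ ))
	done
}

Reducing the file size, as the comment also suggests, would be a separate tweak (for example a smaller dd count); note that the claim string above mentions a >4G file, so it would need adjusting to match.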

}

log_onexit cleanup

log_must zpool create -o feature@block_cloning=enabled $TESTPOOL $DISKS

log_must dd if=/dev/urandom of=/$TESTPOOL/file bs=1M count=4000
log_must sync_pool $TESTPOOL


log_note "Copying entire file with FICLONE"

log_must clonefile -c /$TESTPOOL/file /$TESTPOOL/clone
log_must sync_pool $TESTPOOL

log_must have_same_content /$TESTPOOL/file /$TESTPOOL/clone

log_note "looping a clone"
loop &

log_must dd if=/dev/urandom of=/$TESTPOOL/clone bs=1M count=4000
log_must dd if=/dev/urandom of=/$TESTPOOL/clone bs=1M count=4000

NO_LOOP_BREAK=false

# just to be sure all background jobs are killed.
log_must kill $(jobs -p)

log_must sync_pool $TESTPOOL

log_pass $claim