ast_flags |= CEF_NEVER;
}
+	/* For compressed files, round the lock region outward so it
+	 * covers whole compression chunks when the IO is not already
+	 * chunk aligned.
+	 */
+	if (io->ci_compressed_file &&
+	    (io->ci_cmpc_leading.e_start != -1 ||
+	     io->ci_cmpc_trailing.e_end != -1)) {
+		loff_t orig_start = start;
+		loff_t orig_end = end;
+
+		/* A leading extent is only set when the IO start is not
+		 * chunk aligned; move 'start' back to the first byte of
+		 * the containing chunk.  Cast the page index to loff_t
+		 * BEFORE shifting so the shift is done in 64 bits and
+		 * cannot overflow a narrower index type.
+		 */
+		if (io->ci_cmpc_leading.e_start != -1)
+			start = (loff_t)io->ci_cmpc_leading.e_start <<
+				PAGE_SHIFT;
+
+		/* 'end' is the index of the last page of the IO, so to
+		 * lock correctly we must round it to the last byte of
+		 * that page - this is done by adding 1 to the page index
+		 * before converting it to bytes and then subtracting 1
+		 * from the byte offset.  Widen to 64 bits before the
+		 * shift for the same overflow reason as above.
+		 */
+		if (io->ci_cmpc_trailing.e_end != -1)
+			end = ((loff_t)(io->ci_cmpc_trailing.e_end + 1) <<
+			       PAGE_SHIFT) - 1;
+
+		/* both offsets are inclusive last/first bytes, so print a
+		 * closed interval; loff_t is signed, hence %lld not %llu
+		 */
+		CDEBUG(D_VFSTRACE,
+		       "Compressed file - rounding lock from [%lld, %lld] to [%lld, %lld]\n",
+		       orig_start, orig_end, start, end);
+	}
+
result = vvp_mmap_locks(env, vio, io);
if (result == 0)
result = vvp_io_one_lock(env, io, ast_flags, mode, start, end);