From 99760f372bfdb1c19c0f85672fa0111b15d3fbbf Mon Sep 17 00:00:00 2001
From: Ivan Vilata-i-Balaguer
Date: Mon, 11 Dec 2023 13:56:21 +0100
Subject: [PATCH] Example: Use custom function for more compact print calls.

---
 examples/blosc2_optimized_slicing.py | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/examples/blosc2_optimized_slicing.py b/examples/blosc2_optimized_slicing.py
index 8836b38..1af4a06 100644
--- a/examples/blosc2_optimized_slicing.py
+++ b/examples/blosc2_optimized_slicing.py
@@ -49,6 +49,10 @@
 file_name = 'b2nd-example.h5'
 dataset_name = 'data'
 
+
+def printl(*args, **kwargs):
+    print(*args, **kwargs, sep='\n')
+
 # Creating a Blosc2-compressed dataset
 # ------------------------------------
 with h5py.File(file_name, 'w') as f:
@@ -71,15 +75,13 @@
     # One just uses slicing as usual.
     dataset = f[dataset_name]
     # Slices with step == 1 may be optimized.
-    print("Contiguous slice from dataset (optimized):", dataset[150:, 150:],
-          sep='\n')
-    print("Contiguous slice from input array:", data[150:, 150:], sep='\n')
+    printl("Contiguous slice from dataset (optimized):", dataset[150:, 150:])
+    printl("Contiguous slice from input array:", data[150:, 150:])
     # Slices with step != 1 (or with datasets of a foreign endianness)
     # are not optimized, but still work
     # (via the HDF5 filter pipeline and hdf5plugin).
-    print("Sparse slice from dataset (filter):", dataset[150::2, 150::2],
-          sep='\n')
-    print("Sparse slice from input array:", data[150::2, 150::2], sep='\n')
+    printl("Sparse slice from dataset (filter):", dataset[150::2, 150::2])
+    printl("Sparse slice from input array:", data[150::2, 150::2])
     print()
 
 # Disabling Blosc2 optimized slicing
@@ -92,8 +94,8 @@
     b2h5py.unpatch_dataset_class()
     assert(not b2h5py.is_dataset_class_patched())
     dataset = f[dataset_name]
-    print("Slice from dataset (filter):", dataset[150:, 150:], sep='\n')
-    print("Slice from input array:", data[150:, 150:], sep='\n')
+    printl("Slice from dataset (filter):", dataset[150:, 150:])
+    printl("Slice from input array:", data[150:, 150:])
     b2h5py.patch_dataset_class()  # back to normal
     assert(b2h5py.is_dataset_class_patched())
     print()
@@ -108,11 +110,10 @@
     b2h5py.unpatch_dataset_class()
     assert(not b2h5py.is_dataset_class_patched())
    dataset = f[dataset_name]
-    print("Slice from dataset (filter):", dataset[150:, 150:], sep='\n')
+    printl("Slice from dataset (filter):", dataset[150:, 150:])
     with b2h5py.patching_dataset_class():
         assert(b2h5py.is_dataset_class_patched())
-        print("Slice from dataset (optimized):", dataset[150:, 150:],
-              sep='\n')
+        printl("Slice from dataset (optimized):", dataset[150:, 150:])
     assert(not b2h5py.is_dataset_class_patched())
-    print("Slice from input array:", data[150:, 150:], sep='\n')
+    printl("Slice from input array:", data[150:, 150:])
     print()
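
Note (not part of the patch): a minimal standalone sketch of what the printl helper introduced above does, with illustrative sample values. It only fixes sep='\n' and forwards any other keyword arguments to print():

def printl(*args, **kwargs):
    # Print each positional argument on its own line; other keyword
    # arguments (end, file, flush) are forwarded to print().
    # Passing sep explicitly would raise a TypeError (duplicate keyword).
    print(*args, **kwargs, sep='\n')

printl("Slice from input array:", [[1, 2], [3, 4]])
# Equivalent to the call it replaces:
print("Slice from input array:", [[1, 2], [3, 4]], sep='\n')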