
Commit

feat: add encoding_effort for JPEG XL to image handling
william-silversmith committed Sep 17, 2024
1 parent df4518e commit d7f9641
Showing 3 changed files with 53 additions and 20 deletions.
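In short, this commit threads a new optional encoding_effort parameter (JPEG XL only) from the CLI options and the task-creation functions down into the precomputed scale metadata. A minimal usage sketch of the Python API, assuming the usual igneous.task_creation entry point; the layer path and numeric values are placeholders, not part of this commit:

import igneous.task_creation as tc

# encoding_effort only takes effect when the encoding is JPEG XL ("jpegxl" / "jxl").
tasks = tc.create_downsampling_tasks(
  "gs://bucket/image_layer",  # placeholder path
  mip=0,
  num_mips=3,
  encoding="jpegxl",
  encoding_level=90,   # JPEG XL quality
  encoding_effort=5,   # 1-10; higher is slower but hits the quality target more reliably
)
# enqueue the resulting tasks with a TaskQueue (e.g. LocalTaskQueue) as usual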
4 changes: 3 additions & 1 deletion igneous/task_creation/common.py
@@ -212,7 +212,7 @@ def compute_shard_params_for_hashed(

return (int(shard_bits), int(minishard_bits), 0)

def set_encoding(cv, mip, encoding, encoding_level):
def set_encoding(cv, mip, encoding, encoding_level, encoding_effort):
scale = cv.meta.scale(mip)
if encoding is not None:
scale['encoding'] = encoding
@@ -228,6 +228,8 @@ def set_encoding(cv, mip, encoding, encoding_level):
scale["jpeg_quality"] = encoding_level
elif encoding == "jpegxl":
scale["jpegxl_quality"] = encoding_level
if encoding_effort is not None:
scale["jpegxl_effort"] = int(encoding_effort)
elif encoding == "png":
scale["png_level"] = encoding_level
elif encoding == "fpzip":
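The net effect of this hunk: for JPEG XL layers, set_encoding now records the effort alongside the quality in the scale's metadata. An illustrative sketch, assuming a call like set_encoding(cv, mip, "jpegxl", 85, 6) (values chosen only for the example):

# Fields the scale dict for that mip would end up containing, per the logic above:
scale = {
  "encoding": "jpegxl",
  "jpegxl_quality": 85,  # from encoding_level
  "jpegxl_effort": 6,    # from encoding_effort, cast to int; skipped when None
  # ...the remaining scale fields (resolution, chunk_sizes, size, ...) are untouched
}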
34 changes: 25 additions & 9 deletions igneous/task_creation/image.py
@@ -210,6 +210,7 @@ def create_downsampling_tasks(
bounds_mip:int = 0,
memory_target:int = MEMORY_TARGET,
encoding_level:Optional[int] = None,
encoding_effort:Optional[int] = None,
method:int = DownsampleMethods.AUTO,
):
"""
@@ -280,7 +281,7 @@ def ds_shape(mip, chunk_size=None, factor=None):
)

for mip_i in range(mip+1, min(mip + num_mips, len(vol.available_mips))):
set_encoding(vol, mip_i, encoding, encoding_level)
set_encoding(vol, mip_i, encoding, encoding_level, encoding_effort)
vol.commit_info()

if not preserve_chunk_size or chunk_size:
@@ -325,6 +326,8 @@ def on_finish(self):
'chunk_size': (list(chunk_size) if chunk_size else None),
'preserve_chunk_size': preserve_chunk_size,
'encoding': encoding,
'encoding_level': encoding_level,
'encoding_effort': encoding_effort,
'fill_missing': bool(fill_missing),
'delete_black_uploads': bool(delete_black_uploads),
'background_color': background_color,
@@ -483,7 +486,7 @@ def update_bits():
if preshift_bits + shard_bits + minishard_bits > max_bits:
raise ValueError(f"{preshift_bits} preshift_bits {shard_bits} shard_bits + {minishard_bits} minishard_bits must be <= {max_bits}. Try reducing the number of minishards.")

if encoding in ("jpeg", "png", "kempressed", "fpzip", "zfpc"):
if encoding in ("jpeg", "jpegxl", "jxl", "png", "kempressed", "fpzip", "zfpc"):
data_encoding = "raw"

return {
@@ -531,7 +534,7 @@ def create_image_shard_transfer_tasks(
src_vol, dst_layer_path,
dest_voxel_offset,
mip, bounds_mip,
encoding, encoding_level,
encoding, encoding_level, encoding_effort,
chunk_size, truncate_scales,
clean_info, cutout, bounds
)
@@ -618,6 +621,7 @@ def create_image_shard_downsample_tasks(
agglomerate=False, timestamp=None,
factor=(2,2,1), bounds=None, bounds_mip=0,
encoding_level:Optional[int] = None,
encoding_effort:Optional[int] = None,
method=DownsampleMethods.AUTO,
):
"""
@@ -639,7 +643,7 @@
dtype=cv.dtype,
uncompressed_shard_bytesize=int(memory_target),
)
set_encoding(cv, mip + 1, encoding, encoding_level)
set_encoding(cv, mip + 1, encoding, encoding_level, encoding_effort)
cv.commit_info()

shape = image_shard_shape_from_spec(
@@ -682,6 +686,8 @@ def on_finish(self):
"agglomerate": agglomerate,
"timestamp": timestamp,
"method": method,
"encoding_level": encoding_level,
"encoding_effort": encoding_effort,
},
"by": operator_contact(),
"date": strftime("%Y-%m-%d %H:%M %Z"),
@@ -747,7 +753,7 @@ def create_transfer_cloudvolume(
src_vol:CloudVolume, dst_cloudpath:str,
dest_voxel_offset:ShapeType,
mip:int, bounds_mip:int,
encoding:str, encoding_level:int,
encoding:str, encoding_level:int, encoding_effort:Optional[int],
chunk_size:ShapeType,
truncate_scales:bool, clean_info:bool,
cutout:bool, bounds:Bbox
@@ -784,7 +790,7 @@ def create_transfer_cloudvolume(
(dest_voxel_offset + bounds.minpt) * (bounds_resolution / dest_vol.meta.resolution(i))
)

set_encoding(dest_vol, mip, encoding, encoding_level)
set_encoding(dest_vol, mip, encoding, encoding_level, encoding_effort)
if truncate_scales:
dest_vol.info['scales'] = dest_vol.info['scales'][:mip+1]
dest_vol.info['scales'][mip]['chunk_sizes'] = [ chunk_size.tolist() ]
@@ -823,6 +829,7 @@ def create_transfer_tasks(
cutout:bool = False,
stop_layer:Optional[int] = None,
downsample_method:int = DownsampleMethods.AUTO,
encoding_effort:Optional[int] = None,
) -> Iterator:
"""
Transfer data to a new data layer. You can use this operation
@@ -860,10 +867,12 @@
image type-specific first stage of compression and the "compress" flag as the data
agnostic second stage compressor. For example, compressed_segmentation and gzip work
well together, but not jpeg and gzip.
encoding_level: Some encoding schemes (png,jpeg,fpzip) offer a simple scalar knob
encoding_level: Some encoding schemes (png,jpeg,jpegxl,fpzip,zfpc) offer a simple scalar knob
to control encoding quality. This number corresponds to png level, jpeg quality,
and fpzip precision. Other schemes might require more complex inputs and may
require info file modifications.
encoding_effort: (jpeg xl only) Sets JPEG XL effort to hit the specified quality target.
Higher values are slower, but more reliable.
factor: (overrides axis) can manually specify what each downsampling round is
supposed to do: e.g. (2,2,1), (2,2,2), etc
fill_missing: Treat missing image tiles as zeroed for both src and dest.
@@ -919,7 +928,7 @@ def create_transfer_tasks(
src_vol, dest_layer_path,
dest_voxel_offset,
mip, bounds_mip,
encoding, encoding_level,
encoding, encoding_level, encoding_effort,
chunk_size, truncate_scales,
clean_info, cutout, bounds
)
@@ -1015,6 +1024,7 @@ def on_finish(self):
'factor': (tuple(factor) if factor else None),
'sparse': bool(sparse),
'encoding_level': encoding_level,
'encoding_effort': encoding_effort,
'stop_layer': stop_layer,
'downsample_method': int(downsample_method),
},
@@ -1064,6 +1074,7 @@ def create_reordering_tasks(
background_color:int = 0,
encoding:Optional[str] = None,
encoding_level:Optional[int] = None,
encoding_effort:Optional[int] = None,
):
src_cv = CloudVolume(src, mip=mip)
zstart, zend = src_cv.bounds.min[2], src_cv.bounds.max[2]
@@ -1080,7 +1091,11 @@
dest_cv.info['scales'][mip]['chunk_sizes'] = [ chunk_size ]
dest_cv.info = clean_xfer_info(dest_cv.info)
if encoding:
set_encoding(dest_cv, mip, encoding, encoding_level=encoding_level)
set_encoding(
dest_cv, mip, encoding,
encoding_level=encoding_level,
encoding_effort=encoding_effort,
)

dest_cv.commit_info()

@@ -1131,6 +1146,7 @@ def on_finish(self):
# 'factor': (tuple(factor) if factor else None),
# 'sparse': bool(sparse),
'encoding_level': encoding_level,
'encoding_effort': encoding_effort,
},
'by': operator_contact(),
'date': strftime('%Y-%m-%d %H:%M %Z'),
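Taken together, the image.py changes forward encoding_effort through the transfer, sharded transfer, sharded downsample, and reordering task creators into set_encoding for the destination scale, and record it in the provenance metadata. A hedged sketch of a transfer call that uses the new keyword; paths and values are placeholders:

import igneous.task_creation as tc

tasks = tc.create_transfer_tasks(
  "gs://bucket/src_layer",  # placeholder source
  "gs://bucket/dst_layer",  # placeholder destination
  mip=0,
  encoding="jpegxl",
  encoding_level=90,   # JPEG XL quality
  encoding_effort=7,   # slower, but hits the quality target more reliably
)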
35 changes: 25 additions & 10 deletions igneous_cli/cli.py
@@ -44,12 +44,15 @@ def normalize_encoding(encoding):
return "crackle"
elif encoding == "cpso":
return "compresso"
elif encoding == "jxl":
return "jpegxl"
elif encoding == "auto":
return None

return encoding

ENCODING_HELP = "Which image encoding to use. Options: [all] raw, png; [images] jpeg; [segmentations] compressed_segmentation (cseg), compresso (cpso), crackle (ckl); [floats] fpzip, kempressed, zfpc"
ENCODING_HELP = "Which image encoding to use. Options: [all] raw, png; [images] jpeg, jpegxl (jxl); [segmentations] compressed_segmentation (cseg), compresso (cpso), crackle (ckl); [floats] fpzip, kempressed, zfpc"
ENCODING_EFFORT = 5

def enqueue_tasks(ctx, queue, tasks):
parallel = int(ctx.obj.get("parallel", 1))
@@ -220,7 +223,8 @@ def imagegroup():
@click.option('--fill-missing', is_flag=True, default=False, help="Interpret missing image files as background instead of failing.")
@click.option('--num-mips', default=None, type=int, help="Build this many additional pyramid levels. Each increment increases memory requirements per task 4-8x.")
@click.option('--encoding', type=EncodingType(), default="auto", help=ENCODING_HELP, show_default=True)
@click.option('--encoding-level', default=None, help="For some encodings (png level,jpeg quality,fpzip precision) a simple scalar value can adjust the compression efficiency.", show_default=True)
@click.option('--encoding-level', default=None, help="For some encodings (png level, jpeg & jpeg xl quality, fpzip precision) a simple scalar value can adjust the compression efficiency.", show_default=True)
@click.option('--encoding-effort', default=ENCODING_EFFORT, help="(JPEG XL) Set effort (1-10) used by JPEG XL to hit the quality target.", show_default=True)
@click.option('--sparse', is_flag=True, default=False, help="Don't count black pixels in mode or average calculations. For images, eliminates edge ghosting in 2x2x2 downsample. For segmentation, prevents small objects from disappearing at high mip levels.")
@click.option('--chunk-size', type=Tuple3(), default=None, help="Chunk size of new layers. e.g. 128,128,64")
@click.option('--compress', default=None, help="Set the image compression scheme. Options: 'none', 'gzip', 'br'")
@@ -236,8 +240,8 @@ def imagegroup():
@click.pass_context
def downsample(
ctx, path, queue, mip, fill_missing,
num_mips, encoding, encoding_level, sparse,
chunk_size, compress, volumetric,
num_mips, encoding, encoding_level, encoding_effort,
sparse, chunk_size, compress, volumetric,
delete_bg, bg_color, sharded, memory,
xrange, yrange, zrange, method,
):
@@ -279,6 +283,7 @@ def downsample(
encoding=encoding, memory_target=memory,
factor=factor, bounds=bounds, bounds_mip=mip,
encoding_level=encoding_level, method=method,
encoding_effort=encoding_effort,
)
else:
tasks = tc.create_downsampling_tasks(
@@ -293,6 +298,7 @@ def downsample(
memory_target=memory,
encoding_level=encoding_level,
method=method,
encoding_effort=encoding_effort,
)

enqueue_tasks(ctx, queue, tasks)
@@ -308,7 +314,8 @@ def downsample(
@click.option('--memory', default=3.5e9, type=int, help="Task memory limit in bytes. Task shape will be chosen to fit and maximize downsamples.", show_default=True)
@click.option('--max-mips', default=5, help="Maximum number of additional pyramid levels.", show_default=True)
@click.option('--encoding', type=EncodingType(), default="auto", help=ENCODING_HELP, show_default=True)
@click.option('--encoding-level', default=None, help="For some encodings (png level,jpeg quality,fpzip precision) a simple scalar value can adjust the compression efficiency.", show_default=True)
@click.option('--encoding-level', default=None, help="For some encodings (png level,jpeg & jpegxl quality,fpzip precision) a simple scalar value can adjust the compression efficiency.", show_default=True)
@click.option('--encoding-effort', default=ENCODING_EFFORT, help="(JPEG XL) Set effort (1-10) used by JPEG XL to hit the quality target.", show_default=True)
@click.option('--sparse', is_flag=True, default=False, help="Don't count black pixels in mode or average calculations. For images, eliminates edge ghosting in 2x2x2 downsample. For segmentation, prevents small objects from disappearing at high mip levels.", show_default=True)
@click.option('--shape', type=Tuple3(), default=None, help="(overrides --memory) Set the task shape in voxels. This also determines how many downsamples you get. e.g. 2048,2048,64", show_default=True)
@click.option('--chunk-size', type=Tuple3(), default=None, help="Chunk size of destination layer. e.g. 128,128,64", show_default=True)
@@ -332,7 +339,8 @@ def xfer(
ctx, src, dest, queue, translate,
downsample, mip, fill_missing,
memory, max_mips, shape, sparse,
encoding, encoding_level, chunk_size, compress,
encoding, encoding_level, encoding_effort,
chunk_size, compress,
volumetric, delete_bg, bg_color, sharded,
dest_voxel_offset, clean_info, no_src_update,
truncate_scales,
@@ -379,7 +387,7 @@ def xfer(
encoding=encoding, memory_target=memory, clean_info=clean_info,
encoding_level=encoding_level, truncate_scales=truncate_scales,
compress=compress, bounds=bounds, bounds_mip=bounds_mip,
cutout=cutout,
cutout=cutout, encoding_effort=encoding_effort,
)
else:
tasks = tc.create_transfer_tasks(
Expand All @@ -393,7 +401,7 @@ def xfer(
clean_info=clean_info, no_src_update=no_src_update,
encoding_level=encoding_level, truncate_scales=truncate_scales,
bounds=bounds, bounds_mip=bounds_mip, cutout=cutout,
downsample_method=downsample_method,
downsample_method=downsample_method, encoding_effort=encoding_effort,
)

enqueue_tasks(ctx, queue, tasks)
@@ -425,6 +433,7 @@ def image_roi(src, progress, suppress_faint, dust, z_step, max_axial_len):
@click.option('--fill-missing', is_flag=True, default=False, help="Interpret missing image files as background instead of failing.")
@click.option('--encoding', type=EncodingType(), default="auto", help=ENCODING_HELP, show_default=True)
@click.option('--encoding-level', default=None, help="For some encodings (png level,jpeg quality,fpzip precision) a simple scalar value can adjust the compression efficiency.", show_default=True)
@click.option('--encoding-effort', default=ENCODING_EFFORT, help="(JPEG XL) Set effort (1-10) used by JPEG XL to hit the quality target.", show_default=True)
@click.option('--compress', default="br", help="Set the image compression scheme. Options: 'none', 'gzip', 'br'", show_default=True)
@click.option('--delete-bg', is_flag=True, default=False, help="Issue a delete instead of uploading a background tile. This is helpful on systems that don't like tiny files.")
@click.option('--bg-color', default=0, help="Determines which color is regarded as background.", show_default=True)
@@ -434,7 +443,7 @@ def image_reorder(
ctx, src, dest,
queue, mip,
fill_missing,
encoding, encoding_level,
encoding, encoding_level, encoding_effort,
compress,
delete_bg, bg_color,
clean_info,
@@ -471,6 +480,7 @@ def image_reorder(
background_color=bg_color,
encoding=encoding,
encoding_level=encoding_level,
encoding_effort=encoding_effort,
)

enqueue_tasks(ctx, queue, tasks)
@@ -1689,14 +1699,17 @@ def view(path, browser, port, ng):
@click.option('--offset', type=Tuple3(), default=(0,0,0), help="Voxel offset in x,y, and z.", show_default=True)
@click.option('--seg', is_flag=True, default=False, help="Sets layer type to segmentation (default image).", show_default=True)
@click.option('--encoding', type=EncodingType(), default="raw", help=ENCODING_HELP, show_default=True)
@click.option('--encoding-level', default=None, help="For some encodings (png level, jpeg & jpeg xl quality, fpzip precision) a simple scalar value can adjust the compression efficiency.", show_default=True)
@click.option('--encoding-effort', default=ENCODING_EFFORT, help="(JPEG XL) Set effort (1-10) used by JPEG XL to hit the quality target.", show_default=True)
@click.option('--compress', type=CompressType(), default="br", help="Set the image compression scheme. Options: 'none', 'gzip', 'br'", show_default=True)
@click.option('--chunk-size', type=Tuple3(), default=(128,128,64), help="Chunk size of new layers. e.g. 128,128,64", show_default=True)
@click.option('--h5-dataset', default="main", help="Which h5 dataset to access (hdf5 imports only).", show_default=True)
@click.pass_context
def create(
ctx, src, dest,
resolution, offset,
seg, encoding,
seg,
encoding, encoding_level, encoding_effort,
compress, chunk_size,
h5_dataset
):
@@ -1735,6 +1748,8 @@ def create(
encoding=encoding,
compress=compress,
progress=True,
encoding_level=encoding_level,
encoding_effort=encoding_effort,
)

def normalize_file_ext(filename):
Expand Down
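On the CLI side, the downsample, xfer, image_reorder, and create commands each gain an --encoding-effort option (default ENCODING_EFFORT = 5), and "jxl" is accepted as an alias that normalize_encoding maps to "jpegxl". Roughly, a sharded downsample invoked with --encoding jxl --encoding-effort 7 would bottom out in a call like the following sketch; the path and any keyword not shown in the diff above are assumptions, not part of this commit:

import igneous.task_creation as tc

tasks = tc.create_image_shard_downsample_tasks(
  "gs://bucket/image_layer",  # placeholder path
  mip=0,                      # assumed keyword, mirroring the CLI's --mip option
  encoding="jpegxl",          # "jxl" is normalized to "jpegxl" before this call
  encoding_level=None,        # --encoding-level default
  encoding_effort=7,          # --encoding-effort
)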
