diff --git a/src/main/java/org/janelia/saalfeldlab/n5/spark/N5ToVVDSpark.java b/src/main/java/org/janelia/saalfeldlab/n5/spark/N5ToVVDSpark.java
index 8b2cff5..d914ca1 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/spark/N5ToVVDSpark.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/spark/N5ToVVDSpark.java
@@ -382,9 +382,11 @@ public static < I extends NativeType< I > & RealType< I >, O extends NativeType<
 			System.err.println(x);
 		}
 
-		//System.out.println( "dim: " + outputDimensions[0] + " " + outputDimensions[1] + " "+ outputDimensions[2]);
+		//
 
-		List> result = sparkContext.parallelize( blockIndexes, Math.min( blockIndexes.size(), MAX_PARTITIONS ) ).map( blockIndex ->
+		System.out.println( "Number of partitions: " + Math.min( blockIndexes.size(), sparkContext.defaultParallelism()*2 ));
+
+		List> result = sparkContext.parallelize( blockIndexes, Math.min( blockIndexes.size(), sparkContext.defaultParallelism()*2 ) ).map( blockIndex ->
 			{
 				final CellGrid cellGrid = new CellGrid( outputDimensions, outputBlockSize );
 				final long[] blockGridPosition = new long[ cellGrid.numDimensions() ];
@@ -799,10 +801,16 @@ else if (metaMap.get(PIXEL_RESOLUTION_ATTRIBUTE_KEY) == Object.class) {
 				int[] blockSize = parsedArgs.getBlockSize();
 				if (i == downsamplingFactors.length - 1) {
 					final int dim = inputDimensions.length;
-					blockSize = new int[ dim ];
-					for ( int d = 0; d < dim; ++d )
-						blockSize[d] = (int)(inputDimensions[d] / adjustedDownsamplingFactor[d] + 0.5);
+					final int[] newBlockSize = new int[ dim ];
+					long elemnum = bit_depth / 8;
+					for ( int d = 0; d < dim; ++d ) {
+						newBlockSize[d] = (int) (inputDimensions[d] / adjustedDownsamplingFactor[d] + 0.5);
+						elemnum *= (long)newBlockSize[d];
+					}
+					if (elemnum <= Integer.MAX_VALUE)
+						blockSize = newBlockSize;
 				}
+				System.out.println("adjustedBlockSize:" + Arrays.toString(blockSize));
 
 				vvdxml.add(downsample(
 						sparkContext,
@@ -1217,6 +1225,8 @@ public static final > List saveNonEmpt
 			try (final LockedFileChannel lockedChannel = LockedFileChannel.openForWriting(path)/*LockedFileChannel.openForAppend(path)*/) {
 				final long file_offset = lockedChannel.getFileChannel().position();
 				final OutputStream ostream = Channels.newOutputStream(lockedChannel.getFileChannel());
+				System.out.println("id: " + brickID + " " + dataBlock.getNumElements() + " " + (dataBlock.getNumElements() * 8));
+				System.out.println(Arrays.toString(dataBlock.getSize()));
 				DefaultBlockWriter.writeBlock(ostream, attributes, dataBlock);
 				final long data_size = 0L;//lockedChannel.getFileChannel().position() - file_offset;
 				final long[] bound_min = {gridPosition[0] * blockSize[0], gridPosition[1] * blockSize[1], gridPosition[2] * blockSize[2]};
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/spark/SliceTiffToVVDSpark.java b/src/main/java/org/janelia/saalfeldlab/n5/spark/SliceTiffToVVDSpark.java
index 686ee30..377789d 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/spark/SliceTiffToVVDSpark.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/spark/SliceTiffToVVDSpark.java
@@ -250,7 +250,8 @@ public static < T extends NativeType< T > > void createTempN5DatasetFromTiff(
 		// convert to temporary N5 dataset with block size = 1 in the slice dimension and increased block size in other dimensions
 		n5TempOutput.createDataset( tempDatasetPath, inputDimensions, tmpBlockSize, inputDataType, compression );
 		final List< Integer > sliceIndices = IntStream.range( 0, tiffSliceFilepaths.size() ).boxed().collect( Collectors.toList() );
-		sparkContext.parallelize( sliceIndices, Math.min( sliceIndices.size(), MAX_PARTITIONS ) ).foreach( sliceIndex ->
+		System.out.println( "Number of partitions: " + Math.min( sliceIndices.size(), sparkContext.defaultParallelism()*2 ));
+		sparkContext.parallelize( sliceIndices, Math.min( sliceIndices.size(), sparkContext.defaultParallelism()*2 ) ).foreach( sliceIndex ->
 		{
 			final ImagePlus imp = tiffReader.openTiff( tiffSliceFilepaths.get( sliceIndex ) );
 			final RandomAccessibleInterval< T > img = ( RandomAccessibleInterval< T > ) ImagePlusImgs.from( imp );
@@ -403,10 +404,16 @@ public static void main( final String... args ) throws IOException, CmdLineExcep
 				int[] blockSize = parsedArgs.getBlockSize();
 				if (i == downsamplingFactors.length - 1) {
 					final int dim = inputDimensions.length;
-					blockSize = new int[ dim ];
-					for ( int d = 0; d < dim; ++d )
-						blockSize[d] = (int)(inputDimensions[d] / adjustedDownsamplingFactor[d] + 0.5);
+					final int[] newBlockSize = new int[ dim ];
+					long elemnum = bit_depth / 8;
+					for ( int d = 0; d < dim; ++d ) {
+						newBlockSize[d] = (int) (inputDimensions[d] / adjustedDownsamplingFactor[d] + 0.5);
+						elemnum *= (long)newBlockSize[d];
+					}
+					if (elemnum <= Integer.MAX_VALUE)
+						blockSize = newBlockSize;
 				}
+				System.out.println("adjustedBlockSize:" + Arrays.toString(blockSize));
 
 				vvdxml.add(N5ToVVDSpark.downsample(
 						sparkContext,
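
The substantive change, identical in both files, is the guard on the final-level block size: previously the last downsampling level forced a single block spanning the whole downsampled volume, which can exceed what a single int-addressed Java array can hold once the per-element byte size is factored in. The other change replaces the fixed MAX_PARTITIONS cap with sparkContext.defaultParallelism()*2, sizing the RDD to the cluster rather than to a hard-coded constant. Below is a minimal, self-contained sketch of the guard, assuming stand-in names (adjustBlockSize, bitDepth, defaultBlockSize) for the surrounding fields; it illustrates the technique and is not code from the patch.

import java.util.Arrays;

public class BlockSizeGuardSketch
{
	// Mirrors the patched logic: use one block covering the whole downsampled
	// volume only if its total byte size fits in a signed 32-bit int.
	static int[] adjustBlockSize(
			final long[] inputDimensions,
			final double[] downsamplingFactor,
			final int bitDepth,
			final int[] defaultBlockSize )
	{
		final int[] newBlockSize = new int[ inputDimensions.length ];
		long numBytes = bitDepth / 8; // bytes per element
		for ( int d = 0; d < inputDimensions.length; ++d )
		{
			// round the downsampled extent to the nearest integer
			newBlockSize[ d ] = ( int ) ( inputDimensions[ d ] / downsamplingFactor[ d ] + 0.5 );
			numBytes *= newBlockSize[ d ];
		}
		// keep the configured block size if a single block would overflow
		return numBytes <= Integer.MAX_VALUE ? newBlockSize : defaultBlockSize;
	}

	public static void main( final String... args )
	{
		// 4096^3 volume of 16-bit data, downsampled by 2 per dimension:
		// 2048^3 voxels * 2 bytes = 16 GiB, so the guard rejects the single block
		System.out.println( Arrays.toString( adjustBlockSize(
				new long[] { 4096, 4096, 4096 },
				new double[] { 2, 2, 2 },
				16,
				new int[] { 128, 128, 128 } ) ) );
	}
}

The example prints [128, 128, 128]: one 2048^3 block of 16-bit data would need 16 GiB, so the configured block size is kept. Accumulating the byte count in a long, as the patch does with its (long) cast, avoids intermediate int overflow during the multiplication itself.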