fix vvd converters
takashi310 committed Jul 13, 2021
1 parent 242e49c commit d76cd4a
Showing 2 changed files with 26 additions and 9 deletions.
20 changes: 15 additions & 5 deletions src/main/java/org/janelia/saalfeldlab/n5/spark/N5ToVVDSpark.java
@@ -382,9 +382,11 @@ public static < I extends NativeType< I > & RealType< I >, O extends NativeType<
System.err.println(x);
}

//System.out.println( "dim: " + outputDimensions[0] + " " + outputDimensions[1] + " "+ outputDimensions[2]);
//
- List<List<VVDBlockMetadata>> result = sparkContext.parallelize( blockIndexes, Math.min( blockIndexes.size(), MAX_PARTITIONS ) ).map( blockIndex ->
+ System.out.println( "Number of partitions: " + Math.min( blockIndexes.size(), sparkContext.defaultParallelism()*2 ));
+
+ List<List<VVDBlockMetadata>> result = sparkContext.parallelize( blockIndexes, Math.min( blockIndexes.size(), sparkContext.defaultParallelism()*2 ) ).map( blockIndex ->
{
final CellGrid cellGrid = new CellGrid( outputDimensions, outputBlockSize );
final long[] blockGridPosition = new long[ cellGrid.numDimensions() ];
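
The change above replaces the fixed MAX_PARTITIONS cap with a cap of twice the cluster's default parallelism when choosing the partition count, so the fan-out scales with the available executors. A minimal sketch of that pattern, assuming a hypothetical helper named parallelizeCapped:

    import java.util.List;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    // Cap the number of partitions at 2x the cluster's default parallelism so a
    // short list of work items does not fan out into near-empty tasks.
    static <T> JavaRDD<T> parallelizeCapped( final JavaSparkContext sparkContext, final List<T> items )
    {
        final int numPartitions = Math.min( items.size(), sparkContext.defaultParallelism() * 2 );
        return sparkContext.parallelize( items, numPartitions );
    }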
@@ -799,10 +801,16 @@ else if (metaMap.get(PIXEL_RESOLUTION_ATTRIBUTE_KEY) == Object.class) {
int[] blockSize = parsedArgs.getBlockSize();
if (i == downsamplingFactors.length - 1) {
final int dim = inputDimensions.length;
- blockSize = new int[ dim ];
- for ( int d = 0; d < dim; ++d )
- blockSize[d] = (int)(inputDimensions[d] / adjustedDownsamplingFactor[d] + 0.5);
+ final int[] newBlockSize = new int[ dim ];
+ long elemnum = bit_depth / 8;
+ for ( int d = 0; d < dim; ++d ) {
+ newBlockSize[d] = (int) (inputDimensions[d] / adjustedDownsamplingFactor[d] + 0.5);
+ elemnum *= (long)newBlockSize[d];
+ }
+ if (elemnum <= Integer.MAX_VALUE)
+ blockSize = newBlockSize;
}
+ System.out.println("adjustedBlockSize:" + Arrays.toString(blockSize));

vvdxml.add(downsample(
sparkContext,
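
The new guard computes the byte count of a block spanning the whole downsampled level (bit_depth / 8 bytes per element, multiplied across every dimension) and only adopts that block size when the product still fits in a signed 32-bit integer; otherwise the block size from the command line is kept. The likely motivation, stated here as an assumption, is that the block is serialized through int-indexed Java arrays, which cannot exceed Integer.MAX_VALUE entries. A standalone sketch of the same check, with illustrative names:

    // Return a block size that covers the whole level if its total byte count
    // fits in an int; otherwise fall back to the caller-supplied block size.
    static int[] fitBlockToLevel( final long[] levelDimensions, final int bitDepth, final int[] fallback )
    {
        final int[] candidate = new int[ levelDimensions.length ];
        long numBytes = bitDepth / 8;
        for ( int d = 0; d < levelDimensions.length; ++d )
        {
            candidate[ d ] = ( int ) levelDimensions[ d ];
            numBytes *= candidate[ d ];
        }
        return numBytes <= Integer.MAX_VALUE ? candidate : fallback;
    }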
@@ -1217,6 +1225,8 @@ public static final <T extends NativeType<T>> List<VVDBlockMetadata> saveNonEmpt
try (final LockedFileChannel lockedChannel = LockedFileChannel.openForWriting(path)/*LockedFileChannel.openForAppend(path)*/) {
final long file_offset = lockedChannel.getFileChannel().position();
final OutputStream ostream = Channels.newOutputStream(lockedChannel.getFileChannel());
+ System.out.println("id: " + brickID + " " + dataBlock.getNumElements() + " " + (dataBlock.getNumElements() * 8));
+ System.out.println(Arrays.toString(dataBlock.getSize()));
DefaultBlockWriter.writeBlock(ostream, attributes, dataBlock);
final long data_size = 0L;//lockedChannel.getFileChannel().position() - file_offset;
final long[] bound_min = {gridPosition[0] * blockSize[0], gridPosition[1] * blockSize[1], gridPosition[2] * blockSize[2]};
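
The two added printlns log each brick's ID, its element count, and the element count times 8, presumably an upper bound on the block's payload size assuming at most 8-byte elements. For context, the surrounding code derives a brick's bounding box from its grid position; a sketch of that mapping, as a hypothetical helper:

    // The minimum voxel coordinate of a block is its grid position scaled by
    // the block size, dimension by dimension.
    static long[] blockMin( final long[] gridPosition, final int[] blockSize )
    {
        final long[] min = new long[ gridPosition.length ];
        for ( int d = 0; d < min.length; ++d )
            min[ d ] = gridPosition[ d ] * blockSize[ d ];
        return min;
    }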
@@ -250,7 +250,8 @@ public static < T extends NativeType< T > > void createTempN5DatasetFromTiff(
// convert to temporary N5 dataset with block size = 1 in the slice dimension and increased block size in other dimensions
n5TempOutput.createDataset( tempDatasetPath, inputDimensions, tmpBlockSize, inputDataType, compression );
final List< Integer > sliceIndices = IntStream.range( 0, tiffSliceFilepaths.size() ).boxed().collect( Collectors.toList() );
- sparkContext.parallelize( sliceIndices, Math.min( sliceIndices.size(), MAX_PARTITIONS ) ).foreach( sliceIndex ->
+ System.out.println( "Number of partitions: " + Math.min( sliceIndices.size(), sparkContext.defaultParallelism()*2 ));
+ sparkContext.parallelize( sliceIndices, Math.min( sliceIndices.size(), sparkContext.defaultParallelism()*2 ) ).foreach( sliceIndex ->
{
final ImagePlus imp = tiffReader.openTiff( tiffSliceFilepaths.get( sliceIndex ) );
final RandomAccessibleInterval< T > img = ( RandomAccessibleInterval< T > ) ImagePlusImgs.from( imp );
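
As the comment in this hunk notes, the TIFF stack is first written to a temporary N5 dataset whose block size is 1 along the slice dimension, so each slice maps onto its own layer of blocks and all slices can be written in parallel without write conflicts. A minimal sketch of creating such a dataset; the path, dimensions, data type, and compression below are illustrative, not taken from this repository:

    import org.janelia.saalfeldlab.n5.DataType;
    import org.janelia.saalfeldlab.n5.GzipCompression;
    import org.janelia.saalfeldlab.n5.N5FSWriter;

    public class TempSliceDatasetSketch
    {
        public static void main( final String... args ) throws Exception
        {
            final N5FSWriter n5 = new N5FSWriter( "/tmp/temp.n5" );
            final long[] dimensions = { 2048, 2048, 512 };  // x, y, z (z = slices)
            final int[] tmpBlockSize = { 1024, 1024, 1 };   // block size 1 along z
            n5.createDataset( "temp-dataset", dimensions, tmpBlockSize, DataType.UINT16, new GzipCompression() );
        }
    }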
@@ -403,10 +404,16 @@ public static void main( final String... args ) throws IOException, CmdLineExcep
int[] blockSize = parsedArgs.getBlockSize();
if (i == downsamplingFactors.length - 1) {
final int dim = inputDimensions.length;
- blockSize = new int[ dim ];
- for ( int d = 0; d < dim; ++d )
- blockSize[d] = (int)(inputDimensions[d] / adjustedDownsamplingFactor[d] + 0.5);
+ final int[] newBlockSize = new int[ dim ];
+ long elemnum = bit_depth / 8;
+ for ( int d = 0; d < dim; ++d ) {
+ newBlockSize[d] = (int) (inputDimensions[d] / adjustedDownsamplingFactor[d] + 0.5);
+ elemnum *= (long)newBlockSize[d];
+ }
+ if (elemnum <= Integer.MAX_VALUE)
+ blockSize = newBlockSize;
}
+ System.out.println("adjustedBlockSize:" + Arrays.toString(blockSize));

vvdxml.add(N5ToVVDSpark.downsample(
sparkContext,

0 comments on commit d76cd4a
