Skip to content

Commit

Permalink
Merge pull request #178 from saalfeldlab/apply-transform-tests-refactor
Browse files Browse the repository at this point in the history
Apply transform tests and refactor
  • Loading branch information
bogovicj authored Oct 15, 2024
2 parents d987cee + a412ef1 commit b5eba1b
Show file tree
Hide file tree
Showing 15 changed files with 845 additions and 252 deletions.
296 changes: 280 additions & 16 deletions src/main/java/bdv/ij/ApplyBigwarpPlugin.java

Large diffs are not rendered by default.

7 changes: 0 additions & 7 deletions src/main/java/bdv/ij/BigWarpCommand.java
Original file line number Diff line number Diff line change
Expand Up @@ -42,13 +42,6 @@ public void run( String args )
BigWarpInitDialog.runMacro( macroOptions );
else
{
// if( datasetService != null )
// {
// System.out.println( "dset service exists");
// for( final Dataset d : datasetService.getDatasets() )
// System.out.println( d.getName());
// }

final BigWarpInitDialog dialog = BigWarpInitDialog.createAndShow( datasetService );
// dialog sets recorder to its initial state on cancel or execution
dialog.setInitialRecorderState( initialRecorderState );
Expand Down
1 change: 0 additions & 1 deletion src/main/java/bigwarp/BigWarp.java
Original file line number Diff line number Diff line change
Expand Up @@ -3336,7 +3336,6 @@ else if ( ke.getID() == KeyEvent.KEY_RELEASED )
}
}

// TODO,
// consider this
// https://github.com/kwhat/jnativehook
// for arbitrary modifiers
Expand Down
3 changes: 1 addition & 2 deletions src/main/java/bigwarp/BigWarpBatchTransform.java
Original file line number Diff line number Diff line change
Expand Up @@ -91,8 +91,7 @@ public static void main( String[] args ) throws IOException, FormatException
public static final SpimDataMinimal createSpimData( IFormatReader reader )
{
Hashtable< String, Object > gmeta = reader.getGlobalMetadata();
System.out.println( gmeta ); // header stuff here TODO


// get relevant metadata
double pw = 1.0;
double ph = 1.0;
Expand Down
141 changes: 106 additions & 35 deletions src/main/java/bigwarp/BigWarpInit.java
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
Expand Down Expand Up @@ -129,7 +130,6 @@
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.integer.UnsignedIntType;
import net.imglib2.type.volatiles.VolatileARGBType;
import net.imglib2.util.Util;
import net.imglib2.view.ExtendedRandomAccessibleInterval;
import net.imglib2.view.IntervalView;
import net.imglib2.view.Views;
Expand Down Expand Up @@ -258,8 +258,6 @@ public static < T extends RealType< T > > void initSourceReal( final Source< T >
data.sourceInfos.put( setupId++, info );
}

data.wrapUp();

if ( names != null )
{
final ArrayList wrappedSources = wrapSourcesAsRenamable( data.sources, names );
Expand Down Expand Up @@ -415,23 +413,30 @@ public static < T > BigWarpData< T > add( BigWarpData bwdata, Source< T > src, i
return bwdata;
}

@SuppressWarnings("unchecked")
public static < T > BigWarpData< T > add( BigWarpData< T > bwdata, LinkedHashMap< Source< T >, SourceInfo > sources, RealTransform transform, Supplier<String> transformUriSupplier )
{
sources.forEach( ( source, info ) -> {
addSourceToListsGenericType( source, info.getId(), bwdata.converterSetups, bwdata.sources );
final SourceAndConverter< T > addedSource = bwdata.sources.get( bwdata.sources.size() - 1 );
info.setSourceAndConverter( addedSource );
for( Entry<Source<T>, SourceInfo> entry : sources.entrySet() ) {

final Source<T> source = entry.getKey();
final SourceInfo info = entry.getValue();

// some initializers set the SourceAndConverter, some do not
if( info.getSourceAndConverter() == null ) {
addSourceToListsGenericType( source, info.getId(), bwdata.converterSetups, bwdata.sources );
final SourceAndConverter< T > addedSource = bwdata.sources.get( bwdata.sources.size() - 1 );
info.setSourceAndConverter( addedSource );
}

if ( transform != null )
{
info.setTransform( transform, transformUriSupplier );
}

bwdata.sourceInfos.put( info.getId(), info );
} );
}
return bwdata;
}

@SuppressWarnings( { "rawtypes" } )
@SuppressWarnings( { "rawtypes", "unchecked" } )
public static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( BigWarpData bwdata, Dataset data, int baseId, final boolean isMoving )
{
boolean first = true;
Expand Down Expand Up @@ -468,9 +473,7 @@ public static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( BigW
final IntervalView<RealType<?>> channelRaw = Views.hyperSlice( data, channelIdx, c );
final IntervalView<RealType<?>> channel = hasZ ? channelRaw : Views.addDimension( channelRaw, 0, 0 );

@SuppressWarnings( "unchecked" )
final RandomAccessibleIntervalSource source = new RandomAccessibleIntervalSource( channel, data.getType(), res, data.getName() );

final SourceInfo info = new SourceInfo( baseId + c, isMoving, data.getName(), () -> data.getSource() );
info.setSerializable( first );
if ( first )
Expand All @@ -483,9 +486,7 @@ public static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( BigW
{
final RandomAccessibleInterval<RealType<?>> img = hasZ ? data : Views.addDimension( data, 0, 0 );

@SuppressWarnings( "unchecked" )
final RandomAccessibleIntervalSource source = new RandomAccessibleIntervalSource( img, data.getType(), res, data.getName() );

final SourceInfo info = new SourceInfo( baseId, isMoving, data.getName(), () -> data.getSource() );
info.setSerializable( true );
sourceInfoMap.put( source, info );
Expand Down Expand Up @@ -523,6 +524,7 @@ public static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( BigW
int setupId = baseId;
for ( final SourceAndConverter sac : tmpSources )
{
@SuppressWarnings("unchecked")
final Source< T > source = sac.getSpimSource();
sourceInfoMap.put( source, new SourceInfo( setupId++, isMoving, source.getName() ) );
}
Expand All @@ -542,6 +544,7 @@ private static String schemeSpecificPartWithoutQuery( URI uri )
return uri.getSchemeSpecificPart().replaceAll( "\\?" + uri.getQuery(), "" ).replaceAll( "//", "" );
}

@SuppressWarnings("unchecked")
public static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( final BigWarpData< T > bwData, String uri, int setupId, boolean isMoving ) throws URISyntaxException, IOException, SpimDataException
{
final SharedQueue sharedQueue = BigWarpData.getSharedQueue();
Expand Down Expand Up @@ -569,8 +572,8 @@ public static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( fina
throw new URISyntaxException( firstScheme, "Unsupported Top Level Protocol" );
}

final Source< T > source = (Source<T>)loadN5Source( n5reader, n5URL.getGroupPath(), sharedQueue );
sourceStateMap.put( source, new SourceInfo( setupId, isMoving, n5URL.getGroupPath() ) );
final SourceInfo info = loadN5SourceInfo(bwData, n5reader, n5URL.getGroupPath(), sharedQueue, setupId, isMoving );
sourceStateMap.put( (Source<T>)info.getSourceAndConverter().getSpimSource(), info );
}
else
{
Expand All @@ -579,14 +582,12 @@ public static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( fina
{
final String containerWithoutN5Scheme = n5URL.getContainerPath().replaceFirst( "^n5://", "" );
final N5Reader n5reader = new N5Factory().openReader( containerWithoutN5Scheme );
final String group = n5URL.getGroupPath();
final Source< T > source = (Source<T>)loadN5Source( n5reader, group, sharedQueue );

if( source != null )
sourceStateMap.put( source, new SourceInfo( setupId, isMoving, group ) );
final SourceInfo info = loadN5SourceInfo(bwData, n5reader, n5URL.getGroupPath(), sharedQueue, setupId, isMoving );
sourceStateMap.put( (Source<T>)info.getSourceAndConverter().getSpimSource(), info );
}
catch ( final Exception ignored )
{}

if ( sourceStateMap.isEmpty() )
{
final String containerPath = n5URL.getContainerPath();
Expand Down Expand Up @@ -662,7 +663,7 @@ public static < T > Map< Source< T >, SourceInfo > createSources( final BigWarpD
return createSources( bwdata, isMoving, setupId, rootPath, dataset, null );
}

private static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( final BigWarpData< T > bwdata, final boolean isMoving, final int setupId, final String rootPath, final String dataset, final AtomicReference< SpimData > returnMovingSpimData )
private static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( final BigWarpData< T > bwdata, final boolean isMoving, final int setupId, final String rootPath, final String dataset, final AtomicReference< SpimData > returnMovingSpimData )
{
final SharedQueue sharedQueue = new SharedQueue(Math.max(1, Runtime.getRuntime().availableProcessors() / 2));
if ( rootPath.endsWith( "xml" ) )
Expand Down Expand Up @@ -706,16 +707,57 @@ private static < T > LinkedHashMap< Source< T >, SourceInfo > createSources( fi
}
else
{
final LinkedHashMap< Source< T >, SourceInfo > map = new LinkedHashMap<>();
final Source< T > source = (Source<T>)loadN5Source( rootPath, dataset, sharedQueue );
final SourceInfo info = new SourceInfo( setupId, isMoving, dataset, () -> rootPath + "$" + dataset );
info.setSerializable( true );
map.put( source, info );
return map;
return makeMap( loadN5SourceInfo(bwdata, rootPath, dataset, sharedQueue, setupId, isMoving));
}
}

/**
 * Wraps a single {@link SourceInfo} in a one-entry ordered map keyed by its
 * source, marking the info as serializable first.
 *
 * @param <T> the source type
 * @param info the source info; its SourceAndConverter must already be set
 * @return a LinkedHashMap containing only this info's source-to-info entry
 */
@SuppressWarnings("unchecked")
private static <T> LinkedHashMap<Source<T>, SourceInfo> makeMap(final SourceInfo info) {

	info.setSerializable(true);
	final Source<T> source = (Source<T>)info.getSourceAndConverter().getSpimSource();
	final LinkedHashMap<Source<T>, SourceInfo> singleton = new LinkedHashMap<>();
	singleton.put(source, info);
	return singleton;
}

/**
 * Opens an N5 reader at the given root and loads a {@link SourceInfo} for the
 * given dataset, delegating to the {@link N5Reader} overload.
 *
 * @param <T> the source type
 * @param bwData the BigWarpData the opened source is registered with
 * @param n5Root path or uri of the n5 container root
 * @param n5Dataset path to the dataset inside the container
 * @param queue the shared queue used for (volatile) loading
 * @param sourceId id assigned to the resulting SourceInfo
 * @param moving whether this source is a moving image
 * @return the SourceInfo, or null if the container could not be opened
 */
public static < T extends NativeType<T> > SourceInfo loadN5SourceInfo( final BigWarpData<?> bwData, final String n5Root, final String n5Dataset, final SharedQueue queue,
final int sourceId, final boolean moving )
{
final N5Reader n5;
try
{
n5 = new N5Factory().openReader( n5Root );
}
catch ( final RuntimeException e ) {
// NOTE(review): prints the trace and returns null on open failure;
// callers that dereference the result without a null check will NPE — confirm
e.printStackTrace();
return null;
}
return loadN5SourceInfo( bwData, n5, n5Dataset, queue, sourceId, moving );
}

/**
 * Discovers metadata for the given dataset in an already-open {@link N5Reader},
 * opens it as a BDV source, and wraps it in a {@link SourceInfo}.
 * <p>
 * The info's uri is recorded as {@code <container-uri>$<dataset>} and its
 * SourceAndConverter is set before returning.
 *
 * @param <T> the source type
 * @param bwData the BigWarpData the opened source is registered with
 * @param n5 the open n5 reader
 * @param n5Dataset path to the dataset inside the container
 * @param queue the shared queue used for (volatile) loading
 * @param sourceId id assigned to the resulting SourceInfo
 * @param moving whether this source is a moving image
 * @return the SourceInfo for the opened source
 */
@SuppressWarnings("unchecked")
public static < T extends NativeType<T>> SourceInfo loadN5SourceInfo( final BigWarpData<?> bwData, final N5Reader n5, final String n5Dataset, final SharedQueue queue,
final int sourceId, final boolean moving )
{

// discover and parse metadata for the requested dataset; meta stays null
// when discovery fails or the dataset is not found
N5Metadata meta = null;
try
{
final N5DatasetDiscoverer discoverer = new N5DatasetDiscoverer( n5, N5DatasetDiscoverer.fromParsers( PARSERS ), N5DatasetDiscoverer.fromParsers( GROUP_PARSERS ) );
final N5TreeNode node = discoverer.discoverAndParseRecursive("");
meta = node.getDescendant(n5Dataset).map(N5TreeNode::getMetadata).orElse(null);
}
catch ( final IOException e )
{}
// NOTE(review): discovery failure is silently swallowed above; and if the
// open below returns null, sac.getSpimSource() will NPE — confirm intended

final SourceAndConverter<T> sac = (SourceAndConverter<T>)openN5VSourceAndConverter( bwData, n5, meta, queue);
final String uri = n5.getURI().toString() + "$" + n5Dataset;
final SourceInfo info = new SourceInfo(sourceId, moving, sac.getSpimSource().getName(), () -> uri );
info.setSourceAndConverter(sac);
return info;
}

@Deprecated
public static < T extends NativeType<T> > Source< T > loadN5Source( final String n5Root, final String n5Dataset, final SharedQueue queue )
{
final N5Reader n5;
Expand All @@ -731,6 +773,7 @@ public static < T extends NativeType<T> > Source< T > loadN5Source( final String
}

@SuppressWarnings("unchecked")
@Deprecated
public static < T extends NativeType<T>> Source< T > loadN5Source( final N5Reader n5, final String n5Dataset, final SharedQueue queue )
{

Expand Down Expand Up @@ -798,15 +841,43 @@ public static < T extends NativeType<T>, M extends N5Metadata > Source< T > open
return null;
}

public static < T extends NativeType<T> & NumericType<T>> Source< T > openN5V( final N5Reader n5, final MultiscaleMetadata< ? > multiMeta, final SharedQueue sharedQueue )
{
/**
 * Opens the first source described by the given multiscale metadata.
 *
 * @param <T> the source type
 * @param n5 the n5 reader
 * @param multiMeta the multiscale metadata
 * @param sharedQueue the shared queue
 * @return a source, or null if none could be opened
 * @deprecated use {@code openN5VSourceAndConverter} instead, which also
 *             returns the converter and can register with a BigWarpData
 */
@SuppressWarnings("unchecked")
@Deprecated
public static <T extends NativeType<T> & NumericType<T>> Source<T> openN5V( final N5Reader n5, final MultiscaleMetadata<?> multiMeta,
final SharedQueue sharedQueue) {

// delegate to the replacement with no BigWarpData registration
return (Source<T>)openN5VSourceAndConverter(null, n5, multiMeta, sharedQueue).getSpimSource();
}

public static <T extends NativeType<T> & NumericType<T>> SourceAndConverter<T> openN5VSourceAndConverter(
final BigWarpData<?> bwData,
final N5Reader n5,
final N5Metadata multiMeta,
final SharedQueue sharedQueue) {

final List<SourceAndConverter<T>> sources = new ArrayList<>();
final List<ConverterSetup> converterSetups = new ArrayList<>();
try {
N5Viewer.buildN5Sources(n5, new DataSelection(n5, Collections.singletonList(multiMeta)), sharedQueue, converterSetups, sources, BdvOptions.options());
if( sources.size() > 0 )
return (Source<T>)sources.get(0).getSpimSource();
} catch (final IOException e) { }
N5Viewer.buildN5Sources(n5, new DataSelection(n5, Collections.singletonList(multiMeta)), sharedQueue, converterSetups, sources,
BdvOptions.options());

if (sources.size() > 0) {
if( bwData != null ) {
bwData.sources.add((SourceAndConverter)sources.get(0));
bwData.converterSetups.add(converterSetups.get(0));
}
return sources.get(0);
}
} catch (final IOException e) {}

return null;
}
Expand Down
1 change: 1 addition & 0 deletions src/main/java/bigwarp/BigWarpRealExporter.java
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,7 @@ public RandomAccessibleInterval<?> exportRai(Source<?> src) {
src.getSourceTransform(0, 0, srcXfm);

// in pixel space
@SuppressWarnings("unchecked")
final RealRandomAccessible<T> raiRaw = (RealRandomAccessible<T>)src.getInterpolatedSource(0, 0, interp);

// the transform from world to new pixel coordinates
Expand Down
68 changes: 68 additions & 0 deletions src/main/java/bigwarp/FieldOfView.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
package bigwarp;

import bdv.viewer.Source;
import net.imglib2.FinalRealInterval;
import net.imglib2.RealInterval;
import net.imglib2.realtransform.AffineTransform3D;
import net.imglib2.realtransform.BoundingBoxEstimation;

/**
 * Static helpers for building and manipulating {@link RealInterval}s that
 * describe a field of view.
 */
public class FieldOfView {

	/**
	 * Returns the bounding box of the source's raw interval (level 0,
	 * timepoint 0) under an identity transform.
	 * <p>
	 * NOTE(review): despite the name, no calibration transform is applied —
	 * the identity transform means this is the pixel-space interval. Confirm
	 * whether {@code source.getSourceTransform} should be used here.
	 *
	 * @param source the source
	 * @return the bounding interval
	 */
	public static RealInterval getPhysicaInterval(final Source<?> source) {

		final AffineTransform3D tform = new AffineTransform3D();
		return BoundingBoxEstimation.corners(tform, source.getSource(0, 0));
	}

	/**
	 * Creates an interval from a min corner and a size.
	 *
	 * @param min per-dimension minimum
	 * @param size per-dimension extent
	 * @return the interval [min, min + size]
	 */
	public static RealInterval fromMinSize(final double[] min, final double[] size) {

		final int nd = size.length;
		final double[] max = new double[nd];
		for (int i = 0; i < nd; i++)
			max[i] = min[i] + size[i];

		return new FinalRealInterval(min, max);
	}

	/**
	 * Creates a physical-unit interval from a pixel-space min and size,
	 * scaled by the per-dimension resolution.
	 *
	 * @param minPixel per-dimension minimum in pixel units
	 * @param sizePixel per-dimension extent in pixel units
	 * @param resolution physical size of a pixel per dimension
	 * @return the physical interval
	 */
	public static RealInterval fromPixelMinSize(final double[] minPixel, final double[] sizePixel, final double[] resolution) {

		final int nd = sizePixel.length;
		final double[] min = new double[nd];
		final double[] max = new double[nd];
		for (int i = 0; i < nd; i++) {
			// bug fix: the original read min[i] (always zero, since min is
			// freshly allocated) instead of minPixel[i], so the minPixel
			// argument was silently ignored and min was always the origin
			min[i] = resolution[i] * minPixel[i];
			max[i] = resolution[i] * (minPixel[i] + sizePixel[i]);
		}

		return new FinalRealInterval(min, max);
	}

	/**
	 * Computes the physical interval covered by an image with the given
	 * resolution, offset, and pixel dimensions: [offset, offset + res * dims].
	 *
	 * @param resolution physical size of a pixel per dimension
	 * @param offset physical position of the min corner
	 * @param dimensions image size in pixels
	 * @return the physical interval
	 */
	public static RealInterval computeInterval(
			final double[] resolution,
			final double[] offset,
			final long[] dimensions) {

		final int nd = resolution.length;
		final double[] max = new double[nd];
		for (int i = 0; i < nd; i++) {
			max[i] = offset[i] + resolution[i] * dimensions[i];
		}
		return new FinalRealInterval(offset, max);
	}

	/**
	 * Expands (or, for negative amounts, shrinks) the interval by the given
	 * amount on both sides of every dimension.
	 *
	 * @param interval the interval
	 * @param amounts per-dimension expansion; a single value is broadcast to
	 *            every dimension
	 * @return the expanded interval
	 */
	public static RealInterval expand(
			final RealInterval interval,
			final double... amounts) {

		final int nd = interval.numDimensions();
		final double[] min = new double[nd];
		final double[] max = new double[nd];

		for (int i = 0; i < nd; i++) {
			// generalization: a single amount applies to all dimensions
			// (previously this threw for nd > 1 with one argument)
			final double amt = amounts.length == 1 ? amounts[0] : amounts[i];
			min[i] = interval.realMin(i) - amt;
			max[i] = interval.realMax(i) + amt;
		}
		return new FinalRealInterval(min, max);
	}

}
8 changes: 0 additions & 8 deletions src/main/java/bigwarp/landmarks/LandmarkTableModel.java
Original file line number Diff line number Diff line change
Expand Up @@ -1660,14 +1660,6 @@ public void setValueAt(Object value, int row, int col)
return;
}

if (DEBUG)
{
System.out.println("Setting value at " + row + "," + col
+ " to " + value
+ " (an instance of "
+ value.getClass() + ")");
}

if( col == NAMECOLUMN )
{
names.set(row, (String)value );
Expand Down
Loading

0 comments on commit b5eba1b

Please sign in to comment.