Commit ae6e872

chore(script): update BigWarp_N5Viewer script
1 parent 2ad5d82 commit ae6e872

1 file changed (+54 −84 lines)

scripts/BigWarp_N5Viewer.groovy

+54 −84
@@ -11,106 +11,76 @@
  * 2020-Jan-06 : Initial script
  * 2020-Jan-07 : Add pyramid support
  * 2020-Feb-13 : Switch to N5Viewer style loading
+ * 2024-Mar-07 : Use new BigWarp API
  *
  * @author John Bogovic
  */
 
 import java.lang.Exception;
-import java.util.regex.Pattern;
+import java.io.*;
+import java.util.*;
 
-import bigwarp.BigWarp;
-import bigwarp.BigWarpInit;
-import bigwarp.landmarks.LandmarkTableModel;
-
-import bdv.util.*;
-import bdv.util.volatiles.*;
+import bigwarp.*;
+import bdv.cache.SharedQueue;
 import bdv.export.*;
+import bdv.gui.*;
 import bdv.viewer.*;
-
-
-import net.imglib2.util.*;
-
-import mpicbg.spim.data.sequence.*;
+import net.imglib2.*;
+import net.imglib2.type.*;
+import net.imglib2.type.numeric.*;
 
 import org.janelia.saalfeldlab.n5.*;
-import org.janelia.saalfeldlab.n5.imglib2.*;
-import org.janelia.saalfeldlab.n5.bdv.*;
-import org.janelia.saalfeldlab.n5.bdv.dataaccess.*;
-
-def isPyramid( N5FSReader n5, String group )
-{
-	/*
-	 * return true if there exists at least one dataset under the group
-	 * of the form "s#" where # is a number
-	 */
-	def pattern = Pattern.compile( /s\d/ );
-
-	numMatching = 0;
-	subDatasets = n5.list( group );
-	for( d in subDatasets )
-	{
-		numMatching += (d =~ pattern).size()
-	}
-	return (numMatching >= 1);
-}
-
-
-def makeSourcesN5Viewer( File n5Base, SharedQueue sharedQueue )
-{
-	try
-	{
-		dataAccessFactory = new DataAccessFactory( DataAccessType.FILESYSTEM );
-	}
-	catch ( final DataAccessException e )
-	{
-		return null;
-	}
-
-	n5Path = n5Base.getAbsolutePath();
-	final N5Reader n5 = dataAccessFactory.createN5Reader( n5Path );
-	final N5ExportMetadataReader metadata = N5ExportMetadata.openForReading( n5 );
-
-	final int numChannels = metadata.getNumChannels();
+import org.janelia.saalfeldlab.n5.ij.*;
+
+
+def makeSources(
+		BigWarpData bwData,
+		File baseDir,
+		int baseId,
+		boolean isMoving,
+		SharedQueue sharedQueue )
+throws Exception {
+	String n5Path = baseDir.getAbsolutePath();
+	N5Importer.N5ViewerReaderFun n5fun = new N5Importer.N5ViewerReaderFun();
+	N5Reader n5 = n5fun.apply(n5Path);
+	String dataset = new N5Importer.N5BasePathFun().apply(n5Path);
 
-	final String displayName = metadata.getName() != null ? metadata.getName() : "";
-	final int numTimepoints = 1;
-	Prefs.showScaleBar( true );
-
-
-	sources = [];
-	for ( int c = 0; c < numChannels; ++c )
-	{
-		final Source<?> volatileSource = N5MultiscaleSource.getVolatileSource( n5, c, displayName, sharedQueue );
-		sources.add( volatileSource );
-	}
-	return sources as Source[];
+	Source src = BigWarpInit.loadN5Source(n5, dataset, sharedQueue);
+	BigWarpInit.add( bwData, BigWarpInit.createSources(bwData, src, baseId, isMoving));
+	return Collections.singletonList(src);
 }
 
-procs = Runtime.getRuntime().availableProcessors() / 2;
-if( procs < 1 )
-	procs = 1;
-
-sharedQueue = new SharedQueue( (int) procs );
+int procs = ij.Prefs.getThreads();
+procs = ( procs < 1 ) ? 1 : procs;
 
-// build moving and target sources
-movingSources = makeSourcesN5Viewer( movingN5Base, sharedQueue );
-targetSources = makeSourcesN5Viewer( targetN5Base, sharedQueue );
 
-srcNames = []
-movingSources.each{ x -> srcNames.add( x.getName() )};
-targetSources.each{ x -> srcNames.add( x.getName() )};
+BigWarpData bwData = BigWarpInit.initData();
+SharedQueue sharedQueue = new SharedQueue( procs );
 
-bwData = BigWarpInit.createBigWarpData( movingSources, targetSources, srcNames as String[] );
 
-try
-{
-	bw = new BigWarp( bwData, "bigwarp", new ProgressWriterConsole() );
-
-	// load the landmark points if they exist
-	if ( landmarksFile != null )
-		bw.getLandmarkPanel().getTableModel().load( landmarksFile );
+// build moving sources
+try {
+	makeSources(bwData, movingN5Base, 0, true, sharedQueue);
+} catch (Exception e) {
+	System.err.println("error making moving sources");
+	e.printStackTrace();
 }
-catch(Exception e)
-{
-	e.printStackTrace();
+
+// build target sources
+try {
+	makeSources(bwData, targetN5Base, bwData.numMovingSources(), false, sharedQueue);
+} catch (Exception e) {
+	System.err.println("error making target sources");
+	e.printStackTrace();
 }
+
+try {
+	BigWarpViewerOptions opts = (BigWarpViewerOptions)BigWarpViewerOptions.options().numRenderingThreads(procs);
+	BigWarp bw = new BigWarp(bwData, opts, new ProgressWriterConsole());
+
+	// load the landmark points if they exist
+	if (landmarksFile != null)
+		bw.getLandmarkPanel().getTableModel().load(landmarksFile);
+} catch (Exception e) {
+	e.printStackTrace();
+}
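
Note on the API change: the removed code discovered scale pyramids and channels by hand (isPyramid, N5ExportMetadata, a per-channel N5MultiscaleSource loop) through the old DataAccessFactory path, while the added code delegates container opening to N5Importer's reader functions and source registration to BigWarpInit. The sketch below assembles the new flow into one plain script, using only calls that appear in the added lines above; the #@ File parameter block for movingN5Base, targetN5Base, and landmarksFile sits above the hunk shown here, so its exact form is an assumption.

#@ File (label="Moving N5 container") movingN5Base
#@ File (label="Target N5 container") targetN5Base
#@ File (label="Landmarks file", required=false) landmarksFile

// Minimal sketch of the updated flow (new API only), assuming the parameter block above.
import bigwarp.*;
import bdv.cache.SharedQueue;
import bdv.export.*;
import bdv.gui.*;
import bdv.viewer.*;
import org.janelia.saalfeldlab.n5.*;
import org.janelia.saalfeldlab.n5.ij.*;

// Open the container with N5Importer's reader function, split the path into
// container root + dataset, load it as a BigWarp source, and register it
// as moving or target depending on the flag.
def makeSources( BigWarpData bwData, File baseDir, int baseId, boolean isMoving, SharedQueue sharedQueue ) {
	String n5Path = baseDir.getAbsolutePath();
	N5Reader n5 = new N5Importer.N5ViewerReaderFun().apply( n5Path );
	String dataset = new N5Importer.N5BasePathFun().apply( n5Path );
	Source src = BigWarpInit.loadN5Source( n5, dataset, sharedQueue );
	BigWarpInit.add( bwData, BigWarpInit.createSources( bwData, src, baseId, isMoving ) );
}

int procs = Math.max( 1, ij.Prefs.getThreads() );   // ImageJ's thread preference, at least 1
BigWarpData bwData = BigWarpInit.initData();
SharedQueue sharedQueue = new SharedQueue( procs );

// Moving source ids start at 0; target ids start after the moving ones.
makeSources( bwData, movingN5Base, 0, true, sharedQueue );
makeSources( bwData, targetN5Base, bwData.numMovingSources(), false, sharedQueue );

// Launch BigWarp and preload landmarks if a file was given.
BigWarpViewerOptions opts = (BigWarpViewerOptions) BigWarpViewerOptions.options().numRenderingThreads( procs );
BigWarp bw = new BigWarp( bwData, opts, new ProgressWriterConsole() );
if ( landmarksFile != null )
	bw.getLandmarkPanel().getTableModel().load( landmarksFile );

Compared to the old path, the thread count now comes from ImageJ's preference rather than half the available processors, and per-source ids replace the name array that createBigWarpData required.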
