diff --git a/.gitignore b/.gitignore
index e6ed443baa23e5fd37e6963a07d0840211c3c62a..fa22ae82739dff0ec1ab1de56f62a43ff0fe01cb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,7 +9,8 @@
 # Maven #
 /target/
 
+# IntelliJ
 .idea/
 classes/
-bigdataviewer-server.iml
 src/main/resources/META-INF/MANIFEST.MF
+*.iml
diff --git a/pom.xml b/pom.xml
index 7cadc59703f4e2632faef37b6b323bb68f305935..b12c0abeb9461460a554caaf14725a0434e18d29 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,13 +5,13 @@
 	<parent>
 		<groupId>org.scijava</groupId>
 		<artifactId>pom-scijava</artifactId>
-		<version>26.0.0</version>
+		<version>28.0.0</version>
 		<relativePath />
 	</parent>
 
 	<groupId>sc.fiji</groupId>
 	<artifactId>bigdataviewer-server</artifactId>
-	<version>2.1.1-SNAPSHOT</version>
+	<version>3.0.1-SNAPSHOT</version>
 
 	<name>BigDataServer</name>
 	<description>A web server for BigDataViewer datasets.</description>
@@ -77,6 +77,7 @@
 
 	<properties>
 		<package-name>bdv.server</package-name>
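+		<!-- Consumed by pom-scijava to set the Main-Class manifest entry of the built jar. -->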
+		<main-class>bdv.server.BigDataServer</main-class>
 		<license.licenseName>gpl_v3</license.licenseName>
 		<license.copyrightOwners>BigDataViewer developers.</license.copyrightOwners>
 
@@ -87,8 +88,10 @@
 		<stringtemplate.version>3.2.1</stringtemplate.version>
 
 		<!-- TODO: Remove these version pins and update the code. -->
-		<bigdataviewer-core.version>3.0.3</bigdataviewer-core.version>
-		<imglib2.version>3.3.0</imglib2.version>
+		<bigdataviewer-core.version>9.0.3</bigdataviewer-core.version>
+		<imglib2.version>5.9.0</imglib2.version>
+		<imglib2-cache.version>1.0.0-beta-13</imglib2-cache.version>
+		<spim_data.version>2.2.4</spim_data.version>
 	</properties>
 
 	<repositories>
@@ -122,5 +125,67 @@
 			<artifactId>DataCompressor</artifactId>
 			<version>1.0-SNAPSHOT</version>
 		</dependency>
-    </dependencies>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-simple</artifactId>
+			<version>1.7.28</version>
+		</dependency>
+	</dependencies>
+
+	<profiles>
+		<profile>
+			<id>fatjar</id>
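+			<!-- Activate with "mvn -Pfatjar package" to build an uber jar bundling all dependencies. -->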
+			<build>
+				<plugins>
+					<!-- Maven shade for Uber Jar -->
+					<!-- https://maven.apache.org/plugins/maven-shade-plugin/shade-mojo.html -->
+					<!-- https://databricks.gitbooks.io/databricks-spark-knowledge-base/content/troubleshooting/missing_dependencies_in_jar_files.html -->
+					<plugin>
+						<groupId>org.apache.maven.plugins</groupId>
+						<artifactId>maven-shade-plugin</artifactId>
+						<configuration>
+							<!-- Do not minimize for now to speed up packaging. -->
+							<transformers combine.children="append">
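+								<!-- AppendingTransformer concatenates the SciJava/spim_data annotation index files instead of letting one jar overwrite another's. -->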
+								<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+									<resource>META-INF/json/org.scijava.plugin.Plugin</resource>
+								</transformer>
+								<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+									<resource>META-INF/json/mpicbg.spim.data.generic.sequence.ImgLoaderIo</resource>
+								</transformer>
+							</transformers>
+							<!--<minimizeJar>true</minimizeJar> -->
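+							<!-- Strip dependency signature files; stale signatures would otherwise invalidate the shaded jar. -->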
+							<filters>
+								<filter>
+									<artifact>*:*</artifact>
+									<excludes>
+										<exclude>META-INF/*.SF</exclude>
+										<exclude>META-INF/*.DSA</exclude>
+										<exclude>META-INF/*.RSA</exclude>
+									</excludes>
+								</filter>
+							</filters>
+							<!-- Relocate commons-compress so the shaded copy cannot clash with other versions on the classpath. -->
+							<relocations>
+								<relocation>
+									<pattern>org.apache.commons.compress</pattern>
+									<shadedPattern>org.janelia.saalfeldlab.org.apache.commons.compress</shadedPattern>
+								</relocation>
+							</relocations>
+						</configuration>
+						<!-- binds by default to package phase -->
+						<executions>
+							<execution>
+								<phase>package</phase>
+								<goals>
+									<goal>shade</goal>
+								</goals>
+							</execution>
+						</executions>
+					</plugin>
+					<!-- Maven shade end -->
+				</plugins>
+			</build>
+		</profile>
+	</profiles>
+
 </project>
diff --git a/src/main/java/bdv/server/BigDataServer.java b/src/main/java/bdv/server/BigDataServer.java
index 12e3e875b0967d60fe43c8757f9dca3a8a1b8e04..f5cd0074dc0cc3278d149fa7bf4387a893eec72c 100644
--- a/src/main/java/bdv/server/BigDataServer.java
+++ b/src/main/java/bdv/server/BigDataServer.java
@@ -58,9 +58,20 @@ import java.util.Optional;
 public class BigDataServer {
     private static final org.eclipse.jetty.util.log.Logger LOG = Log.getLogger(BigDataServer.class);
 
-    //    private static ScalarQuantizer quantizer;
+    public static class ExtendedCompressionOptions extends CompressionOptions {
+        private int compressFromMipmapLevel;
+
+        public int getCompressFromMipmapLevel() {
+            return compressFromMipmapLevel;
+        }
+
+        public void setCompressFromMipmapLevel(final int compressFromMipmapLevel) {
+            this.compressFromMipmapLevel = compressFromMipmapLevel;
+        }
+    }
 
     static Parameters getDefaultParameters() {
+
         final int port = 8080;
         String hostname;
         try {
@@ -69,25 +80,68 @@ public class BigDataServer {
             hostname = "localhost";
         }
         final String thumbnailDirectory = null;
+        final String baseUrl = null;
         final boolean enableManagerContext = false;
         return new Parameters(port,
                               hostname,
                               new HashMap<String, String>(),
                               thumbnailDirectory,
+                              baseUrl,
                               enableManagerContext,
                               new ExtendedCompressionOptions());
     }
 
-    public static class ExtendedCompressionOptions extends CompressionOptions {
-        private int compressFromMipmapLevel;
+    public static void main(final String[] args) throws Exception {
+        System.setProperty("org.eclipse.jetty.util.log.class", "org.eclipse.jetty.util.log.StdErrLog");
 
-        public int getCompressFromMipmapLevel() {
-            return compressFromMipmapLevel;
-        }
+        final Parameters params = processOptions(args, getDefaultParameters());
+        if (params == null)
+            return;
 
-        public void setCompressFromMipmapLevel(final int compressFromMipmapLevel) {
-            this.compressFromMipmapLevel = compressFromMipmapLevel;
+        final String thumbnailsDirectoryName = getThumbnailDirectoryPath(params);
+
+        // Threadpool for multiple connections
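+        // QueuedThreadPool(maxThreads, minThreads): up to 200 worker threads, at least 8 kept alive.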
+        final Server server = new Server(new QueuedThreadPool(200, 8));
+
+        // ServerConnector configuration
+        final ServerConnector connector = new ServerConnector(server);
+        connector.setHost(params.getHostname());
+        connector.setPort(params.getPort());
+        LOG.info("Set connectors: " + connector);
+        server.setConnectors(new Connector[]{connector});
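+        // Prefer an explicitly configured base URL (e.g. behind a reverse proxy); otherwise derive it from the connector.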
+        final String baseURL = params.getBaseUrl() != null ? params.getBaseUrl() :
+                "http://" + server.getURI().getHost() + ":" + params.getPort();
+        System.out.println("baseURL = " + baseURL);
+
+        // Handler initialization
+        final HandlerCollection handlers = new HandlerCollection();
+
+        final ContextHandlerCollection datasetHandlers = createHandlers(baseURL,
+                                                                        params.getDatasets(),
+                                                                        thumbnailsDirectoryName,
+                                                                        params.getCompressionParams());
+        handlers.addHandler(datasetHandlers);
+        handlers.addHandler(new JsonDatasetListHandler(server, datasetHandlers));
+
+        Handler handler = handlers;
+        if (params.enableManagerContext()) {
+            // Add Statistics bean to the connector
+            final ConnectorStatistics connectorStats = new ConnectorStatistics();
+            connector.addBean(connectorStats);
+
+            // create StatisticsHandler wrapper and ManagerHandler
+            final StatisticsHandler statHandler = new StatisticsHandler();
+            handlers.addHandler(new ManagerHandler(baseURL, server, connectorStats, statHandler, datasetHandlers, thumbnailsDirectoryName));
+            statHandler.setHandler(handlers);
+            handler = statHandler;
         }
+
+        LOG.info("Set handler: " + handler);
+        server.setHandler(handler);
+        LOG.info("Server Base URL: " + baseURL);
+        LOG.info("BigDataServer starting");
+        server.start();
+        server.join();
     }
 
     /**
@@ -105,19 +159,26 @@ public class BigDataServer {
 
         private final String thumbnailDirectory;
 
-        private final ExtendedCompressionOptions compressionParam;
+        private final String baseUrl;
 
         private final boolean enableManagerContext;
 
-        Parameters(final int port, final String hostname, final Map<String, String> datasetNameToXml,
-                   final String thumbnailDirectory, final boolean enableManagerContext,
-                   final ExtendedCompressionOptions customCompressionParameters) {
+        private final ExtendedCompressionOptions compressionParam;
+
+        Parameters(final int port,
+                   final String hostname,
+                   final Map<String, String> datasetNameToXml,
+                   final String thumbnailDirectory,
+                   final String baseUrl,
+                   final boolean enableManagerContext,
+                   final ExtendedCompressionOptions extendedCompressionOptions) {
             this.port = port;
             this.hostname = hostname;
             this.datasetNameToXml = datasetNameToXml;
             this.thumbnailDirectory = thumbnailDirectory;
+            this.baseUrl = baseUrl;
             this.enableManagerContext = enableManagerContext;
-            this.compressionParam = customCompressionParameters;
+            this.compressionParam = extendedCompressionOptions;
         }
 
         public int getPort() {
@@ -128,6 +189,10 @@ public class BigDataServer {
             return hostname;
         }
 
+        public String getBaseUrl() {
+            return baseUrl;
+        }
+
         public String getThumbnailDirectory() {
             return thumbnailDirectory;
         }
@@ -150,14 +215,15 @@ public class BigDataServer {
         }
     }
 
-
     @SuppressWarnings("static-access")
     static private Parameters processOptions(final String[] args, final Parameters defaultParameters) throws IOException {
+        final String ENABLE_COMPRESSION = "qcmp";
+        final String CompressFromKey = "compressFrom";
+
         // create Options object
         final Options options = new Options();
 
         final String cmdLineSyntax = "BigDataServer [OPTIONS] [NAME XML] ...\n";
-        final String CompressFromKey = "compressFrom";
 
         final String description =
                 "Serves one or more XML/HDF5 datasets for remote access over HTTP.\n" +
@@ -165,7 +231,6 @@ public class BigDataServer {
                         "dataset should be made accessible and XML is the path to the XML file of the dataset.\n" +
                         "If -qcmp option is specified, these options are enabled:\u001b[35m-sq,-vq,-b,-cbc\u001b[0m\n";
 
-
         options.addOption(OptionBuilder
                                   .withDescription("Hostname of the server.\n(default: " + defaultParameters.getHostname() + ")")
                                   .hasArg()
@@ -193,16 +258,14 @@ public class BigDataServer {
                                   .withArgName("DIRECTORY")
                                   .create("t"));
 
-        final String ENABLE_COMPRESSION = "qcmp";
-
-        final Option test = OptionBuilder
-                .withDescription("Enable QCMP compression")
-                .create(ENABLE_COMPRESSION);
-
+        options.addOption(OptionBuilder
+                                  .withDescription("Base URL under which the server is reachable, e.g. when it runs behind a reverse proxy.")
+                                  .hasArg()
+                                  .withArgName("BASEURL")
+                                  .create("b"));
 
         int optionOrder = 0;
-        options.addOption(new OptionWithOrder(OptionBuilder
-                                                      .withDescription("Enable QCMP compression")
+        options.addOption(new OptionWithOrder(OptionBuilder.withDescription("Enable QCMP compression")
                                                       .create(ENABLE_COMPRESSION), ++optionOrder));
 
 
@@ -228,15 +291,16 @@ public class BigDataServer {
             final String portString = cmd.getOptionValue("p", Integer.toString(defaultParameters.getPort()));
             final int port = Integer.parseInt(portString);
 
-
             // Getting server name option
             final String serverName = cmd.getOptionValue("s", defaultParameters.getHostname());
 
             // Getting thumbnail directory option
             final String thumbnailDirectory = cmd.getOptionValue("t", defaultParameters.getThumbnailDirectory());
 
-            final HashMap<String, String> datasets = new HashMap<String, String>(defaultParameters.getDatasets());
+            // Getting base url option
+            final String baseUrl = cmd.getOptionValue("b", defaultParameters.getBaseUrl());
 
+            final HashMap<String, String> datasets = new HashMap<String, String>(defaultParameters.getDatasets());
 
             final boolean enableQcmpCompression = cmd.hasOption(ENABLE_COMPRESSION);
             final ExtendedCompressionOptions compressionOptions = new ExtendedCompressionOptions();
@@ -306,7 +370,6 @@ public class BigDataServer {
                     enableManagerContext = true;
             }
 
-
             if (cmd.hasOption("d")) {
                 // process the file given with "-d"
                 final String datasetFile = cmd.getOptionValue("d");
@@ -351,6 +414,7 @@ public class BigDataServer {
                                   serverName,
                                   datasets,
                                   thumbnailDirectory,
+                                  baseUrl,
                                   enableManagerContext,
                                   enableQcmpCompression ? compressionOptions : null);
         } catch (final ParseException | IllegalArgumentException e) {
@@ -416,73 +480,23 @@ public class BigDataServer {
     }
 
     private static ContextHandlerCollection createHandlers(final String baseURL,
-                                                           final Parameters params,
-                                                           final String thumbnailsDirectoryName) throws SpimDataException, IOException {
-
+                                                           final Map<String, String> dataSet,
+                                                           final String thumbnailsDirectoryName,
+                                                           final ExtendedCompressionOptions compressionOps) throws SpimDataException,
+            IOException {
         final ContextHandlerCollection handlers = new ContextHandlerCollection();
 
-        final Map<String, String> dataSet = params.getDatasets();
         for (final Entry<String, String> entry : dataSet.entrySet()) {
             final String name = entry.getKey();
             final String xmlpath = entry.getValue();
             final String context = "/" + name;
-            final CellHandler ctx = new CellHandler(baseURL + context + "/", xmlpath, name,
-                                                    thumbnailsDirectoryName,
-                                                    params.getCompressionParams());
-
+            final CellHandler ctx = new CellHandler(baseURL + context + "/", xmlpath,
+                                                    name, thumbnailsDirectoryName,
+                                                    compressionOps);
             ctx.setContextPath(context);
             handlers.addHandler(ctx);
         }
 
         return handlers;
     }
-
-    public static void main(final String[] args) throws Exception {
-        System.setProperty("org.eclipse.jetty.util.log.class", "org.eclipse.jetty.util.log.StdErrLog");
-
-        final Parameters params = processOptions(args, getDefaultParameters());
-        if (params == null)
-            return;
-
-        final String thumbnailsDirectoryName = getThumbnailDirectoryPath(params);
-
-        // Threadpool for multiple connections
-        final Server server = new Server(new QueuedThreadPool(200, 8));
-
-        // ServerConnector configuration
-        final ServerConnector connector = new ServerConnector(server);
-        connector.setHost(params.getHostname());
-        connector.setPort(params.getPort());
-        LOG.info("Set connectors: " + connector);
-        server.setConnectors(new Connector[]{connector});
-        final String baseURL = "http://" + server.getURI().getHost() + ":" + params.getPort();
-
-        // Handler initialization
-        final HandlerCollection handlers = new HandlerCollection();
-
-        final ContextHandlerCollection datasetHandlers = createHandlers(baseURL, params, thumbnailsDirectoryName);
-        handlers.addHandler(datasetHandlers);
-        handlers.addHandler(new JsonDatasetListHandler(server, datasetHandlers));
-
-        Handler handler = handlers;
-        if (params.enableManagerContext()) {
-            // Add Statistics bean to the connector
-            final ConnectorStatistics connectorStats = new ConnectorStatistics();
-            connector.addBean(connectorStats);
-
-            // create StatisticsHandler wrapper and ManagerHandler
-            final StatisticsHandler statHandler = new StatisticsHandler();
-            handlers.addHandler(new ManagerHandler(baseURL, server, connectorStats, statHandler, datasetHandlers, thumbnailsDirectoryName));
-            statHandler.setHandler(handlers);
-            handler = statHandler;
-        }
-
-
-        LOG.info("Set handler: " + handler);
-        server.setHandler(handler);
-        LOG.info("Server Base URL: " + baseURL);
-        LOG.info("BigDataServer starting");
-        server.start();
-        server.join();
-    }
 }
diff --git a/src/main/java/bdv/server/CellHandler.java b/src/main/java/bdv/server/CellHandler.java
index b96500544a6de1399dcde2f396b9c3be8b03d203..927abd9f7ad034a58f5afc2b581bc558dd87ab3e 100644
--- a/src/main/java/bdv/server/CellHandler.java
+++ b/src/main/java/bdv/server/CellHandler.java
@@ -11,12 +11,7 @@ import azgracompress.io.InputData;
 import azgracompress.io.MemoryOutputStream;
 import azgracompress.utilities.Stopwatch;
 import bdv.BigDataViewer;
-import bdv.cache.CacheHints;
-import bdv.cache.LoadingStrategy;
-import bdv.img.cache.VolatileCell;
 import bdv.img.cache.VolatileGlobalCellCache;
-import bdv.img.cache.VolatileGlobalCellCache.Key;
-import bdv.img.cache.VolatileGlobalCellCache.VolatileCellLoader;
 import bdv.img.hdf5.Hdf5ImageLoader;
 import bdv.img.hdf5.Hdf5VolatileShortArrayLoader;
 import bdv.img.remote.AffineTransform3DJsonSerializer;
@@ -28,7 +23,11 @@ import bdv.spimdata.XmlIoSpimDataMinimal;
 import bdv.util.ThumbnailGenerator;
 import com.google.gson.GsonBuilder;
 import mpicbg.spim.data.SpimDataException;
+import net.imglib2.cache.CacheLoader;
+import net.imglib2.cache.LoaderCache;
+import net.imglib2.cache.ref.SoftRefLoaderCache;
 import net.imglib2.img.basictypeaccess.volatiles.array.VolatileShortArray;
+import net.imglib2.img.cell.Cell;
 import net.imglib2.realtransform.AffineTransform3D;
 import org.eclipse.jetty.server.Request;
 import org.eclipse.jetty.server.handler.ContextHandler;
@@ -51,18 +50,71 @@ import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Stack;
+import java.util.concurrent.ExecutionException;
 
 public class CellHandler extends ContextHandler {
-    private final long transferedDataSize = 0;
-
     private static final org.eclipse.jetty.util.log.Logger LOG = Log.getLogger(CellHandler.class);
 
-    private final int counter = 0;
-    private final VolatileGlobalCellCache cache;
+    /**
+     * Key for a cell identified by timepoint, setup, level, and index
+     * (flattened spatial coordinate).
+     */
+    public static class Key {
+        private final int timepoint;
+
+        private final int setup;
+
+        private final int level;
+
+        private final long index;
+
+        private final String[] parts;
+
+        private final int hashcode;
+
+        /**
+         * Create a Key for the specified cell. Note that {@code parts} is only
+         * carried along for the loader and is not used for
+         * {@code hashCode()}/{@code equals()}.
+         *
+         * @param timepoint timepoint coordinate of the cell
+         * @param setup     setup coordinate of the cell
+         * @param level     level coordinate of the cell
+         * @param index     index of the cell (flattened spatial coordinate of the
+         *                  cell)
+         * @param parts     split request path segments holding the cell dimensions
+         *                  and minimum used by the loader
+         */
+        public Key(final int timepoint, final int setup, final int level, final long index, final String[] parts) {
+            this.timepoint = timepoint;
+            this.setup = setup;
+            this.level = level;
+            this.index = index;
+            this.parts = parts;
+
+            int value = Long.hashCode(index);
+            value = 31 * value + level;
+            value = 31 * value + setup;
+            value = 31 * value + timepoint;
+            hashcode = value;
+        }
+
+        @Override
+        public boolean equals(final Object other) {
+            if (this == other)
+                return true;
+            if (!(other instanceof Key))
+                return false;
+            final Key that = (Key) other;
+            return (this.index == that.index) && (this.timepoint == that.timepoint) && (this.setup == that.setup) && (this.level == that.level);
+        }
+
+        @Override
+        public int hashCode() {
+            return hashcode;
+        }
+    }
 
-    private final Hdf5VolatileShortArrayLoader loader;
+    private final CacheLoader<Key, Cell<?>> loader;
 
-    private final CacheHints cacheHints;
+    private final LoaderCache<Key, Cell<?>> cache;
 
     /**
      * Full path of the dataset xml file this {@link CellHandler} is serving.
@@ -125,19 +177,32 @@ public class CellHandler extends ContextHandler {
         return uncompressedAccumulation;
     }
 
-    public CellHandler(final String baseUrl, final String xmlFilename, final String datasetName, final String thumbnailsDirectory,
-                       final BigDataServer.ExtendedCompressionOptions compressionParams) throws SpimDataException, IOException {
 
+    public CellHandler(final String baseUrl,
+                       final String xmlFilename,
+                       final String datasetName,
+                       final String thumbnailsDirectory,
+                       final BigDataServer.ExtendedCompressionOptions compressionOps) throws SpimDataException, IOException {
         final XmlIoSpimDataMinimal io = new XmlIoSpimDataMinimal();
         final SpimDataMinimal spimData = io.load(xmlFilename);
         final SequenceDescriptionMinimal seq = spimData.getSequenceDescription();
-
         final Hdf5ImageLoader imgLoader = (Hdf5ImageLoader) seq.getImgLoader();
-        this.compressionParams = compressionParams;
+        this.compressionParams = compressionOps;
 
-        cache = imgLoader.getCacheControl();
-        loader = imgLoader.getShortArrayLoader();
-        cacheHints = new CacheHints(LoadingStrategy.BLOCKING, 0, false);
+        final Hdf5VolatileShortArrayLoader cacheArrayLoader = imgLoader.getShortArrayLoader();
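+        // Loader used on cache miss: cell dimensions (parts[5..7]) and minimum (parts[8..10]) come from the request path stored in the key.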
+        loader = key -> {
+            final int[] cellDims = new int[]{
+                    Integer.parseInt(key.parts[5]),
+                    Integer.parseInt(key.parts[6]),
+                    Integer.parseInt(key.parts[7])};
+            final long[] cellMin = new long[]{
+                    Long.parseLong(key.parts[8]),
+                    Long.parseLong(key.parts[9]),
+                    Long.parseLong(key.parts[10])};
+            return new Cell<>(cellDims, cellMin, cacheArrayLoader.loadArray(key.timepoint, key.setup, key.level, cellDims, cellMin));
+        };
+
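+        // Soft-reference cache replaces the former VolatileGlobalCellCache; entries are reclaimed under memory pressure.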
+        cache = new SoftRefLoaderCache<>();
 
         // dataSetURL property is used for providing the XML file by replace
         // SequenceDescription>ImageLoader>baseUrl
@@ -145,14 +210,12 @@ public class CellHandler extends ContextHandler {
         baseFilename = xmlFilename.endsWith(".xml") ? xmlFilename.substring(0, xmlFilename.length() - ".xml".length()) : xmlFilename;
         dataSetURL = baseUrl;
 
-        final int numberOfMipmapLevels = imgLoader.getSetupImgLoader(0).numMipmapLevels();
-
         datasetXmlString = buildRemoteDatasetXML(io, spimData, baseUrl);
         metadataJson = buildMetadataJsonString(imgLoader, seq);
         settingsXmlString = buildSettingsXML(baseFilename);
         thumbnailFilename = createThumbnail(spimData, baseFilename, datasetName, thumbnailsDirectory);
 
-
+        final int numberOfMipmapLevels = imgLoader.getSetupImgLoader(0).numMipmapLevels();
         initializeCompression(numberOfMipmapLevels);
     }
 
@@ -221,31 +284,11 @@ public class CellHandler extends ContextHandler {
         cachedBuffers.push(buffer);
     }
 
-    private short[] getCachedVolatileCellData(final String[] parts, final int[] cellDims, final int level) {
-        final int index = Integer.parseInt(parts[1]);
-        final int timepoint = Integer.parseInt(parts[2]);
-        final int setup = Integer.parseInt(parts[3]);
-        final Key key = new VolatileGlobalCellCache.Key(timepoint, setup, level, index);
-        VolatileCell<?> cell = cache.getLoadingVolatileCache().getIfPresent(key, cacheHints);
-
-        final long[] cellMin = new long[]{
-                Long.parseLong(parts[8]),
-                Long.parseLong(parts[9]),
-                Long.parseLong(parts[10])};
-        if (cell == null) {
-            cell = cache.getLoadingVolatileCache().get(key,
-                                                       cacheHints,
-                                                       new VolatileCellLoader<>(loader, timepoint, setup, level, cellDims, cellMin));
-        }
-        //noinspection unchecked
-        return ((VolatileCell<VolatileShortArray>) cell).getData().getCurrentStorageArray();
-    }
-
     private FlatBufferInputData createInputDataObject(final short[] data, final int[] cellDims) {
         return new FlatBufferInputData(data, new V3i(cellDims[0], cellDims[1], cellDims[2]), InputData.PixelType.Gray16, this.baseFilename);
     }
 
-    private void responseWithShortArray(final HttpServletResponse response, final short[] data) throws IOException {
+    private void respondWithShortArray(final HttpServletResponse response, final short[] data) throws IOException {
         final OutputStream responseStream = response.getOutputStream();
 
         final byte[] buf = new byte[2 * data.length];
@@ -259,6 +302,22 @@ public class CellHandler extends ContextHandler {
         responseStream.close();
     }
 
+    private short[] getCachedVolatileCellData(final String[] parts, final int level) {
+        final int index = Integer.parseInt(parts[1]);
+        final int timepoint = Integer.parseInt(parts[2]);
+        final int setup = Integer.parseInt(parts[3]);
+
+        final Key key = new Key(timepoint, setup, level, index, parts);
+        short[] data;
+        try {
+            final Cell<?> cell = cache.get(key, loader);
+            data = ((VolatileShortArray) cell.getData()).getCurrentStorageArray();
+        } catch (final ExecutionException e) {
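+            // Loading failed; return an empty array rather than propagating the exception to the client.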
+            data = new short[0];
+        }
+        return data;
+    }
+
     @Override
     public void doHandle(final String target,
                          final Request baseRequest,
@@ -281,28 +340,23 @@ public class CellHandler extends ContextHandler {
             respondWithString(baseRequest, response, "application/xml", datasetXmlString);
             return;
         }
+
         final String[] parts = cellString.split("/");
         if (parts[0].equals("cell")) {
-            final int level = Integer.parseInt(parts[4]);
-            final int[] cellDims = new int[]{
-                    Integer.parseInt(parts[5]),
-                    Integer.parseInt(parts[6]),
-                    Integer.parseInt(parts[7])};
-
-            final short[] data = getCachedVolatileCellData(parts, cellDims, level);
 
-            responseWithShortArray(response, data);
+            final int level = Integer.parseInt(parts[4]);
+            final short[] data = getCachedVolatileCellData(parts, level);
+            respondWithShortArray(response, data);
 
             response.setContentType("application/octet-stream");
             response.setStatus(HttpServletResponse.SC_OK);
             baseRequest.setHandled(true);
-
         } else if (parts[0].equals("cell_qcmp")) {
             final Stopwatch stopwatch = Stopwatch.startNew();
             final int mipmapLevel = Integer.parseInt(parts[4]);
             final int[] cellDims = new int[]{Integer.parseInt(parts[5]), Integer.parseInt(parts[6]), Integer.parseInt(parts[7])};
 
-            final short[] data = getCachedVolatileCellData(parts, cellDims, mipmapLevel);
+            final short[] data = getCachedVolatileCellData(parts, mipmapLevel);
             assert (compressors != null && !compressors.isEmpty());
 
             final FlatBufferInputData inputData = createInputDataObject(data, cellDims);
@@ -349,6 +403,23 @@ public class CellHandler extends ContextHandler {
         }
     }
 
+    private void provideThumbnail(final Request baseRequest, final HttpServletResponse response) throws IOException {
+        final Path path = Paths.get(thumbnailFilename);
+        if (Files.exists(path)) {
+            final byte[] imageData = Files.readAllBytes(path);
+            if (imageData != null) {
+                response.setContentType("image/png");
+                response.setContentLength(imageData.length);
+                response.setStatus(HttpServletResponse.SC_OK);
+                baseRequest.setHandled(true);
+
+                final OutputStream os = response.getOutputStream();
+                os.write(imageData);
+                os.close();
+            }
+        }
+    }
+
     private void respondWithCompressionInfo(final Request baseRequest, final HttpServletResponse response) throws IOException {
         if (cachedCodebooks == null || cachedCodebooks.isEmpty()) {
             LOG.info("QCMP initialization request was refused, QCMP compression is not enabled.");
@@ -369,23 +440,6 @@ public class CellHandler extends ContextHandler {
         baseRequest.setHandled(true);
     }
 
-    private void provideThumbnail(final Request baseRequest, final HttpServletResponse response) throws IOException {
-        final Path path = Paths.get(thumbnailFilename);
-        if (Files.exists(path)) {
-            final byte[] imageData = Files.readAllBytes(path);
-            if (imageData != null) {
-                response.setContentType("image/png");
-                response.setContentLength(imageData.length);
-                response.setStatus(HttpServletResponse.SC_OK);
-                baseRequest.setHandled(true);
-
-                final OutputStream os = response.getOutputStream();
-                os.write(imageData);
-                os.close();
-            }
-        }
-    }
-
     public String getXmlFile() {
         return xmlFilename;
     }
@@ -506,4 +560,4 @@ public class CellHandler extends ContextHandler {
         ow.write(string);
         ow.close();
     }
 }
diff --git a/src/main/java/bdv/util/ThumbnailGenerator.java b/src/main/java/bdv/util/ThumbnailGenerator.java
index 92c0f322bce7e29f9406fd1d063dba49b65e25f5..f8d64a61efc32e9f4be13b7bee888a998077ea9a 100644
--- a/src/main/java/bdv/util/ThumbnailGenerator.java
+++ b/src/main/java/bdv/util/ThumbnailGenerator.java
@@ -114,7 +114,7 @@ public class ThumbnailGenerator
 		final int numGroups = 10;
 		final ArrayList< SourceGroup > groups = new ArrayList< SourceGroup >( numGroups );
 		for ( int i = 0; i < numGroups; ++i )
-			groups.add( new SourceGroup( "", null ) );
+			groups.add( new SourceGroup( "" ) );
 
 		state = new ViewerState( sources, groups, numTimePoints );
 		if ( !sources.isEmpty() )