diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8a55178f49580e5d2bf15326d5188fd29a77e092..92164cb14d1c5adfbf9ed6ac0eb0d40bb2111823 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -15,8 +15,7 @@ test_enpt:
     - export PYTHONPATH=$PYTHONPATH:/root  # /root <- here are the sicor tables
 
     # update py_tools_ds
-    - pip install 'py_tools_ds>=0.14.2'  # FIXME remove as soon as git runner has been rebuilt (included in geoarray)
-    - pip install 'geoarray>=0.8.9'  # FIXME remove as soon as git runner has been rebuilt (included in geoarray)
+    - pip install 'py_tools_ds>=0.14.8'
 
     # update sicor
     # - conda install -y -q -c conda-forge basemap
@@ -69,10 +68,10 @@ test_enpt_install:
 
     # install not pip-installable deps of geoarray
     - conda install -y -c conda-forge numpy scikit-image matplotlib pandas gdal rasterio pyproj basemap shapely
-    - conda install -y -c conda-forge 'icu=58.*' lxml  # fixes bug for conda-forge gdal build
+    - conda install -y -c conda-forge 'icu=58.*'  # fixes bug for conda-forge gdal build
 
     # install py_tools_ds
-    - pip install 'py_tools_ds>=0.14.2'  # FIXME remove as soon as git runner has been rebuilt (included in geoarray)
+    - pip install 'py_tools_ds>=0.14.8'
 
     # install sicor
     - conda install -y -q -c conda-forge pygrib h5py pytables pyfftw numba llvmlite scikit-learn
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 18e590a94514c51e3bb53e5843e7bd54d2606f19..eebf49209e8956e6e1db8cef2bcc7551fd0c8e85 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -10,7 +10,7 @@ Karl Segl <segl@gfz-potsdam.de>
 Contributors
 ------------
 
-* André Hollstein
 * Daniel Scheffler
-* Maximilian Brell
-* Theres Küster
+* André Hollstein
+* Stephane Guillaso
+* Niklas Bohn
diff --git a/HISTORY.rst b/HISTORY.rst
index 625d01fe583cf973bd0dfc3c041bae02964702f9..c9d8dab6df63c886a242b104355f301e122c5342 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -6,13 +6,33 @@ History
 -------------------
 
 New features:
+
 * TBD
 
 
+0.7.0 (2019-01-21)
+------------------
+
+New features / improvements:
+
+* Added a lot of software tests
+* Added output writer for EnMAP Level-2 data
+* Added metadata class for EnMAP Level-2 data
+* Revised dead pixel correction (now 40-50 times faster; added spatial interpolation)
+* Added support for dead pixel correction based on 3D dead pixel maps
+* Added orthorectification module
+* Added support for 3D (band-wise) geometry layers
+* Added 3D geolayer generation based on band-wise RPC coefficients.
+* Updated L1B reader to match DLR L1B format
+* Added subsets of official DLR test data
+* Improved DEM processor (added overlap and geographic datum check)
+
+
 0.6.0 (2018-12-13)
 -------------------
 
 New features:
+
 * Updated test datasets (bugfix for wrong corner coordinates)
 * Added dem in map geometry to test data
 * Added spatial_transform module to transform between sensor and map geometry
@@ -25,6 +45,7 @@ New features:
 ------------------
 
 New features:
+
 * Added algorithm to automatically append a second EnMAP image to the main image in order to fill the along-track gap
 * Updated test data (updated metadata header file, now 2 EnMAP subset scenes)
 * Updated metadata reader
@@ -34,6 +55,7 @@ New features:
 0.4.0 (2018-06-01)
 ------------------
 New features:
+
 * Implemented dead pixel corrector
 * Implemented SICOR atmospheric correction
 
@@ -42,6 +64,7 @@ New features:
 ----------
 
 New features:
+
 * TBD
 
 
diff --git a/bin/enpt_cli.py b/bin/enpt_cli.py
index d758fb22e6f23e3e164013376be611308b889cbc..eb890e5e361fafef3c73bf2e4ea49da37a7ca020 100644
--- a/bin/enpt_cli.py
+++ b/bin/enpt_cli.py
@@ -65,6 +65,9 @@ def get_enpt_argparser():
         help='Enable VNIR/SWIR co-registration')
     add('--path_reference_image', type=str, default=None,
         help='Reference image for co-registration.')
+    add('--enable_ac', type=bool, default=True,
+        help="Enable atmospheric correction using SICOR algorithm (default: True). If False, the L2A output contains "
+             "top-of-atmosphere reflectance")
     add('--sicor_cache_dir', type=str, default=None,
         help='SICOR cache directory')
     add('--auto_download_ecmwf', type=bool, default=False,
@@ -79,11 +82,14 @@ def get_enpt_argparser():
         help='Enable dead pixel correction')
     add('--deadpix_P_algorithm', type=str, default="spectral",
         help="Algorithm for dead pixel correction ('spectral' or 'spatial')")
-    add('--deadpix_P_interp', type=str, default="linear",
-        help="Interpolation algorithm to be used during dead pixel correction "
-             "('linear', 'bilinear', 'cubic', 'spline')")
-    add('--ortho_resampAlg', type=int, default=1,
-        help='Ortho-rectification resampling algorithm')
+    add('--deadpix_P_interp_spectral', type=str, default="linear",
+        help="Spectral interpolation algorithm to be used during dead pixel correction "
+             "('linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic')")
+    add('--deadpix_P_interp_spatial', type=str, default="linear",
+        help="Spatial interpolation algorithm to be used during dead pixel correction "
+             "('linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic')")
+    add('--ortho_resampAlg', type=str, default='bilinear',
+        help="Ortho-rectification resampling algorithm ('nearest', 'bilinear', 'gauss')")
 
     # link parser to run function
     parser.set_defaults(func=run_job)
diff --git a/enpt/execution/controller.py b/enpt/execution/controller.py
index 59184e5afd13750ce0d9d4b5390cc9a4cddd7320..a1001f2e73bb3b53bcf439ec4ff31f55b7352cf4 100644
--- a/enpt/execution/controller.py
+++ b/enpt/execution/controller.py
@@ -10,7 +10,7 @@ import warnings
 
 from ..options.config import EnPTConfig
 from ..io.reader import L1B_Reader
-from ..model.images import EnMAPL1Product_SensorGeo
+from ..model.images import EnMAPL1Product_SensorGeo, EnMAPL2Product_MapGeo  # noqa: F401
 
 
 class EnPT_Controller(object):
@@ -31,26 +31,31 @@ class EnPT_Controller(object):
         self._finalizer = weakref.finalize(self, self._cleanup, self.tempDir,
                                            warn_message="Implicitly cleaning up {!r}".format(self))
 
-        # read the L1B data
-        self.L1_obj = self.read_L1B_data()
+        # defaults
+        self.L1_obj = None  # type: EnMAPL1Product_SensorGeo
+        self.L2_obj = None  # type: EnMAPL2Product_MapGeo
 
-    def extract_zip_archive(self, path_zipfile: str) -> str:
+    def extract_zip_archive(self, path_zipfile: str, subdir: str = '') -> str:
         """Extract the given EnMAP image zip archive and return the L1B image root directory path.
 
         :param path_zipfile:    /path/to/zipfile.zip
+        :param subdir:          subdirectory name to be created within temporary directory
         :return:                /tmp/tmpk2qp0yri/rootdir/
         """
-        with zipfile.ZipFile(path_zipfile, "r") as zf:
-            zf.extractall(self.tempDir)
+        outdir = os.path.join(self.tempDir, subdir) if self.cfg.is_dlr_dataformat else self.tempDir
 
-        outdir = os.path.join(self.tempDir, os.path.basename(path_zipfile).split('.zip')[0])
+        with zipfile.ZipFile(path_zipfile, "r") as zf:
+            zf.extractall(outdir)
 
         if not os.path.isdir(outdir):
             raise NotADirectoryError(outdir)
 
-        return outdir
+        if self.cfg.is_dlr_dataformat:
+            return outdir
+        else:
+            return os.path.join(self.tempDir, os.path.basename(path_zipfile).split('.zip')[0])
 
-    def read_L1B_data(self) -> EnMAPL1Product_SensorGeo:
+    def read_L1B_data(self) -> None:
         """Read the provider L1B data given in config and return an EnMAP image object."""
         path_enmap_image = self.cfg.path_l1b_enmap_image
         path_enmap_image_gapfill = self.cfg.path_l1b_enmap_image_gapfill
@@ -63,18 +68,16 @@ class EnPT_Controller(object):
 
         # extract L1B image archive if needed
         if path_enmap_image.endswith('.zip'):
-            path_enmap_image = self.extract_zip_archive(path_enmap_image)
+            path_enmap_image = self.extract_zip_archive(path_enmap_image, subdir='image_main')
 
         # extract L1B gap fill image archive if needed
         if path_enmap_image_gapfill and path_enmap_image_gapfill.endswith('.zip'):
-            path_enmap_image_gapfill = self.extract_zip_archive(path_enmap_image_gapfill)
+            path_enmap_image_gapfill = self.extract_zip_archive(path_enmap_image_gapfill, subdir='image_gapfill')
 
         # run the reader
         RD = L1B_Reader(config=self.cfg)
-        L1_obj = RD.read_inputdata(root_dir_main=path_enmap_image, root_dir_ext=path_enmap_image_gapfill,
-                                   n_line_ext=self.cfg.n_lines_to_append)
-
-        return L1_obj
+        self.L1_obj = RD.read_inputdata(root_dir_main=path_enmap_image, root_dir_ext=path_enmap_image_gapfill,
+                                        n_line_ext=self.cfg.n_lines_to_append)
 
     def run_toaRad2toaRef(self):
         """Run conversion from TOA radiance to TOA reflectance."""
@@ -95,19 +98,36 @@ class EnPT_Controller(object):
         """Run atmospheric correction only."""
         self.L1_obj.run_AC()
 
+    def run_orthorectification(self):
+        """Run orthorectification to transform L1 sensor geometry image to L2 map geometry."""
+        # run transformation from sensor to map geometry
+        self.L2_obj = EnMAPL2Product_MapGeo.from_L1B_sensorgeo(config=self.cfg, enmap_ImageL1=self.L1_obj)
+
     def write_output(self):
         if self.cfg.output_dir:
-            self.L1_obj.save(self.cfg.output_dir)
+            try:
+                self.L2_obj.save(self.cfg.output_dir)
+            except NotImplementedError:
+                self.L2_obj.logger.warning('Currently L2A EnMAP images cannot be written to disk. '
+                                           'Writing level 1 image instead.')
+                self.L1_obj.save(self.cfg.output_dir)
 
     def run_all_processors(self):
         """Run all processors at once."""
         try:
+            self.read_L1B_data()
             if self.cfg.run_deadpix_P:
                 self.L1_obj.correct_dead_pixels()
             # self.run_toaRad2toaRef()  # this is only needed for geometry processor but AC expects radiance
             self.run_dem_processor()
-            self.run_atmospheric_correction()
+            if self.cfg.enable_ac:
+                self.run_atmospheric_correction()
+            else:
+                self.L1_obj.logger.info('Skipping atmospheric correction as configured and '
+                                        'computing top-of-atmosphere reflectance instead.')
+                self.run_toaRad2toaRef()
             self.run_geometry_processor()
+            self.run_orthorectification()
             self.write_output()
         finally:
             self.cleanup()
diff --git a/enpt/io/reader.py b/enpt/io/reader.py
index b49ccb1487c1b03add5941a976a156b1ce54dee4..aaf2448232368a1e135b492c347d327bd799fdec 100644
--- a/enpt/io/reader.py
+++ b/enpt/io/reader.py
@@ -41,16 +41,12 @@ class L1B_Reader(object):
                        root_dir_main,
                        root_dir_ext: str = None,
                        n_line_ext: int = None,
-                       lon_lat_smpl: tuple = (15, 15),
                        compute_snr: bool = True):
-        # All information are read from data itself now
-        # In case of multiple files, temporary files are created to store them.
         """
         Read L1B EnMAP data. Extend the image by adding a second image [entire, partial]
         :param root_dir_main: Root directory of the main EnMAP Level-1B product
         :param root_dir_ext:  Root directory of the extended EnMAP Level-1B product [optional]
         :param n_line_ext:    Number of lines to be added to the main image [if None, use the whole image]
-        :param lon_lat_smpl:  number of sampling points in lon, lat fields
         :param compute_snr:   whether to compute SNR or not (default: True)
         :return: instance of EnMAPL1Product_SensorGeo
         """
@@ -59,16 +55,24 @@ class L1B_Reader(object):
         self.logger.info("Reading Input Data")
 
         # Get a new instance of the EnMAPL1Product_SensorGeo for the main image
-        l1b_main_obj = EnMAPL1Product_SensorGeo(root_dir_main, config=self.cfg, logger=self.logger,
-                                                lon_lat_smpl=lon_lat_smpl)
+        l1b_main_obj = EnMAPL1Product_SensorGeo(root_dir_main, config=self.cfg, logger=self.logger)
 
         # associate raster attributes with file links (raster data is read lazily / on demand)
         l1b_main_obj.vnir.data = l1b_main_obj.paths.vnir.data
         l1b_main_obj.vnir.mask_clouds = l1b_main_obj.paths.vnir.mask_clouds
-        l1b_main_obj.vnir.deadpixelmap = l1b_main_obj.paths.vnir.deadpixelmap
         l1b_main_obj.swir.data = l1b_main_obj.paths.swir.data
         l1b_main_obj.swir.mask_clouds = l1b_main_obj.paths.swir.mask_clouds
-        l1b_main_obj.swir.deadpixelmap = l1b_main_obj.paths.swir.deadpixelmap
+
+        try:  # FIXME remove as soon as DLR pixelmask is correct
+            l1b_main_obj.vnir.deadpixelmap = l1b_main_obj.paths.vnir.deadpixelmap
+            l1b_main_obj.swir.deadpixelmap = l1b_main_obj.paths.swir.deadpixelmap
+        except ValueError as e:
+            self.logger.warning("Unexpected dimensions of dead pixel mask: %s. Setting all pixels to 'normal'." % e)
+            l1b_main_obj.vnir.deadpixelmap = np.zeros(l1b_main_obj.vnir.data.shape)
+            l1b_main_obj.swir.deadpixelmap = np.zeros(l1b_main_obj.swir.data.shape)
+
+        # NOTE: We leave the quicklook out here because merging the quicklook of adjacent scenes might cause a
+        #       brightness jump that can be avoided by recomputing the quicklook after DN/radiance conversion.
 
         # compute radiance
         l1b_main_obj.DN2TOARadiance()
@@ -78,8 +82,26 @@ class L1B_Reader(object):
         # NOTE: We do the following hypothesis:
         #         - The dead pixel map will not change when acquiring 2 adjacent images.
         if root_dir_ext:
-            l1b_ext_obj = EnMAPL1Product_SensorGeo(root_dir_ext, config=self.cfg, lon_lat_smpl=lon_lat_smpl)
-            l1b_main_obj.append_new_image(l1b_ext_obj, n_line_ext)
+            l1b_ext_obj = EnMAPL1Product_SensorGeo(root_dir_ext, config=self.cfg)
+            # TODO simplify redundant code
+            l1b_ext_obj.vnir.data = l1b_ext_obj.paths.vnir.data
+            l1b_ext_obj.vnir.mask_clouds = l1b_ext_obj.paths.vnir.mask_clouds
+            l1b_ext_obj.swir.data = l1b_ext_obj.paths.swir.data
+            l1b_ext_obj.swir.mask_clouds = l1b_ext_obj.paths.swir.mask_clouds
+
+            try:  # FIXME remove as soon as DLR pixelmask is correct
+                l1b_ext_obj.vnir.deadpixelmap = l1b_ext_obj.paths.vnir.deadpixelmap
+                l1b_ext_obj.swir.deadpixelmap = l1b_ext_obj.paths.swir.deadpixelmap
+            except ValueError as e:
+                self.logger.warning("Unexpected dimensions of dead pixel mask: %s. "
+                                    "Setting all pixels to 'normal'." % e)
+                l1b_ext_obj.vnir.deadpixelmap = np.zeros(l1b_ext_obj.vnir.data.shape)
+                l1b_ext_obj.swir.deadpixelmap = np.zeros(l1b_ext_obj.swir.data.shape)
+
+            l1b_ext_obj.DN2TOARadiance()
+            l1b_main_obj.vnir.append_new_image(l1b_ext_obj.vnir, n_line_ext)
+            l1b_main_obj.swir.append_new_image(l1b_ext_obj.swir, n_line_ext)
+
+            # l1b_main_obj.append_new_image(l1b_ext_obj, n_line_ext)
 
         # compute SNR
         if compute_snr:
@@ -94,6 +116,15 @@ class L1B_Reader(object):
                     l1b_main_obj.swir.detector_meta.calc_snr_from_radiance(rad_data=l1b_main_obj.swir.data,
                                                                            dir_snr_models=tmpDir)
 
+        # compute geolayer if not already done
+        if l1b_main_obj.meta.vnir.lons is None or l1b_main_obj.meta.vnir.lats is None:
+            l1b_main_obj.meta.vnir.lons, l1b_main_obj.meta.vnir.lats = \
+                l1b_main_obj.meta.vnir.compute_geolayer_for_cube()
+
+        if l1b_main_obj.meta.swir.lons is None or l1b_main_obj.meta.swir.lats is None:
+            l1b_main_obj.meta.swir.lons, l1b_main_obj.meta.swir.lats = \
+                l1b_main_obj.meta.swir.compute_geolayer_for_cube()
+
         # Return the l1b_main_obj
         return l1b_main_obj
 
diff --git a/enpt/model/images.py b/enpt/model/images.py
index 0422ae1973bcba8fbca2cbb85ec9ba656aa5942e..0984818c01fe84835c9d7f93c9e4b031902cac8d 100644
--- a/enpt/model/images.py
+++ b/enpt/model/images.py
@@ -3,15 +3,13 @@
 
 import logging
 from types import SimpleNamespace
+from typing import Tuple  # noqa: F401
 import numpy as np
 from os import path, makedirs
-from lxml import etree
 from glob import glob
 import utm
 from scipy.interpolate import interp2d
 
-# Use to generate preview
-import imageio
 # noinspection PyPackageRequirements
 from skimage import exposure  # contained in package requirements as scikit-image
 
@@ -19,9 +17,11 @@ from geoarray import GeoArray, NoDataMask, CloudMask
 
 from ..utils.logging import EnPT_Logger
 from ..model.metadata import EnMAP_Metadata_L1B_SensorGeo, EnMAP_Metadata_L1B_Detector_SensorGeo
+from ..model.metadata import EnMAP_Metadata_L2A_MapGeo  # noqa: F401  # only used for type hint
 from ..options.config import EnPTConfig
 from ..processors.dead_pixel_correction import Dead_Pixel_Corrector
 from ..processors.dem_preprocessing import DEM_Processor
+from ..processors.spatial_transform import compute_mapCoords_within_sensorGeoDims
 
 
 ################
@@ -196,7 +196,7 @@ class _EnMAP_Image(object):
 
             self._mask_clouds_confidence = cnfArr
         else:
-            del self._mask_clouds_confidence
+            del self.mask_clouds_confidence
 
     @mask_clouds_confidence.deleter
     def mask_clouds_confidence(self):
@@ -275,25 +275,51 @@ class _EnMAP_Image(object):
         """
         if self._deadpixelmap is not None:
             self._deadpixelmap.arr = self._deadpixelmap[:].astype(np.bool)  # ensure boolean map
-            return self._deadpixelmap
+
+        return self._deadpixelmap
 
     @deadpixelmap.setter
     def deadpixelmap(self, *geoArr_initArgs):
         if geoArr_initArgs[0] is not None:
             dpm = GeoArray(*geoArr_initArgs)
 
-            if dpm.shape != (self.data.bands, self.data.cols):
+            if dpm.ndim == 3 and dpm.shape != self.data.shape:
+                raise ValueError("The 'deadpixelmap' GeoArray can only be instanced with a 3D array with the same size "
+                                 "like _EnMAP_Image.arr, i.e.: %s "
+                                 "Received %s." % (str(self.data.shape), str(dpm.shape)))
+            elif dpm.ndim == 2 and dpm.shape != (self.data.bands, self.data.cols):
                 raise ValueError("The 'deadpixelmap' GeoArray can only be instanced with an array with the size "
-                                 "'bands x columns' of the GeoArray _EnMAP_Image.arr. Got %s." % str(dpm.shape))
+                                 "'bands x columns' of the GeoArray _EnMAP_Image.arr. "
+                                 "Received %s. Expected %s" % (str(dpm.shape), str((self.data.bands, self.data.cols))))
 
             self._deadpixelmap = dpm
         else:
-            del self._deadpixelmap
+            del self.deadpixelmap
 
     @deadpixelmap.deleter
     def deadpixelmap(self):
         self._deadpixelmap = None
 
+    def generate_quicklook(self, bands2use: Tuple[int, int, int]) -> GeoArray:
+        """
+        Generate an image quicklook (returned as a GeoArray; nothing is written to disk)
+
+        :param bands2use:   (red, green, blue) band indices of self.data to be used for quicklook image, e.g., (3, 2, 1)
+        :return: GeoArray
+        """
+        p2 = np.percentile(self.data[:, :, bands2use[0]], 2)
+        p98 = np.percentile(self.data[:, :, bands2use[0]], 98)
+        red_rescaled = exposure.rescale_intensity(self.data[:, :, bands2use[0]], (p2, p98))
+        p2 = np.percentile(self.data[:, :, bands2use[1]], 2)
+        p98 = np.percentile(self.data[:, :, bands2use[1]], 98)
+        green_rescaled = exposure.rescale_intensity(self.data[:, :, bands2use[1]], (p2, p98))
+        p2 = np.percentile(self.data[:, :, bands2use[2]], 2)
+        p98 = np.percentile(self.data[:, :, bands2use[2]], 98)
+        blue_rescaled = exposure.rescale_intensity(self.data[:, :, bands2use[2]], (p2, p98))
+        pix = np.dstack((red_rescaled, green_rescaled, blue_rescaled))
+        pix = np.uint8(pix * 255)
+
+        return GeoArray(pix)
 
 #######################################
 # EnPT EnMAP objects in sensor geometry
@@ -341,61 +367,178 @@ class EnMAP_Detector_SensorGeo(_EnMAP_Image):
     def get_paths(self):
         """
         Get all file paths associated with the current instance of EnMAP_Detector_SensorGeo
-        These information are reading from the detector_meta
+        This information is read from detector_meta.
         :return: paths as SimpleNamespace
         """
         paths = SimpleNamespace()
         paths.root_dir = self._root_dir
-        paths.metaxml = glob(path.join(self._root_dir, "*_header.xml"))[0]
         paths.data = path.join(self._root_dir, self.detector_meta.data_filename)
-        paths.mask_clouds = path.join(self._root_dir, self.detector_meta.cloud_mask_filename)
-        paths.deadpixelmap = path.join(self._root_dir, self.detector_meta.dead_pixel_filename)
+
+        paths.mask_clouds = path.join(self._root_dir, self.detector_meta.cloud_mask_filename) \
+            if self.detector_meta.cloud_mask_filename else None
+        paths.deadpixelmap = path.join(self._root_dir, self.detector_meta.dead_pixel_filename) \
+            if self.detector_meta.dead_pixel_filename else None
+
         paths.quicklook = path.join(self._root_dir, self.detector_meta.quicklook_filename)
+
         return paths
 
     def correct_dead_pixels(self):
         """Correct dead pixels with respect to the dead pixel mask."""
-        self.logger.info("Correcting dead pixels of %s detector..." % self.detector_name)
+        algo = self.cfg.deadpix_P_algorithm
+        method_spectral, method_spatial = self.cfg.deadpix_P_interp_spectral, self.cfg.deadpix_P_interp_spatial
+        self.logger.info("Correcting dead pixels of %s detector...\n"
+                         "Used algorithm / interpolation: %s / %s"
+                         % (self.detector_name, algo, method_spectral if algo == 'spectral' else method_spatial))
 
         self.data = \
-            Dead_Pixel_Corrector(algorithm=self.cfg.deadpix_P_algorithm,
-                                 interp=self.cfg.deadpix_P_interp,
+            Dead_Pixel_Corrector(algorithm=algo,
+                                 interp_spectral=method_spectral,
+                                 interp_spatial=method_spatial,
                                  logger=self.logger)\
-            .correct(self.data, self.deadpixelmap, progress=False if self.cfg.disable_progress_bars else True)
+            .correct(self.data, self.deadpixelmap)
 
     def get_preprocessed_dem(self):
         if self.cfg.path_dem:
             self.logger.info('Pre-processing DEM for %s...' % self.detector_name)
-            DP = DEM_Processor(self.cfg.path_dem, CPUs=self.cfg.CPUs)
-            DP.fill_gaps()
+            DP = DEM_Processor(self.cfg.path_dem, enmapIm_cornerCoords=tuple(zip(self.detector_meta.lon_UL_UR_LL_LR,
+                                                                                 self.detector_meta.lat_UL_UR_LL_LR)),
+                               CPUs=self.cfg.CPUs)
+            DP.fill_gaps()  # FIXME this will also be needed at other places
 
             R, C = self.data.shape[:2]
             if DP.dem.is_map_geo:
-                # FIXME replace linear interpolation by native geolayers
                 lons = self.detector_meta.lons
                 lats = self.detector_meta.lats
 
-                msg = 'Unable to use the full 3D information of geolayers for transforming the DEM '\
-                      'to sensor geometry. Using first band of %s array.'
-                if self.detector_meta.lons.ndim > 2:
-                    self.logger.warning(msg % 'longitude')
+                if not (lons.ndim == 2 and lats.ndim == 2) and not (lons.ndim == 3 and lats.ndim == 3):
+                    raise ValueError((lons.ndim, lats.ndim), 'Geolayer must be either 2- or 3-dimensional.')
+
+                msg_bandinfo = ''
+                if lons.ndim == 3:
                     lons = lons[:, :, 0]
-                if self.detector_meta.lats.ndim > 2:
-                    self.logger.warning(msg % 'latitude')
                     lats = lats[:, :, 0]
-
-                if lons.shape != self.data.shape:
-                    lons = self.detector_meta.interpolate_corners(*self.detector_meta.lon_UL_UR_LL_LR, nx=C, ny=R)
-                if lats.shape != self.data.shape:
-                    lats = self.detector_meta.interpolate_corners(*self.detector_meta.lat_UL_UR_LL_LR, nx=C, ny=R)
-
-                self.logger.info(('Transforming %s DEM to sensor geometry...' % self.detector_name))
+                    msg_bandinfo = ' (using first band of %s geolayer)' % self.detector_name
+                else:
+                    # 2D geolayer
+                    # FIXME replace linear interpolation by native geolayers
+                    if lons.shape != self.data.shape:
+                        lons = self.detector_meta.interpolate_corners(*self.detector_meta.lon_UL_UR_LL_LR, nx=C, ny=R)
+                    if lats.shape != self.data.shape:
+                        lats = self.detector_meta.interpolate_corners(*self.detector_meta.lat_UL_UR_LL_LR, nx=C, ny=R)
+
+                self.logger.info(('Transforming DEM to %s sensor geometry%s...' % (self.detector_name, msg_bandinfo)))
                 self.dem = DP.to_sensor_geometry(lons=lons, lats=lats)
             else:
                 self.dem = DP.dem
 
         return self.dem
 
+    def append_new_image(self, img2: 'EnMAP_Detector_SensorGeo', n_lines: int = None) -> None:
+        # TODO convert method to function?
+        """
+        Check whether a second image can be appended below the first image.
+        In this version we assume that the second image is appended at the bottom of the first one.
+        If the images fit together, the metadata and raster data of this instance are extended accordingly.
+
+        :param img2:    second EnMAP detector image to be appended
+        :param n_lines: number of lines to be added
+        :return: None
+        """
+        if self.cfg.is_dlr_dataformat:
+            basename_img1 = self.detector_meta.data_filename.split('-SPECTRAL_IMAGE')[0] + '::%s' % self.detector_name
+            basename_img2 = img2.detector_meta.data_filename.split('-SPECTRAL_IMAGE')[0] + '::%s' % img2.detector_name
+        else:
+            basename_img1 = path.basename(self._root_dir)
+            basename_img2 = path.basename(img2._root_dir)
+
+        self.logger.info("Check new image for %s: %s " % (self.detector_name, basename_img2))
+
+        distance_min = 27.0
+        distance_max = 34.0
+
+        # check bottom left
+        x1, y1, _, _ = utm.from_latlon(self.detector_meta.lat_UL_UR_LL_LR[2], self.detector_meta.lon_UL_UR_LL_LR[2])
+        x2, y2, _, _ = utm.from_latlon(img2.detector_meta.lat_UL_UR_LL_LR[0], img2.detector_meta.lon_UL_UR_LL_LR[0])
+        distance_left = np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
+
+        # check bottom right
+        x1, y1, _, _ = utm.from_latlon(self.detector_meta.lat_UL_UR_LL_LR[3], self.detector_meta.lon_UL_UR_LL_LR[3])
+        x2, y2, _, _ = utm.from_latlon(img2.detector_meta.lat_UL_UR_LL_LR[1], img2.detector_meta.lon_UL_UR_LL_LR[1])
+        distance_right = np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
+
+        if distance_min < distance_left < distance_max and distance_min < distance_right < distance_max:
+            self.logger.info("Append new image to %s: %s" % (self.detector_name, basename_img2))
+        else:
+            self.logger.warning("%s and %s don't fit to be appended." % (basename_img1, basename_img2))
+            return
+
+        # set new number of line
+        n_lines = n_lines or img2.detector_meta.nrows
+
+        if n_lines > img2.detector_meta.nrows:
+            self.logger.warning("n_lines (%s) exceeds the total number of lines of the second image" % n_lines)
+            self.logger.warning("Setting n_lines to the number of lines of the second image")
+            n_lines = img2.detector_meta.nrows
+
+        if n_lines < 50:  # TODO: determine these values
+            self.logger.warning("A minimum of 50 lines is required, only %s were selected" % n_lines)
+            self.logger.warning("Setting the number of lines to 50")
+            n_lines = 50
+
+        self.detector_meta.nrows += n_lines
+
+        # Create new corner coordinate
+        if self.cfg.is_dlr_dataformat:
+            enmapIm_cornerCoords = tuple(zip(img2.detector_meta.lon_UL_UR_LL_LR,
+                                             img2.detector_meta.lat_UL_UR_LL_LR))
+            dem_validated = DEM_Processor(img2.cfg.path_dem,
+                                          enmapIm_cornerCoords=enmapIm_cornerCoords).dem
+            LL, LR = compute_mapCoords_within_sensorGeoDims(
+                rpc_coeffs=list(img2.detector_meta.rpc_coeffs.values())[0],  # RPC coeffs of first band of the detector
+                dem=dem_validated,
+                enmapIm_cornerCoords=enmapIm_cornerCoords,
+                enmapIm_dims_sensorgeo=(img2.detector_meta.nrows, img2.detector_meta.ncols),
+                sensorgeoCoords_YX=[(n_lines - 1, 0),  # LL
+                                    (n_lines - 1, img2.detector_meta.ncols - 1)]  # LR
+            )
+
+            self.detector_meta.lon_UL_UR_LL_LR[2], self.detector_meta.lat_UL_UR_LL_LR[2] = LL
+            self.detector_meta.lon_UL_UR_LL_LR[3], self.detector_meta.lat_UL_UR_LL_LR[3] = LR
+        else:
+            # lats
+            ff = interp2d(x=[0, 1],
+                          y=[0, 1],
+                          z=[[img2.detector_meta.lat_UL_UR_LL_LR[0], img2.detector_meta.lat_UL_UR_LL_LR[1]],
+                             [img2.detector_meta.lat_UL_UR_LL_LR[2], img2.detector_meta.lat_UL_UR_LL_LR[3]]],
+                          kind='linear')
+            self.detector_meta.lat_UL_UR_LL_LR[2] = np.array(ff(0, int(n_lines / img2.detector_meta.nrows)))[0]
+            self.detector_meta.lat_UL_UR_LL_LR[3] = np.array(ff(1, int(n_lines / img2.detector_meta.nrows)))[0]
+            self.detector_meta.lats = self.detector_meta.interpolate_corners(*self.detector_meta.lat_UL_UR_LL_LR,
+                                                                             self.detector_meta.ncols,
+                                                                             self.detector_meta.nrows)
+            # lons
+            ff = interp2d(x=[0, 1],
+                          y=[0, 1],
+                          z=[[img2.detector_meta.lon_UL_UR_LL_LR[0], img2.detector_meta.lon_UL_UR_LL_LR[1]],
+                             [img2.detector_meta.lon_UL_UR_LL_LR[2], img2.detector_meta.lon_UL_UR_LL_LR[3]]],
+                          kind='linear')
+            self.detector_meta.lon_UL_UR_LL_LR[2] = np.array(ff(0, int(n_lines / img2.detector_meta.nrows)))[0]
+            self.detector_meta.lon_UL_UR_LL_LR[3] = np.array(ff(1, int(n_lines / img2.detector_meta.nrows)))[0]
+            self.detector_meta.lons = self.detector_meta.interpolate_corners(*self.detector_meta.lon_UL_UR_LL_LR,
+                                                                             self.detector_meta.ncols,
+                                                                             self.detector_meta.nrows)
+
+        # append the raster data
+        self.data = np.append(self.data, img2.data[0:n_lines, :, :], axis=0)
+        self.mask_clouds = np.append(self.mask_clouds, img2.mask_clouds[0:n_lines, :], axis=0)
+        if self.cfg.is_dlr_dataformat:
+            self.deadpixelmap = np.append(self.deadpixelmap, img2.deadpixelmap[0:n_lines, :], axis=0)
+        # TODO append remaining raster layers - additional cloud masks, ...
+
+        # NOTE: We leave the quicklook out here because merging the quicklook of adjacent scenes might cause a
+        #       brightness jump that can be avoided by recomputing the quicklook after DN/radiance conversion.
+
     def DN2TOARadiance(self):
         """Convert DNs to TOA radiance.
 
@@ -406,9 +549,32 @@ class EnMAP_Detector_SensorGeo(_EnMAP_Image):
         """
         # TODO move to processors.radiometric_transform?
         if self.detector_meta.unitcode == 'DN':
-            self.logger.info('Converting DN values to radiance for %s detector...' % self.detector_name)
-            self.data = (self.detector_meta.l_min + (self.detector_meta.l_max - self.detector_meta.l_min) /
-                         (2 ** 16 - 1) * self.data[:])
+            self.logger.info('Converting DN values to radiance [mW/m^2/sr/nm] for %s detector...' % self.detector_name)
+
+            if self.detector_meta.l_min is not None and self.detector_meta.l_max is not None:
+                # Lλ = (LMINλ + ((LMAXλ - LMINλ)/(QCALMAX-QCALMIN)) * (QCAL-QCALMIN))
+                # FIXME this asserts LMIN and LMAX in mW m^-2 sr^-1 nm^-1
+
+                QCALMIN = 1
+                QCALMAX = 2 ** 16  # 65536, so that QCALMAX - QCALMIN = 65535 (full 16-bit DN range)
+                LMIN = self.detector_meta.l_min
+                LMAX = self.detector_meta.l_max
+                QCAL = self.data[:]
+
+                self.data = ((LMAX - LMIN)/(QCALMAX - QCALMIN)) * (QCAL - QCALMIN) + LMIN
+
+            elif self.detector_meta.gains is not None and self.detector_meta.offsets is not None:
+                # Lλ = QCAL / GAIN + OFFSET
+                # FIXME this asserts LMIN and LMAX in mW/cm2/sr/um
+            # NOTE: - DLR provides gains between 2000 and 10000, so we have to DIVIDE by gains
+                #       - DLR gains / offsets are provided in mW/cm2/sr/um, so we have to multiply by 10 to get
+                #         mW m^-2 sr^-1 nm^-1 as needed later
+                self.data = 10 * (self.data[:] / self.detector_meta.gains + self.detector_meta.offsets)
+
+            else:
+                raise ValueError("Neither 'l_min'/'l_max' nor 'gains'/'offsets' "
+                                 "are available for radiance computation.")
+
             self.detector_meta.unit = "mW m^-2 sr^-1 nm^-1"
             self.detector_meta.unitcode = "TOARad"
         else:
@@ -417,25 +583,6 @@ class EnMAP_Detector_SensorGeo(_EnMAP_Image):
                     code=self.detector_meta.unitcode)
             )
 
-    def generate_quicklook(self, filename):
-        """
-        Generate image quicklook and save into a file
-        :param filename: file path to store the image
-        :return: None
-        """
-        p2 = np.percentile(self.data[:, :, self.detector_meta.preview_bands[0]], 2)
-        p98 = np.percentile(self.data[:, :, self.detector_meta.preview_bands[0]], 98)
-        red_rescaled = exposure.rescale_intensity(self.data[:, :, self.detector_meta.preview_bands[0]], (p2, p98))
-        p2 = np.percentile(self.data[:, :, self.detector_meta.preview_bands[1]], 2)
-        p98 = np.percentile(self.data[:, :, self.detector_meta.preview_bands[1]], 98)
-        green_rescaled = exposure.rescale_intensity(self.data[:, :, self.detector_meta.preview_bands[1]], (p2, p98))
-        p2 = np.percentile(self.data[:, :, self.detector_meta.preview_bands[2]], 2)
-        p98 = np.percentile(self.data[:, :, self.detector_meta.preview_bands[2]], 98)
-        blue_rescaled = exposure.rescale_intensity(self.data[:, :, self.detector_meta.preview_bands[2]], (p2, p98))
-        pix = np.dstack((red_rescaled, green_rescaled, blue_rescaled))
-        pix = np.uint8(pix * 255)
-        imageio.imwrite(filename, pix)
-
 
 class EnMAPL1Product_SensorGeo(object):
     """Class for EnPT EnMAP object in sensor geometry.
@@ -456,10 +603,11 @@ class EnMAPL1Product_SensorGeo(object):
 
     """
 
-    def __init__(self, root_dir: str, config: EnPTConfig, logger=None, lon_lat_smpl=None):
+    def __init__(self, root_dir: str, config: EnPTConfig, logger=None):
         """Get instance of EnPT EnMAP object in sensor geometry.
 
         :param root_dir:    Root directory of EnMAP Level-1B product
+        :param config:      EnPT configuration object
         :param logger:      None or logging instance to be appended to EnMAPL1Product_SensorGeo instance
                             (If None, a default EnPT_Logger is used).
         """
@@ -472,30 +620,35 @@ class EnMAPL1Product_SensorGeo(object):
             self.logger = logger
 
         # Read metadata here in order to get all information needed by to get paths.
-        self.meta = EnMAP_Metadata_L1B_SensorGeo(glob(path.join(root_dir, "*_header.xml"))[0],
-                                                 config=self.cfg, logger=self.logger)
-        self.meta.read_metadata(lon_lat_smpl)
+        if self.cfg.is_dlr_dataformat:
+            self.meta = EnMAP_Metadata_L1B_SensorGeo(glob(path.join(root_dir, "*METADATA.XML"))[0],
+                                                     config=self.cfg, logger=self.logger)
+        else:
+            self.meta = EnMAP_Metadata_L1B_SensorGeo(glob(path.join(root_dir, "*_header.xml"))[0],
+                                                     config=self.cfg, logger=self.logger)
+        self.meta.read_metadata()
 
         # define VNIR and SWIR detector
         self.vnir = EnMAP_Detector_SensorGeo('VNIR', root_dir, config=self.cfg, logger=self.logger, meta=self.meta.vnir)
         self.swir = EnMAP_Detector_SensorGeo('SWIR', root_dir, config=self.cfg, logger=self.logger, meta=self.meta.swir)
 
         # Get the paths according information delivered in the metadata
-        self.paths = self.get_paths(root_dir)
+        self.paths = self.get_paths()
 
         self.detector_attrNames = ['vnir', 'swir']
 
-    def get_paths(self, root_dir):
+    def get_paths(self):
         """
         Get all file paths associated with the current instance of EnMAPL1Product_SensorGeo
-        :param root_dir: directory where the data are located
+
         :return: paths.SimpleNamespace()
         """
         paths = SimpleNamespace()
         paths.vnir = self.vnir.get_paths()
         paths.swir = self.swir.get_paths()
-        paths.root_dir = root_dir
-        paths.metaxml = glob(path.join(root_dir, "*_header.xml"))[0]
+        paths.root_dir = self.meta.rootdir
+        paths.metaxml = self.meta.path_xml
+
         return paths
 
     @property
@@ -605,159 +758,51 @@ class EnMAPL1Product_SensorGeo(object):
         AC = AtmosphericCorrector(config=self.cfg)
         AC.run_ac(self)
 
-    # Define a new save to take into account the fact that 2 images might be appended
-    # Here we save the radiance and not DN (otherwise there will be a problem with the concatened images)
     def save(self, outdir: str, suffix="") -> str:
-        """
-        Save the product to disk using almost the same input format
+        """Save the product to disk using almost the same input format.
+
+        NOTE: Radiance is saved instead of DNs to avoid a radiometric jump between concatenated images.
+
         :param outdir: path to the output directory
+        :param suffix: suffix to be appended to the output directory name (TODO: confirm intended use)
         :return: root path (root directory) where products were written
         """
-        product_dir = path.join(
-            path.abspath(outdir), "{name}{suffix}".format(
-                name=[ff for ff in self.paths.root_dir.split(path.sep) if ff != ''][-1],
-                suffix=suffix)
-        )
+        product_dir = path.join(path.abspath(outdir),
+                                "{name}{suffix}".format(name=self.meta.scene_basename, suffix=suffix))
+
         self.logger.info("Write product to: %s" % product_dir)
         makedirs(product_dir, exist_ok=True)
 
-        # We can hardcode the detectors (?)
         # write the VNIR
         self.vnir.data.save(product_dir + path.sep + self.meta.vnir.data_filename, fmt="ENVI")
         self.vnir.mask_clouds.save(product_dir + path.sep + self.meta.vnir.cloud_mask_filename, fmt="GTiff")
-        self.vnir.deadpixelmap.save(product_dir + path.sep + self.meta.vnir.dead_pixel_filename, fmt="GTiff")
-        self.vnir.generate_quicklook(product_dir + path.sep + self.meta.vnir.quicklook_filename)
+        if self.vnir.deadpixelmap is not None:
+            self.vnir.deadpixelmap.save(product_dir + path.sep + self.meta.vnir.dead_pixel_filename, fmt="GTiff")
+        else:
+            self.logger.warning('Could not save VNIR dead pixel map because there is no corresponding attribute.')
+
+        # FIXME we could also write the quicklook included in DLR L1B format
+        self.vnir.generate_quicklook(bands2use=self.vnir.detector_meta.preview_bands) \
+            .save(path.join(product_dir, path.basename(self.meta.vnir.quicklook_filename) + '.png'), fmt='PNG')
 
         # write the SWIR
         self.swir.data.save(product_dir + path.sep + self.meta.swir.data_filename, fmt="ENVI")
         self.swir.mask_clouds.save(product_dir + path.sep + self.meta.swir.cloud_mask_filename, fmt="GTiff")
-        self.swir.deadpixelmap.save(product_dir + path.sep + self.meta.swir.dead_pixel_filename, fmt="GTiff")
-        self.swir.generate_quicklook(product_dir + path.sep + self.meta.swir.quicklook_filename)
+        if self.swir.deadpixelmap is not None:
+            self.swir.deadpixelmap.save(product_dir + path.sep + self.meta.swir.dead_pixel_filename, fmt="GTiff")
+        else:
+            self.logger.warning('Could not save SWIR dead pixel map because there is no corresponding attribute.')
+        self.swir.generate_quicklook(bands2use=self.swir.detector_meta.preview_bands) \
+            .save(path.join(product_dir, path.basename(self.meta.swir.quicklook_filename) + '.png'), fmt='PNG')
 
         # Update the xml file
-        xml = etree.parse(self.paths.metaxml)
-        xml.findall("ProductComponent/VNIRDetector/Data/Size/NRows")[0].text = str(self.meta.vnir.nrows)
-        xml.findall("ProductComponent/VNIRDetector/Data/Type/UnitCode")[0].text = self.meta.vnir.unitcode
-        xml.findall("ProductComponent/VNIRDetector/Data/Type/Unit")[0].text = self.meta.vnir.unit
-        xml.findall("ProductComponent/SWIRDetector/Data/Size/NRows")[0].text = str(self.meta.swir.nrows)
-        xml.findall("ProductComponent/SWIRDetector/Data/Type/UnitCode")[0].text = self.meta.swir.unitcode
-        xml.findall("ProductComponent/SWIRDetector/Data/Type/Unit")[0].text = self.meta.swir.unit
-        xml_string = etree.tostring(xml, pretty_print=True, xml_declaration=True, encoding='UTF-8')
+        metadata_string = self.meta.to_XML()
         with open(product_dir + path.sep + path.basename(self.paths.metaxml), "w") as xml_file:
-            xml_file.write(xml_string.decode("utf-8"))
+            xml_file.write(metadata_string)
 
-        return product_dir
-
-    def append_new_image(self, img2, n_lines: int = None):
-        """
-        Check if a second image could pass with the first image.
-        In this version we assume that the image will be add below
-        If it is the case, the method will create temporary files that will be used in the following.
-        :param img2:
-        :param n_lines: number of line to be added
-        :return: None
-        """
-        self.logger.info("Check new image %s" % img2.paths.root_dir)
-
-        distance_min = 27.0
-        distance_max = 34.0
-        tag_vnir = False
-        tag_swir = False
-
-        # check vnir bottom left
-        x1, y1, _, _ = utm.from_latlon(self.meta.vnir.lat_UL_UR_LL_LR[2], self.meta.vnir.lon_UL_UR_LL_LR[2])
-        x2, y2, _, _ = utm.from_latlon(img2.meta.vnir.lat_UL_UR_LL_LR[0], img2.meta.vnir.lon_UL_UR_LL_LR[0])
-        distance_left = np.sqrt((x1-x2)**2 + (y1-y2)**2)
-
-        # check vnir bottom right
-        x1, y1, _, _ = utm.from_latlon(self.meta.vnir.lat_UL_UR_LL_LR[3], self.meta.vnir.lon_UL_UR_LL_LR[3])
-        x2, y2, _, _ = utm.from_latlon(img2.meta.vnir.lat_UL_UR_LL_LR[1], img2.meta.vnir.lon_UL_UR_LL_LR[1])
-        distance_right = np.sqrt((x1-x2)**2 + (y1-y2)**2)
-
-        if distance_min < distance_left < distance_max and distance_min < distance_right < distance_max:
-            tag_vnir = True
-
-        # check swir bottom left
-        x1, y1, _, _ = utm.from_latlon(self.meta.swir.lat_UL_UR_LL_LR[2], self.meta.swir.lon_UL_UR_LL_LR[2])
-        x2, y2, _, _ = utm.from_latlon(img2.meta.swir.lat_UL_UR_LL_LR[0], img2.meta.swir.lon_UL_UR_LL_LR[0])
-        distance_left = np.sqrt((x1-x2)**2 + (y1-y2)**2)
-
-        # check swir bottom right
-        x1, y1, _, _ = utm.from_latlon(self.meta.swir.lat_UL_UR_LL_LR[3], self.meta.swir.lon_UL_UR_LL_LR[3])
-        x2, y2, _, _ = utm.from_latlon(img2.meta.swir.lat_UL_UR_LL_LR[1], img2.meta.swir.lon_UL_UR_LL_LR[1])
-        distance_right = np.sqrt((x1-x2)**2 + (y1-y2)**2)
-
-        if distance_min < distance_left < distance_max and distance_min < distance_right < distance_max:
-            tag_swir = True
-
-        if tag_vnir is False or tag_swir is False:
-            self.logger.warning("%s and %s don't fit to be appended" % (self.paths.root_dir, img2.paths.root_dir))
-            return
-
-        self.logger.info("Append new image: %s" % img2.paths.root_dir)
-
-        # set new number of line
-        if n_lines is None:
-            n_lines = img2.meta.vnir.nrows
-
-        if n_lines > img2.meta.vnir.nrows:
-            self.logger.warning("n_lines (%s) exceeds the total number of line of second image" % n_lines)
-            self.logger.warning("Set to the image number of line")
-            n_lines = img2.meta.vnir.nrows
-
-        if n_lines < 50:  # TODO: determine these values
-            self.logger.warning("A minimum of 50 lines is required, only %s were selected" % n_lines)
-            self.logger.warning("Set the number of line to 50")
-            n_lines = 50
+        self.logger.info("L1B product successfully written!")
 
-        self.meta.vnir.nrows += n_lines
-
-        # Create new corner coordinate - VNIR
-        ff = interp2d(x=[0, 1], y=[0, 1], z=[[img2.meta.vnir.lat_UL_UR_LL_LR[0], img2.meta.vnir.lat_UL_UR_LL_LR[1]],
-                                             [img2.meta.vnir.lat_UL_UR_LL_LR[2], img2.meta.vnir.lat_UL_UR_LL_LR[3]]],
-                      kind='linear')
-        self.meta.vnir.lat_UL_UR_LL_LR[2] = np.array(ff(0, n_lines/img2.meta.vnir.nrows))[0]
-        self.meta.vnir.lat_UL_UR_LL_LR[3] = np.array(ff(1, n_lines/img2.meta.vnir.nrows))[0]
-        lon_lat_smpl = (15, 15)
-        self.meta.vnir.lats = self.meta.vnir.interpolate_corners(*self.meta.vnir.lat_UL_UR_LL_LR, *lon_lat_smpl)
-
-        ff = interp2d(x=[0, 1], y=[0, 1], z=[[img2.meta.vnir.lon_UL_UR_LL_LR[0], img2.meta.vnir.lon_UL_UR_LL_LR[1]],
-                                             [img2.meta.vnir.lon_UL_UR_LL_LR[2], img2.meta.vnir.lon_UL_UR_LL_LR[3]]],
-                      kind='linear')
-        self.meta.vnir.lon_UL_UR_LL_LR[2] = np.array(ff(0, n_lines/img2.meta.vnir.nrows))[0]
-        self.meta.vnir.lon_UL_UR_LL_LR[3] = np.array(ff(1, n_lines/img2.meta.vnir.nrows))[0]
-        self.meta.vnir.lons = self.meta.vnir.interpolate_corners(*self.meta.vnir.lon_UL_UR_LL_LR, *lon_lat_smpl)
-
-        # Create new corner coordinate - SWIR
-        ff = interp2d(x=[0, 1], y=[0, 1], z=[[img2.meta.swir.lat_UL_UR_LL_LR[0], img2.meta.swir.lat_UL_UR_LL_LR[1]],
-                                             [img2.meta.swir.lat_UL_UR_LL_LR[2], img2.meta.swir.lat_UL_UR_LL_LR[3]]],
-                      kind='linear')
-        self.meta.swir.lat_UL_UR_LL_LR[2] = np.array(ff(0, n_lines/img2.meta.swir.nrows))[0]
-        self.meta.swir.lat_UL_UR_LL_LR[3] = np.array(ff(1, n_lines/img2.meta.swir.nrows))[0]
-        lon_lat_smpl = (15, 15)
-        self.meta.swir.lats = self.meta.swir.interpolate_corners(*self.meta.swir.lat_UL_UR_LL_LR, *lon_lat_smpl)
-        ff = interp2d(x=[0, 1], y=[0, 1], z=[[img2.meta.vnir.lon_UL_UR_LL_LR[0], img2.meta.vnir.lon_UL_UR_LL_LR[1]],
-                                             [img2.meta.vnir.lon_UL_UR_LL_LR[2], img2.meta.vnir.lon_UL_UR_LL_LR[3]]],
-                      kind='linear')
-        self.meta.swir.lon_UL_UR_LL_LR[2] = np.array(ff(0, n_lines/img2.meta.swir.nrows))[0]
-        self.meta.swir.lon_UL_UR_LL_LR[3] = np.array(ff(1, n_lines/img2.meta.swir.nrows))[0]
-        self.meta.swir.lons = self.meta.swir.interpolate_corners(*self.meta.swir.lon_UL_UR_LL_LR, *lon_lat_smpl)
-
-        # append the vnir/swir image
-        img2.vnir.data = img2.paths.vnir.data
-        img2.vnir.data = img2.vnir.data[0:n_lines, :, :]
-        img2.swir.data = img2.paths.swir.data
-        img2.swir.data = img2.swir.data[0:n_lines, :, :]
-        img2.DN2TOARadiance()
-        self.vnir.data = np.append(self.vnir.data, img2.vnir.data, axis=0)
-        self.swir.data = np.append(self.swir.data, img2.swir.data, axis=0)
-
-        # append the mask cloud
-        self.vnir.mask_clouds = np.append(self.vnir.mask_clouds,
-                                          GeoArray(img2.paths.vnir.mask_clouds)[0:n_lines, :], axis=0)
-        self.swir.mask_clouds = np.append(self.swir.mask_clouds,
-                                          GeoArray(img2.paths.swir.mask_clouds)[0:n_lines, :], axis=0)
+        return product_dir
 
 
 ####################################
@@ -839,3 +884,156 @@ class EnMAP_Detector_MapGeo(_EnMAP_Image):
             self.data.calc_mask_nodata(fromBand=fromBand, overwrite=overwrite)
             self.mask_nodata = self.data.mask_nodata
             return self.mask_nodata
+
+
+class EnMAPL2Product_MapGeo(_EnMAP_Image):
+    """Class for EnPT Level-2 EnMAP object in map geometry.
+
+    Attributes:
+        - logger:
+            - logging.Logger instance or subclass instance
+        - paths:
+            - paths belonging to the EnMAP product
+        - meta:
+            - instance of EnMAP_Metadata_SensorGeo class
+    """
+    def __init__(self, config: EnPTConfig, logger=None):
+        # protected attributes
+        self._logger = None
+
+        # populate attributes
+        self.cfg = config
+        if logger:
+            self.logger = logger
+
+        self.meta = None  # type: EnMAP_Metadata_L2A_MapGeo
+        self.paths = None  # type: SimpleNamespace
+
+        super(EnMAPL2Product_MapGeo, self).__init__()
+
+    @property
+    def logger(self) -> EnPT_Logger:
+        """Get an instance of enpt.utils.logging.EnPT_Logger.
+
+        NOTE:
+            - The logging level will be set according to the user inputs of EnPT.
+            - The path of the log file is directly derived from the attributes of the _EnMAP_Image instance.
+
+        Usage:
+            - get the logger:
+                logger = self.logger
+            - set the logger
+                self.logger = logging.getLogger()  # NOTE: only instances of logging.Logger are allowed here
+            - delete the logger:
+                del self.logger  # or "self.logger = None"
+
+        :return: EnPT_Logger instance
+        """
+        if self._logger and self._logger.handlers[:]:
+            return self._logger
+        else:
+            basename = path.splitext(path.basename(self.cfg.path_l1b_enmap_image))[0]
+            path_logfile = path.join(self.cfg.output_dir, basename + '.log') \
+                if self.cfg.create_logfile and self.cfg.output_dir else ''
+            self._logger = EnPT_Logger('log__' + basename, fmt_suffix=None, path_logfile=path_logfile,
+                                       log_level=self.cfg.log_level, append=False)
+
+            return self._logger
+
+    @logger.setter
+    def logger(self, logger: logging.Logger):
+        assert isinstance(logger, logging.Logger) or logger in ['not set', None], \
+            "%s.logger can not be set to %s." % (self.__class__.__name__, logger)
+
+        # save prior logs
+        # if logger is None and self._logger is not None:
+        #     self.log += self.logger.captured_stream
+        self._logger = logger
+
+    @property
+    def log(self) -> str:
+        """Return a string of all logged messages until now.
+
+        NOTE: self.log can also be set to a string.
+        """
+        return self.logger.captured_stream
+
+    @log.setter
+    def log(self, string: str):
+        assert isinstance(string, str), "'log' can only be set to a string. Got %s." % type(string)
+        self.logger.captured_stream = string
+
+    @classmethod
+    def from_L1B_sensorgeo(cls, config: EnPTConfig, enmap_ImageL1: EnMAPL1Product_SensorGeo):
+        from ..processors.orthorectification import Orthorectifier
+        L2_obj = Orthorectifier(config=config).run_transformation(enmap_ImageL1=enmap_ImageL1)
+
+        return L2_obj
+
+    def get_paths(self, l2a_outdir: str):
+        """
+        Get all file paths associated with the current instance of EnMAPL2Product_MapGeo.
+        This information is read from the object's metadata (self.meta).
+
+        :param l2a_outdir:  output directory of EnMAP Level-2A dataset
+        :return: paths as SimpleNamespace
+        """
+        paths = SimpleNamespace()
+        paths.root_dir = l2a_outdir
+        paths.metaxml = path.join(l2a_outdir, self.meta.metaxml_filename)
+        paths.data = path.join(l2a_outdir, self.meta.data_filename)
+        paths.mask_clouds = path.join(l2a_outdir, self.meta.cloud_mask_filename)
+        paths.deadpixelmap_vnir = path.join(l2a_outdir, self.meta.dead_pixel_filename_vnir)
+        paths.deadpixelmap_swir = path.join(l2a_outdir, self.meta.dead_pixel_filename_swir)
+        paths.quicklook_vnir = path.join(l2a_outdir, self.meta.quicklook_filename_vnir)
+        paths.quicklook_swir = path.join(l2a_outdir, self.meta.quicklook_filename_swir)
+
+        return paths
+
+    def save(self, outdir: str, suffix="") -> str:
+        """
+        Save the product to disk using almost the same input format
+        :param outdir: path to the output directory
+        :param suffix: suffix to be appended to the output directory name (TODO: confirm intended use)
+        :return: root path (root directory) where products were written
+        """
+        # TODO optionally add more output formats
+        product_dir = path.join(path.abspath(outdir),
+                                "{name}{suffix}".format(name=self.meta.scene_basename, suffix=suffix))
+
+        self.logger.info("Write product to: %s" % product_dir)
+        makedirs(product_dir, exist_ok=True)
+
+        # define output paths
+        outpath_data = path.join(product_dir, self.meta.data_filename)
+        outpath_mask_clouds = path.join(product_dir, self.meta.cloud_mask_filename)
+        outpath_quicklook_vnir = path.join(product_dir, self.meta.quicklook_filename_vnir)
+        outpath_quicklook_swir = path.join(product_dir, self.meta.quicklook_filename_swir)
+        outpath_meta = path.join(product_dir, self.meta.metaxml_filename)
+        outpaths = [outpath_data, outpath_mask_clouds, outpath_quicklook_vnir, outpath_quicklook_swir, outpath_meta]
+
+        # save raster data
+        kwargs_save = dict(fmt='GTiff', creationOptions=["COMPRESS=LZW"])
+        self.data.save(outpath_data, **kwargs_save)
+        self.mask_clouds.save(outpath_mask_clouds, **kwargs_save)
+
+        # TODO VNIR and SWIR
+        # self.deadpixelmap.save(path.join(product_dir, self.meta.cloud_mask_filename), **kwargs_save)
+        self.logger.warning('Currently, L2A dead pixel masks cannot be saved yet.')
+
+        self.generate_quicklook(bands2use=self.meta.preview_bands_vnir).save(outpath_quicklook_vnir, **kwargs_save)
+        self.generate_quicklook(bands2use=self.meta.preview_bands_swir).save(outpath_quicklook_swir, **kwargs_save)
+
+        # TODO remove GDAL's *.aux.xml files?
+
+        # save metadata
+        self.meta.add_product_fileinformation(filepaths=outpaths)
+        metadata_string = self.meta.to_XML()
+
+        with open(outpath_meta, 'w') as metaF:
+            self.logger.info("Writing metadata to %s" % outpath_meta)
+            metaF.write(metadata_string)
+
+        self.logger.info("L2A product successfully written!")
+
+        return product_dir
diff --git a/enpt/model/metadata.py b/enpt/model/metadata.py
index 08e29c31253886c78e510c2f449a31d7649f256f..299c4031d7c6855c02f0499628c981024e533618 100644
--- a/enpt/model/metadata.py
+++ b/enpt/model/metadata.py
@@ -2,23 +2,51 @@
 """EnPT metadata modules. All object and functions regarding EnMAP metadata are implemented here."""
 
 from datetime import datetime
-from xml.etree import ElementTree
+from lxml import etree as ElementTree
 import logging
 import os
-from typing import Union
+import fnmatch
+from typing import Union, List, Tuple  # noqa: F401
+from collections import OrderedDict
 import numpy as np
-import spectral as sp
+from py_tools_ds.geo.vector.topology import Polygon, get_footprint_polygon  # noqa: F401  # flake8 issue
 from geoarray import GeoArray
 
-from ..options.config import EnPTConfig
+from ..options.config import EnPTConfig, enmap_xres
 from .srf import SRF
+from ..processors.spatial_transform import RPC_3D_Geolayer_Generator
 
 
 # Define L1B_product_props
 L1B_product_props = dict(
     xml_detector_label=dict(
-        VNIR='VNIR',
-        SWIR='SWIR'
+        VNIR='VNIRDetector',
+        SWIR='SWIRDetector'
+    ),
+    fn_detector_suffix=dict(
+        VNIR='D1',
+        SWIR='D2'
+    )
+)
+
+
+L1B_product_props_DLR = dict(
+    xml_detector_label=dict(
+        VNIR='vnir',
+        SWIR='swir'
+    ),
+    fn_detector_suffix=dict(
+        VNIR='D1',
+        SWIR='D2'
+    )
+)
+
+
+# Define L2A_product_props_DLR
+L2A_product_props_DLR = dict(
+    xml_detector_label=dict(
+        VNIR='vnir',
+        SWIR='swir'
     ),
     fn_detector_suffix=dict(
         VNIR='D1',
@@ -43,34 +71,44 @@ class EnMAP_Metadata_L1B_Detector_SensorGeo(object):
     def __init__(self, detector_name: str, config: EnPTConfig, logger: logging.Logger = None):
         """Get a metadata object containing the metadata of a single EnMAP detector in sensor geometry.
 
-        :param detector_name: Name of the detector (VNIR or SWIR)
-        :param logger:
+        :param detector_name:   Name of the detector (VNIR or SWIR)
+        :param config:          EnPT configuration object
+        :param logger:          instance of logging.logger or subclassed
         """
         self.cfg = config
         self.detector_name = detector_name  # type: str
-        self.detector_label = L1B_product_props['xml_detector_label'][detector_name]
+        if self.cfg.is_dlr_dataformat:
+            self.detector_label = L1B_product_props_DLR['xml_detector_label'][detector_name]
+        else:
+            self.detector_label = L1B_product_props['xml_detector_label'][detector_name]
         self.logger = logger or logging.getLogger()
 
         # These lines are used to load path information
         self.data_filename = None  # type: str # detector data filename
+        self.scene_basename = None  # type: str # basename of the EnMAP image
         self.dead_pixel_filename = None  # type: str # filename of the dead pixel file
         self.quicklook_filename = None  # type: str # filename of the quicklook file
+        # FIXME cloud mask of BOTH detectors
         self.cloud_mask_filename = None  # type: str # filename of the cloud mask file
 
-        self.fwhm = None  # type: np.ndarray  # Full width half maximmum for each EnMAP band
         self.wvl_center = None  # type: np.ndarray  # Center wavelengths for each EnMAP band
+        self.fwhm = None  # type: np.ndarray  # Full width half maximum for each EnMAP band
         self.srf = None  # type: SRF  # SRF object holding the spectral response functions for each EnMAP band
-        self.solar_irrad = None  # type: dict  # solar irradiance in [W/m2/nm] for eac band
+        self.solar_irrad = None  # type: np.ndarray  # solar irradiance in [W/m2/nm] for each band
         self.nwvl = None  # type: int  # Number of wave bands
         self.nrows = None  # type: int  # number of rows
         self.ncols = None  # type: int  # number of columns
         self.smile_coef = None  # type: np.ndarray  # smile coefficients needed for smile computation
         self.nsmile_coef = None  # type: int  # number of smile coefficients
         self.smile = None  # type: np.ndarray  # smile for each EnMAP image column
-        self.l_min = None  # type: np.ndarray
-        self.l_max = None  # type: np.ndarray
-        self.lat_UL_UR_LL_LR = None  # type:  list  # latitude coordinates for UL, UR, LL, LR
-        self.lon_UL_UR_LL_LR = None  # type:  list  # longitude coordinates for UL, UR, LL, LR
+        self.gains = None  # type: np.ndarray  # band-wise gains for computing radiance from DNs
+        self.offsets = None  # type: np.ndarray  # band-wise offsets for computing radiance from DNs
+        self.l_min = None  # type: np.ndarray  # band-wise l-min for computing radiance from DNs
+        self.l_max = None  # type: np.ndarray  # band-wise l-max for computing radiance from DNs
+        self.lat_UL_UR_LL_LR = None  # type:  List[float, float, float, float]  # latitude coords for UL, UR, LL, LR
+        self.lon_UL_UR_LL_LR = None  # type:  List[float, float, float, float]  # longitude coords for UL, UR, LL, LR
+        self.rpc_coeffs = OrderedDict()  # type: OrderedDict  # RPC coefficients for geolayer computation
+        self.ll_mapPoly = None  # type: Polygon  # footprint polygon in longitude/latitude map coordinates
         self.lats = None  # type: np.ndarray  # 2D array of latitude coordinates according to given lon/lat sampling
         self.lons = None  # type: np.ndarray  # 2D array of longitude coordinates according to given lon/lat sampling
         self.unit = ''  # type: str  # radiometric unit of pixel values
@@ -78,75 +116,157 @@ class EnMAP_Metadata_L1B_Detector_SensorGeo(object):
         self.preview_bands = None
         self.snr = None  # type: np.ndarray  # Signal to noise ratio as computed from radiance data
 
-    # On this new version of read_data, we don't need anymore nsmile_coef (will be read from xml file)
-    def read_metadata(self, path_xml, lon_lat_smpl):
+    def read_metadata(self, path_xml):
         """
         Read the meadata of a specific EnMAP detector in sensor geometry
+
         :param path_xml: file path of the metadata file
-        :param lon_lat_smpl:  number if sampling in lon, lat fields
         :return: None
         """
         xml = ElementTree.parse(path_xml).getroot()
-        lbl = self.detector_label + "Detector"
-        self.logger.info("Reading metadata for %s detector..." % self.detector_name)
-
-        # read data filenames
-        self.data_filename = xml.findall("ProductComponent/%s/Data/Filename" % lbl)[0].text
-        self.dead_pixel_filename = xml.findall("ProductComponent/%s/Sensor/DeadPixel/Filename" % lbl)[0].text
-        self.quicklook_filename = xml.findall("ProductComponent/%s/Preview/Filename" % lbl)[0].text
-        self.cloud_mask_filename = xml.findall("ProductComponent/%s/Data/CloudMaskMap/Filename" % lbl)[0].text
-
-        # read preview bands
-        self.preview_bands = np.zeros(3, dtype=np.int)
-        self.preview_bands[0] = np.int(xml.findall("ProductComponent/%s/Preview/Bands/Red" % lbl)[0].text)
-        self.preview_bands[1] = np.int(xml.findall("ProductComponent/%s/Preview/Bands/Green" % lbl)[0].text)
-        self.preview_bands[2] = np.int(xml.findall("ProductComponent/%s/Preview/Bands/Blue" % lbl)[0].text)
-
-        # read some basic information concerning the detector
-        self.nrows = np.int(xml.findall("ProductComponent/%s/Data/Size/NRows" % lbl)[0].text)
-        self.ncols = np.int(xml.findall("ProductComponent/%s/Data/Size/NCols" % lbl)[0].text)
-        self.unitcode = xml.findall("ProductComponent/%s/Data/Type/UnitCode" % lbl)[0].text
-        self.unit = xml.findall("ProductComponent/%s/Data/Type/Unit" % lbl)[0].text
-
-        # Read image coordinates
-        scene_corner_coordinates = xml.findall("ProductComponent/%s/Data/SceneInformation/SceneCornerCoordinates" % lbl)
-        self.lat_UL_UR_LL_LR = [
-            np.float(scene_corner_coordinates[0].findall("Latitude")[0].text),
-            np.float(scene_corner_coordinates[1].findall("Latitude")[0].text),
-            np.float(scene_corner_coordinates[2].findall("Latitude")[0].text),
-            np.float(scene_corner_coordinates[3].findall("Latitude")[0].text)
-        ]
-        self.lon_UL_UR_LL_LR = [
-            np.float(scene_corner_coordinates[0].findall("Longitude")[0].text),
-            np.float(scene_corner_coordinates[1].findall("Longitude")[0].text),
-            np.float(scene_corner_coordinates[2].findall("Longitude")[0].text),
-            np.float(scene_corner_coordinates[3].findall("Longitude")[0].text)
-        ]
-
-        # read the band related information: wavelength, fwhm
-        self.nwvl = np.int(xml.findall("ProductComponent/%s/Data/BandInformationList/NumberOfBands" % lbl)[0].text)
-        self.nsmile_coef = np.int(xml.findall(
-            "ProductComponent/%s/Data/BandInformationList/SmileInformation/NumberOfCoefficients" % lbl)[0].text)
-        self.fwhm = np.zeros(self.nwvl, dtype=np.float)
-        self.wvl_center = np.zeros(self.nwvl, dtype=np.float)
-        self.smile_coef = np.zeros((self.nwvl, self.nsmile_coef), dtype=np.float)
-        self.l_min = np.zeros(self.nwvl, dtype=np.float)
-        self.l_max = np.zeros(self.nwvl, dtype=np.float)
-        band_informations = xml.findall("ProductComponent/%s/Data/BandInformationList/BandInformation" % lbl)
-        for bi in band_informations:
-            k = np.int64(bi.attrib['Id']) - 1
-            self.wvl_center[k] = np.float(bi.findall("CenterWavelength")[0].text)
-            self.fwhm[k] = np.float(bi.findall("FullWidthHalfMaximum")[0].text)
-            self.l_min[k] = np.float(bi.findall("L_min")[0].text)
-            self.l_max[k] = np.float(bi.findall("L_max")[0].text)
-            scl = bi.findall("Smile/Coefficient")
-            for sc in scl:
-                self.smile_coef[k, np.int64(sc.attrib['exponent'])] = np.float(sc.text)
-        self.smile = self.calc_smile()
-        self.srf = SRF.from_cwl_fwhm(self.wvl_center, self.fwhm)
-        self.solar_irrad = self.calc_solar_irradiance_CWL_FWHM_per_band()
-        self.lats = self.interpolate_corners(*self.lat_UL_UR_LL_LR, *lon_lat_smpl)
-        self.lons = self.interpolate_corners(*self.lon_UL_UR_LL_LR, *lon_lat_smpl)
+
+        if self.cfg.is_dlr_dataformat:
+            lbl = self.detector_label
+            self.logger.info("Reading metadata for %s detector..." % self.detector_name)
+
+            # read data filenames
+            all_filenames = [ele.text for ele in xml.findall("product/productFileInformation/file/name")]
+            self.data_filename = xml.find("product/image/%s/name" % lbl).text
+            self.scene_basename = self.data_filename.split('-SPECTRAL_IMAGE')[0]
+            self.dead_pixel_filename = fnmatch.filter(all_filenames, '*QL_PIXELMASK_%s.GEOTIFF' % self.detector_name)[0]
+            self.quicklook_filename = xml.find("product/quicklook/%s/name" % lbl).text
+            # FIXME multiple cloud masks provided. QL_QUALITY_CLASSES.GEOTIFF as combined product?
+            #   - QL_QUALITY_CLOUD.GEOTIFF
+            #   - QL_QUALITY_CIRRUS.GEOTIFF
+            #   - QL_QUALITY_SNOW.GEOTIFF
+            #   - QL_QUALITY_CLOUDSHADOW.GEOTIFF
+            #   - QL_QUALITY_HAZE.GEOTIFF
+            self.logger.warning('DLR test data provide multiple cloud masks. Added only *QUALITY_CLOUD.GEOTIFF!')
+            self.cloud_mask_filename = fnmatch.filter(all_filenames, '*QUALITY_CLOUD.GEOTIFF')[0]
+
+            # read some basic information concerning the detector
+            self.nrows = int(xml.find("product/image/%s/dimension/rows" % lbl).text)
+            self.ncols = int(xml.find("product/image/%s/dimension/columns" % lbl).text)
+            self.unitcode = 'DN'
+            self.unit = ''
+
+            # Read image coordinates
+            # FIXME why do we have the same corner coordinates for VNIR and SWIR?
+            points = xml.findall("base/spatialCoverage/boundingPolygon/point")
+            coords_xy = {p.find('frame').text: (float(p.find('longitude').text),
+                                                float(p.find('latitude').text))
+                         for p in points}
+            coord_tags = ['upper_left', 'upper_right', 'lower_left', 'lower_right']
+            self.lon_UL_UR_LL_LR = [coords_xy[ct][0] for ct in coord_tags]
+            self.lat_UL_UR_LL_LR = [coords_xy[ct][1] for ct in coord_tags]
+
+            # read the band related information: wavelength, fwhm
+            self.nwvl = int(xml.find("product/image/%s/channels" % lbl).text)
+            # FIXME hardcoded - DLR does not provide any smile information
+            #   => smile coefficients are set to zero until we have valid ones
+            self.nsmile_coef = 5
+            self.smile_coef = np.zeros((self.nwvl, self.nsmile_coef), dtype=np.float)
+
+            startband = 0 if self.detector_name == 'VNIR' else int(xml.find("product/image/vnir/channels").text)
+            subset = slice(startband, startband + self.nwvl)
+            bi = "specific/bandCharacterisation/bandID/"
+            self.wvl_center = np.array([float(ele.text) for ele in xml.findall(bi + "wavelengthCenterOfBand")[subset]])
+            self.fwhm = np.array([float(ele.text) for ele in xml.findall(bi + "FWHMOfBand")[subset]])
+            self.gains = np.array([float(ele.text) for ele in xml.findall(bi + "GainOfBand")[subset]])
+            self.offsets = np.array([float(ele.text) for ele in xml.findall(bi + "OffsetOfBand")[subset]])
+
+            # read preview bands
+            wvl_red = float(xml.find("product/image/%s/qlChannels/red" % lbl).text)
+            wvl_green = float(xml.find("product/image/%s/qlChannels/green" % lbl).text)
+            wvl_blue = float(xml.find("product/image/%s/qlChannels/blue" % lbl).text)
+            self.preview_bands = np.array([np.argmin(np.abs(self.wvl_center - wvl))
+                                           for wvl in [wvl_red, wvl_green, wvl_blue]])
+
+            # read RPC coefficients
+            for bID in xml.findall("product/navigation/RPC/bandID")[subset]:
+                bN = 'band_%d' % np.int64(bID.attrib['number'])
+
+                keys2combine = ('row_num', 'row_den', 'col_num', 'col_den')
+
+                tmp = OrderedDict([(ele.tag.lower(), np.float(ele.text)) for ele in bID.findall('./')])
+                self.rpc_coeffs[bN] = {k: v for k, v in tmp.items() if not k.startswith(keys2combine)}
+
+                for n in keys2combine:
+                    self.rpc_coeffs[bN]['%s_coeffs' % n.lower()] = \
+                        np.array([v for k, v in tmp.items() if k.startswith(n)])
+
+            # compute metadata derived from read data
+            self.smile = self.calc_smile()
+            self.srf = SRF.from_cwl_fwhm(self.wvl_center, self.fwhm)
+            self.solar_irrad = self.calc_solar_irradiance_CWL_FWHM_per_band()
+            self.ll_mapPoly = get_footprint_polygon(tuple(zip(self.lon_UL_UR_LL_LR,
+                                                              self.lat_UL_UR_LL_LR)), fix_invalid=True)
+
+        else:
+            lbl = self.detector_label
+            self.logger.info("Reading metadata for %s detector..." % self.detector_name)
+
+            # read data filenames
+            self.data_filename = xml.findall("ProductComponent/%s/Data/Filename" % lbl)[0].text
+            self.scene_basename = os.path.splitext(self.data_filename)[0]
+            self.dead_pixel_filename = xml.findall("ProductComponent/%s/Sensor/DeadPixel/Filename" % lbl)[0].text
+            self.quicklook_filename = xml.findall("ProductComponent/%s/Preview/Filename" % lbl)[0].text
+            self.cloud_mask_filename = xml.findall("ProductComponent/%s/Data/CloudMaskMap/Filename" % lbl)[0].text
+
+            # read preview bands
+            self.preview_bands = np.zeros(3, dtype=np.int)
+            self.preview_bands[0] = np.int(xml.findall("ProductComponent/%s/Preview/Bands/Red" % lbl)[0].text)
+            self.preview_bands[1] = np.int(xml.findall("ProductComponent/%s/Preview/Bands/Green" % lbl)[0].text)
+            self.preview_bands[2] = np.int(xml.findall("ProductComponent/%s/Preview/Bands/Blue" % lbl)[0].text)
+
+            # read some basic information concerning the detector
+            self.nrows = np.int(xml.findall("ProductComponent/%s/Data/Size/NRows" % lbl)[0].text)
+            self.ncols = np.int(xml.findall("ProductComponent/%s/Data/Size/NCols" % lbl)[0].text)
+            self.unitcode = xml.findall("ProductComponent/%s/Data/Type/UnitCode" % lbl)[0].text
+            self.unit = xml.findall("ProductComponent/%s/Data/Type/Unit" % lbl)[0].text
+
+            # Read image coordinates
+            scene_corner_coordinates = xml.findall("ProductComponent/%s/Data/SceneInformation/"
+                                                   "SceneCornerCoordinates" % lbl)
+            self.lat_UL_UR_LL_LR = [
+                np.float(scene_corner_coordinates[0].findall("Latitude")[0].text),
+                np.float(scene_corner_coordinates[1].findall("Latitude")[0].text),
+                np.float(scene_corner_coordinates[2].findall("Latitude")[0].text),
+                np.float(scene_corner_coordinates[3].findall("Latitude")[0].text)
+            ]
+            self.lon_UL_UR_LL_LR = [
+                np.float(scene_corner_coordinates[0].findall("Longitude")[0].text),
+                np.float(scene_corner_coordinates[1].findall("Longitude")[0].text),
+                np.float(scene_corner_coordinates[2].findall("Longitude")[0].text),
+                np.float(scene_corner_coordinates[3].findall("Longitude")[0].text)
+            ]
+
+            # read the band related information: wavelength, fwhm
+            self.nwvl = np.int(xml.findall("ProductComponent/%s/Data/BandInformationList/NumberOfBands" % lbl)[0].text)
+            self.nsmile_coef = np.int(xml.findall(
+                "ProductComponent/%s/Data/BandInformationList/SmileInformation/NumberOfCoefficients" % lbl)[0].text)
+            self.fwhm = np.zeros(self.nwvl, dtype=np.float)
+            self.wvl_center = np.zeros(self.nwvl, dtype=np.float)
+            self.smile_coef = np.zeros((self.nwvl, self.nsmile_coef), dtype=np.float)
+            self.l_min = np.zeros(self.nwvl, dtype=np.float)
+            self.l_max = np.zeros(self.nwvl, dtype=np.float)
+            band_informations = xml.findall("ProductComponent/%s/Data/BandInformationList/BandInformation" % lbl)
+            for bi in band_informations:
+                k = np.int64(bi.attrib['Id']) - 1
+                self.wvl_center[k] = np.float(bi.findall("CenterWavelength")[0].text)
+                self.fwhm[k] = np.float(bi.findall("FullWidthHalfMaximum")[0].text)
+                self.l_min[k] = np.float(bi.findall("L_min")[0].text)
+                self.l_max[k] = np.float(bi.findall("L_max")[0].text)
+                scl = bi.findall("Smile/Coefficient")
+                for sc in scl:
+                    self.smile_coef[k, np.int64(sc.attrib['exponent'])] = np.float(sc.text)
+            self.smile = self.calc_smile()
+            self.srf = SRF.from_cwl_fwhm(self.wvl_center, self.fwhm)
+            self.solar_irrad = self.calc_solar_irradiance_CWL_FWHM_per_band()
+            self.ll_mapPoly = get_footprint_polygon(tuple(zip(self.lon_UL_UR_LL_LR, self.lat_UL_UR_LL_LR)),
+                                                    fix_invalid=True)
+            self.lats = self.interpolate_corners(*self.lat_UL_UR_LL_LR, self.ncols, self.nrows)
+            self.lons = self.interpolate_corners(*self.lon_UL_UR_LL_LR, self.ncols, self.nrows)
 
     def calc_smile(self):
         """Compute smile for each EnMAP column.
@@ -181,16 +301,16 @@ class EnMAP_Metadata_L1B_Detector_SensorGeo(object):
         :param rad_data:        image radiance data of EnMAP_Detector_SensorGeo
         :param dir_snr_models:  root directory where SNR model data is stored (must contain SNR_D1.bsq/SNR_D2.bsq)
         """
-        path_snr_model = os.path.join(dir_snr_models, "SNR_D1.hdr" if self.detector_name == 'VNIR' else "SNR_D2.hdr")
+        path_snr_model = os.path.join(dir_snr_models, "SNR_D1.bsq" if self.detector_name == 'VNIR' else "SNR_D2.bsq")
         rad_data = np.array(rad_data)
 
         assert self.unitcode == 'TOARad'
         self.logger.info("Computing SNR for %s using %s" % (self.detector_name, path_snr_model))
 
         if self.detector_name == 'VNIR':
-            gA = sp.open_image(path_snr_model)
-            coeffs_highgain = gA[0:3, :, :]
-            coeffs_lowgain = gA[3:6, :, :]
+            gA = GeoArray(path_snr_model)
+            coeffs_highgain = gA[0:3, :, :]  # [3 x ncols x nbands]
+            coeffs_lowgain = gA[3:6, :, :]  # [3 x ncols x nbands]
             gain_threshold = np.squeeze(gA[6, :, :])
 
             self.snr = np.zeros(rad_data.shape)
@@ -210,7 +330,17 @@ class EnMAP_Metadata_L1B_Detector_SensorGeo(object):
                     coeffs_lowgain[2, lowgain_mask] * rad_lowgain ** 2
 
         else:
-            coeffs = sp.open_image(path_snr_model)[:, :, :]
+            gA = GeoArray(path_snr_model)
+
+            # some SWIR bands may be missing -> find indices of bands we need for SNR computation
+            cwls_gA = gA.metadata.band_meta['wavelength']
+            cwls_needed = self.wvl_center
+
+            idxs_needed = [np.argmin(np.abs(cwls_gA - cwl)) for cwl in cwls_needed]
+            if not len(set(idxs_needed)) == len(idxs_needed) or len(idxs_needed) != self.nwvl:
+                raise RuntimeError('Unclear band allocation during SWIR SNR computation.')
+
+            coeffs = gA[:, :, idxs_needed]  # [3 x ncols x nbands]
             self.snr = coeffs[0, :, :] + coeffs[1, :, :] * rad_data[:, :, :] + coeffs[2, :, :] * rad_data[:, :, :] ** 2
 
     @staticmethod
@@ -230,6 +360,8 @@ class EnMAP_Metadata_L1B_Detector_SensorGeo(object):
         #       - especially at off-nadir acquisitions and with some terrain present, a linear interpolation leads
         #         to large deviations (> 180 m y-coordinate offset for the EnPT test dataset)
 
+        # TODO ensure that lons/lats represent UL coordinates not pixel coordinate centers (as given by Karl / DLR(?))
+
         corner_coords = np.array([[ul, ur],
                                   [ll, lr]])
         rowpos, colpos = [0, 1], [0, 1]
@@ -244,29 +376,39 @@ class EnMAP_Metadata_L1B_Detector_SensorGeo(object):
 
         return coords
 
-    def calc_solar_irradiance_CWL_FWHM_per_band(self) -> dict:
+    def compute_geolayer_for_cube(self):
+        self.logger.info('Computing %s geolayer...' % self.detector_name)
+
+        lons, lats = RPC_3D_Geolayer_Generator(rpc_coeffs_per_band=self.rpc_coeffs,
+                                               dem=self.cfg.path_dem,
+                                               enmapIm_cornerCoords=tuple(zip(self.lon_UL_UR_LL_LR,
+                                                                              self.lat_UL_UR_LL_LR)),
+                                               enmapIm_dims_sensorgeo=(self.nrows, self.ncols),
+                                               CPUs=self.cfg.CPUs)\
+            .compute_geolayer()
+
+        return lons, lats
+
+    def calc_solar_irradiance_CWL_FWHM_per_band(self) -> np.array:
         from ..io.reader import Solar_Irradiance_reader
 
         self.logger.debug('Calculating solar irradiance...')
 
         sol_irr = Solar_Irradiance_reader(path_solar_irr_model=self.cfg.path_solar_irr, wvl_min_nm=350, wvl_max_nm=2500)
 
-        irr_bands = {}
+        irr_bands = []
         for band in self.srf.bands:
-            if self.srf[band] is None:
-                irr_bands[band] = None
-            else:
-                WVL_band = self.srf.srfs_wvl if self.srf.wvl_unit == 'nanometers' else self.srf.srfs_wvl * 1000
-                RSP_band = self.srf.srfs_norm01[band]
-                sol_irr_at_WVL = np.interp(WVL_band, sol_irr[:, 0], sol_irr[:, 1], left=0, right=0)
+            WVL_band = self.srf.srfs_wvl if self.srf.wvl_unit == 'nanometers' else self.srf.srfs_wvl * 1000
+            RSP_band = self.srf.srfs_norm01[band]
+            sol_irr_at_WVL = np.interp(WVL_band, sol_irr[:, 0], sol_irr[:, 1], left=0, right=0)
 
-                irr_bands[band] = round(np.sum(sol_irr_at_WVL * RSP_band) / np.sum(RSP_band), 2)
+            irr_bands.append(np.round(np.sum(sol_irr_at_WVL * RSP_band) / np.sum(RSP_band), 2))
 
-        return irr_bands
+        return np.array(irr_bands)
 
 
 class EnMAP_Metadata_L1B_SensorGeo(object):
-    """EnMAP Metadata class holding the metadata of the complete EnMAP product in sensor geometry incl. VNIR and SWIR.
+    """EnMAP Metadata class for the metadata of the complete EnMAP L1B product in sensor geometry incl. VNIR and SWIR.
 
     Attributes:
         - logger(logging.Logger):  None or logging instance
@@ -278,29 +420,43 @@ class EnMAP_Metadata_L1B_SensorGeo(object):
         - mu_sun: needed by SICOR for TOARad > TOARef conversion
         - vnir(EnMAP_Metadata_VNIR_SensorGeo)
         - swir(EnMAP_Metadata_SWIR_SensorGeo)
+        - detector_attrNames: attribute names of the detector objects
     """
 
     def __init__(self, path_metaxml, config: EnPTConfig, logger=None):
         """Get a metadata object instance for the given EnMAP L1B product in sensor geometry.
 
-        :param path_metaxml:  file path of the EnMAP L1B metadata XML file
-        :param logger:  instance of logging.logger or subclassed
+        :param path_metaxml:    file path of the EnMAP L1B metadata XML file
+        :param config:          EnPT configuration object
+        :param logger:          instance of logging.logger or subclassed
         """
         self.cfg = config
         self.logger = logger or logging.getLogger()
-        self._path_xml = path_metaxml
+        self.path_xml = path_metaxml
+        self.rootdir = os.path.dirname(path_metaxml)
 
         # defaults - Common
+        self.proc_level = None  # type: str  # Dataset processing level
         self.observation_datetime = None  # type: datetime  # Date and Time of image observation
         self.geom_view_zenith = None  # type: float  # viewing zenith angle
         self.geom_view_azimuth = None  # type: float  # viewing azimuth angle
         self.geom_sun_zenith = None  # type: float  # sun zenith angle
         self.geom_sun_azimuth = None  # type: float  # sun azimuth angle
         self.mu_sun = None  # type: float  # needed by SICOR for TOARad > TOARef conversion
-        self.earthSunDist = None  # type: float  # earth-sun distance # TODO doc correct?
+        self.earthSunDist = None  # type: float  # earth-sun distance
         self.vnir = None  # type: EnMAP_Metadata_L1B_Detector_SensorGeo # metadata of VNIR only
         self.swir = None  # type: EnMAP_Metadata_L1B_Detector_SensorGeo # metadata of SWIR only
-        self.detector_attrNames = ['vnir', 'swir']
+        self.detector_attrNames = ['vnir', 'swir']  # type: list # attribute names of the detector objects
+        self.metaxml_filename = None  # type: str # filename of XML metadata file
+
+        self._scene_basename = None  # type: str # basename of the EnMAP image
+
+    @property
+    def scene_basename(self):
+        if self.vnir:
+            self._scene_basename = self.vnir.scene_basename
+
+        return self._scene_basename
 
     # Read common metadata method
     def read_common_meta(self, path_xml):
@@ -314,19 +470,47 @@ class EnMAP_Metadata_L1B_SensorGeo(object):
         # load the metadata xml file
         xml = ElementTree.parse(path_xml).getroot()
 
-        # read the acquisition time
-        self.observation_datetime = \
-            datetime.strptime(xml.findall("GeneralInfo/ProductInfo/ProductStartTime")[0].text, '%Y-%m-%dT%H:%M:%S.%fZ')
+        self.metaxml_filename = os.path.basename(path_xml)
+
+        if self.cfg.is_dlr_dataformat:
+            # read processing level
+            self.proc_level = xml.find("base/level").text
+            if self.proc_level != 'L1B':
+                raise RuntimeError(self.proc_level, "Unexpected input data processing level. Expected 'L1B'.")
+
+            # read the acquisition time
+            self.observation_datetime = \
+                datetime.strptime(xml.find("base/temporalCoverage/startTime").text, '%Y-%m-%dT%H:%M:%S.%fZ')
+
+            # get the distance earth sun from the acquisition date
+            self.earthSunDist = self.get_earth_sun_distance(self.observation_datetime)
+
+            # read Geometry (observation/illumination) angle
+            # NOTE: EnMAP metadata provide also the angles for the image corners
+            #       -> would allow even more precise computation (e.g., specific/sunElevationAngle/upper_left)
+            # NOTE: alongOffNadirAngle is always near 0 and therefore ignored here (not relevant for AC)
+            # FIXME VZA may be negative in DLR L1B data -> correct to always use the absolute value for SICOR?
+            self.geom_view_zenith = np.abs(np.float(xml.find("specific/acrossOffNadirAngle/center").text))
+            # FIXME correct to directly use sceneAzimuthAngle (14.24 (DLR) vs. 101.1 (AlpineTest))
+            self.geom_view_azimuth = np.float(xml.find("specific/sceneAzimuthAngle/center").text)
+            self.geom_sun_zenith = 90 - np.float(xml.find("specific/sunElevationAngle/center").text)
+            self.geom_sun_azimuth = np.float(xml.find("specific/sunAzimuthAngle/center").text)
+            self.mu_sun = np.cos(np.deg2rad(self.geom_sun_zenith))
+        else:
+            # read the acquisition time
+            self.observation_datetime = \
+                datetime.strptime(xml.findall("GeneralInfo/ProductInfo/ProductStartTime")[0].text,
+                                  '%Y-%m-%dT%H:%M:%S.%fZ')
 
-        # get the distance earth sun from the acquisition date
-        self.earthSunDist = self.get_earth_sun_distance(self.observation_datetime)
+            # get the distance earth sun from the acquisition date
+            self.earthSunDist = self.get_earth_sun_distance(self.observation_datetime)
 
-        # read Geometry (observation/illumination) angle
-        self.geom_view_zenith = np.float(xml.findall("GeneralInfo/Geometry/Observation/ZenithAngle")[0].text)
-        self.geom_view_azimuth = np.float(xml.findall("GeneralInfo/Geometry/Observation/AzimuthAngle")[0].text)
-        self.geom_sun_zenith = np.float(xml.findall("GeneralInfo/Geometry/Illumination/ZenithAngle")[0].text)
-        self.geom_sun_azimuth = np.float(xml.findall("GeneralInfo/Geometry/Illumination/AzimuthAngle")[0].text)
-        self.mu_sun = np.cos(np.deg2rad(self.geom_sun_zenith))
+            # read Geometry (observation/illumination) angle
+            self.geom_view_zenith = np.float(xml.findall("GeneralInfo/Geometry/Observation/ZenithAngle")[0].text)
+            self.geom_view_azimuth = np.float(xml.findall("GeneralInfo/Geometry/Observation/AzimuthAngle")[0].text)
+            self.geom_sun_zenith = np.float(xml.findall("GeneralInfo/Geometry/Illumination/ZenithAngle")[0].text)
+            self.geom_sun_azimuth = np.float(xml.findall("GeneralInfo/Geometry/Illumination/AzimuthAngle")[0].text)
+            self.mu_sun = np.cos(np.deg2rad(self.geom_sun_zenith))
 
     def get_earth_sun_distance(self, acqDate: datetime):
         """Get earth sun distance (requires file of pre calculated earth sun distance per day)
@@ -350,20 +534,350 @@ class EnMAP_Metadata_L1B_SensorGeo(object):
 
         return float(EA_dist_dict[acqDate.strftime('%Y-%m-%d')])
 
-    def read_metadata(self, lon_lat_smpl):
+    def read_metadata(self):
         """
         Read the metadata of the entire EnMAP L1B product in sensor geometry
-        :param lon_lat_smpl:  number if sampling point in lon, lat fields
+
         :return: None
         """
 
         # first read common metadata
-        self.read_common_meta(self._path_xml)
+        self.read_common_meta(self.path_xml)
 
         # define and read the VNIR metadata
         self.vnir = EnMAP_Metadata_L1B_Detector_SensorGeo('VNIR', config=self.cfg, logger=self.logger)
-        self.vnir.read_metadata(self._path_xml, lon_lat_smpl)
+        self.vnir.read_metadata(self.path_xml)
 
         # define and read the SWIR metadata
         self.swir = EnMAP_Metadata_L1B_Detector_SensorGeo('SWIR', config=self.cfg, logger=self.logger)
-        self.swir.read_metadata(self._path_xml, lon_lat_smpl)
+        self.swir.read_metadata(self.path_xml)
+
+    def to_XML(self) -> str:
+        """
+        Generate an XML metadata string from the L1B metadata.
+        """
+        xml = ElementTree.parse(self.path_xml).getroot()
+
+        if self.cfg.is_dlr_dataformat:
+            for detName, detMeta in zip(['VNIR', 'SWIR'], [self.vnir, self.swir]):
+                lbl = L1B_product_props_DLR['xml_detector_label'][detName]
+                xml.find("product/image/%s/dimension/rows" % lbl).text = str(detMeta.nrows)
+                xml.find("product/image/%s/dimension/columns" % lbl).text = str(detMeta.ncols)
+                xml.find("product/quicklook/%s/dimension/rows" % lbl).text = str(detMeta.nrows)
+                xml.find("product/quicklook/%s/dimension/columns" % lbl).text = str(detMeta.ncols)
+
+        else:
+            for detName, detMeta in zip(['VNIR', 'SWIR'], [self.vnir, self.swir]):
+                lbl = L1B_product_props['xml_detector_label'][detName]
+                xml.find("ProductComponent/%s/Data/Size/NRows" % lbl).text = str(detMeta.nrows)
+                xml.find("ProductComponent/%s/Data/Type/UnitCode" % lbl).text = detMeta.unitcode
+                xml.find("ProductComponent/%s/Data/Type/Unit" % lbl).text = detMeta.unit
+
+        xml_string = ElementTree.tostring(xml, encoding='unicode', pretty_print=True)
+
+        return xml_string
+
+
+class EnMAP_Metadata_L2A_MapGeo(object):
+    def __init__(self,
+                 config: EnPTConfig,
+                 meta_l1b: EnMAP_Metadata_L1B_SensorGeo,
+                 dims_mapgeo: Tuple[int, int, int],
+                 logger=None):
+        """EnMAP Metadata class for the metadata of the complete EnMAP L2A product in map geometry incl. VNIR and SWIR.
+
+        :param config:              EnPT configuration object
+        :param meta_l1b:            metadata object of the L1B dataset in sensor geometry
+        :param dims_mapgeo:         dimensions of the EnMAP raster data in map geometry, e.g., (1024, 1000, 218)
+        :param logger:              instance of logging.logger or subclassed
+        """
+        self.cfg = config
+        self._meta_l1b = meta_l1b
+        self.logger = logger or logging.getLogger()
+
+        # defaults
+        self.band_means = None  # type: np.ndarray # band-wise means in unscaled values (percent in case of reflectance)
+        self.band_stds = None  # type: np.ndarray # band-wise standard deviations in unscaled values
+        self.fileinfos = []  # type: list # file information for each file belonging to the EnMAP L2A product
+
+        self.proc_level = 'L2A'
+        self.observation_datetime = meta_l1b.observation_datetime  # type: datetime  # Date and Time of observation
+        # FIXME VZA may be negative in DLR data
+        self.geom_view_zenith = meta_l1b.geom_view_zenith  # type: float  # viewing zenith angle
+        self.geom_view_azimuth = meta_l1b.geom_view_azimuth  # type: float  # viewing azimuth angle
+        self.geom_sun_zenith = meta_l1b.geom_sun_zenith  # type: float  # sun zenith angle
+        self.geom_sun_azimuth = meta_l1b.geom_sun_azimuth  # type: float  # sun azimuth angle
+        self.mu_sun = meta_l1b.mu_sun  # type: float  # needed by SICOR for TOARad > TOARef conversion
+        self.earthSunDist = meta_l1b.earthSunDist  # type: float  # earth-sun distance
+
+        # generate file names for L2A output
+        if self.cfg.is_dlr_dataformat:
+            self.scene_basename = meta_l1b.vnir.data_filename.split('-SPECTRAL_IMAGE')[0].replace('L1B-', 'L2A-')
+        else:
+            self.scene_basename = os.path.splitext(meta_l1b.vnir.data_filename)[0]
+        self.data_filename = meta_l1b.vnir.data_filename.replace('L1B-', 'L2A-').replace('_VNIR', '')
+        self.dead_pixel_filename_vnir = meta_l1b.vnir.dead_pixel_filename.replace('L1B-', 'L2A-')
+        self.dead_pixel_filename_swir = meta_l1b.swir.dead_pixel_filename.replace('L1B-', 'L2A-')
+        self.quicklook_filename_vnir = meta_l1b.vnir.quicklook_filename.replace('L1B-', 'L2A-')
+        self.quicklook_filename_swir = meta_l1b.swir.quicklook_filename.replace('L1B-', 'L2A-')
+        self.cloud_mask_filename = meta_l1b.vnir.cloud_mask_filename.replace('L1B-', 'L2A-')
+        self.metaxml_filename = meta_l1b.metaxml_filename.replace('L1B-', 'L2A-')
+
+        # fuse band-wise metadata (sort all band-wise metadata by wavelengths but band number keeps as it is)
+        # get band index order
+        wvls_vnir_plus_swir = np.hstack([self._meta_l1b.vnir.wvl_center, self._meta_l1b.swir.wvl_center])
+        wvls_sorted = np.array(sorted(wvls_vnir_plus_swir))
+        bandidx_order = np.array([np.argmin(np.abs(wvls_vnir_plus_swir - cwl)) for cwl in wvls_sorted])
+
+        self.wvl_center = np.hstack([meta_l1b.vnir.wvl_center, meta_l1b.swir.wvl_center])[bandidx_order]
+        self.fwhm = np.hstack([meta_l1b.vnir.fwhm, meta_l1b.swir.fwhm])[bandidx_order]
+        self.gains = np.full((dims_mapgeo[2],), 100)  # implies reflectance scaled between 0 and 10000
+        self.offsets = np.zeros((dims_mapgeo[2],))
+        self.srf = SRF.from_cwl_fwhm(self.wvl_center, self.fwhm)
+        self.solar_irrad = np.hstack([meta_l1b.vnir.solar_irrad, meta_l1b.swir.solar_irrad])[bandidx_order]
+
+        if not meta_l1b.vnir.nsmile_coef == meta_l1b.swir.nsmile_coef:
+            raise ValueError('Expected equal number of smile coefficients for VNIR and SWIR. Received %d/%s.'
+                             % (meta_l1b.vnir.nsmile_coef, meta_l1b.swir.nsmile_coef))
+
+        self.nsmile_coef = meta_l1b.vnir.nsmile_coef
+        self.smile_coef = np.vstack([meta_l1b.vnir.smile_coef, meta_l1b.swir.smile_coef])[bandidx_order, :]
+        self.smile = np.hstack([meta_l1b.vnir.smile, meta_l1b.swir.smile])[:, bandidx_order]
+
+        if self.cfg.is_dlr_dataformat:
+            self.rpc_coeffs = OrderedDict(zip(
+                ['band_%d' % (i + 1) for i in range(dims_mapgeo[2])],
+                [meta_l1b.vnir.rpc_coeffs['band_%d' % (i + 1)] if 'band_%d' % (i + 1) in meta_l1b.vnir.rpc_coeffs else
+                 meta_l1b.swir.rpc_coeffs['band_%d' % (i + 1)] for i in bandidx_order]))
+        else:
+            self.rpc_coeffs = OrderedDict()
+
+        self.nrows = dims_mapgeo[0]
+        self.ncols = dims_mapgeo[1]
+        self.nwvl = dims_mapgeo[2]
+        common_UL_UR_LL_LR = self.get_common_UL_UR_LL_LR()
+        self.lon_UL_UR_LL_LR = [lon for lon, lat in common_UL_UR_LL_LR]
+        self.lat_UL_UR_LL_LR = [lat for lon, lat in common_UL_UR_LL_LR]
+        self.ll_mapPoly = get_footprint_polygon(tuple(zip(self.lon_UL_UR_LL_LR,
+                                                          self.lat_UL_UR_LL_LR)), fix_invalid=True)
+
+        if meta_l1b.vnir.unit != meta_l1b.swir.unit or meta_l1b.vnir.unitcode != meta_l1b.swir.unitcode:
+            raise RuntimeError('L2A data should have the same radiometric unit for VNIR and SWIR. '
+                               'Received %s in %s for VNIR and %s in %s for SWIR.'
+                               % (meta_l1b.vnir.unitcode, meta_l1b.vnir.unit,
+                                  meta_l1b.swir.unitcode, meta_l1b.swir.unit))
+
+        self.unit = meta_l1b.vnir.unit
+        self.unitcode = meta_l1b.vnir.unitcode
+        self.preview_bands_vnir = meta_l1b.vnir.preview_bands
+        self.preview_bands_swir = meta_l1b.swir.preview_bands
+
+        self.snr = None
+        if meta_l1b.vnir.snr is not None:
+            assert meta_l1b.swir.snr is not None
+            self.snr = np.dstack([meta_l1b.vnir.snr, meta_l1b.swir.snr])[:, :, bandidx_order]
+
+    def get_common_UL_UR_LL_LR(self):
+        vnir_ulx, vnir_urx, vnir_llx, vnir_lrx = self._meta_l1b.vnir.lon_UL_UR_LL_LR
+        vnir_uly, vnir_ury, vnir_lly, vnir_lry = self._meta_l1b.vnir.lat_UL_UR_LL_LR
+        swir_ulx, swir_urx, swir_llx, swir_lrx = self._meta_l1b.swir.lon_UL_UR_LL_LR
+        swir_uly, swir_ury, swir_lly, swir_lry = self._meta_l1b.swir.lat_UL_UR_LL_LR
+
+        # use OUTER coordinates
+        return ((min([vnir_ulx, swir_ulx]), max([vnir_uly, swir_uly])),
+                (max([vnir_urx, swir_urx]), max([vnir_ury, swir_ury])),
+                (min([vnir_llx, swir_llx]), min([vnir_lly, swir_lly])),
+                (max([vnir_lrx, swir_lrx]), min([vnir_lry, swir_lry])))
+
+    def add_band_statistics(self, datastack_vnir_swir: Union[np.ndarray, GeoArray]):
+        R, C, B = datastack_vnir_swir.shape
+        # NOTE: DIVIDE by gains to get unscaled values (reflectance in percent); means/stds are computed per band
+        self.band_means = np.mean(datastack_vnir_swir.reshape(1, R * C, B), axis=1) / self.gains
+        self.band_stds = np.std(datastack_vnir_swir.reshape(1, R * C, B), axis=1) / self.gains
+
+    def add_product_fileinformation(self, filepaths: List[str], sizes: List[int] = None, versions: List[str] = None):
+        self.fileinfos = []
+
+        for i, fp in enumerate(filepaths):
+            ismeta = fp.endswith('METADATA.XML') or fp.endswith('_header.xml')  # FIXME
+            if not os.path.exists(fp):
+                if ismeta:
+                    pass  # does not yet exist
+                else:
+                    raise FileNotFoundError(fp)
+
+            ext = os.path.splitext(fp)[1]
+            fileinfo_dict = dict(
+                name=os.path.basename(fp),
+                size=sizes[i] if sizes else int(os.path.getsize(fp) / 1024) if not ismeta else '',
+                version=versions[i] if versions else '',
+                format='binary' if ext in ['.GEOTIFF',
+                                           '.BSQ',
+                                           '.BIL',
+                                           '.BIP',
+                                           '.JPEG2000'] else 'xml' if ext == '.XML' else 'NA'
+            )
+
+            self.fileinfos.append(fileinfo_dict)
+
+    def to_XML(self) -> str:
+        """
+        Generate an XML metadata string from the L2A metadata.
+        """
+        # use an XML parser that creates properly indented XML files even if new SubElements have been added
+        parser = ElementTree.XMLParser(remove_blank_text=True)
+
+        # parse (use L1B metadata as template)
+        xml = ElementTree.parse(self._meta_l1b.path_xml, parser).getroot()
+
+        if not self.cfg.is_dlr_dataformat:
+            self.logger.warning('No XML metadata conversion implemented for datasets different to the DLR format.'
+                                'Metadata XML file will be empty.')
+            return ''
+
+        self.logger.warning('Currently, the L2A metadata XML file does not contain all relevant keys and contains '
+                            'not updated values!')  # FIXME
+
+        ############
+        # metadata #
+        ############
+
+        xml.find("metadata/schema/processingLevel").text = self.proc_level
+        xml.find("metadata/name").text = self.metaxml_filename
+        # xml.find("metadata/comment").text = 'EnMAP Level 0 Product of datatake 987'  # FIXME hardcoded
+
+        ##############
+        # processing #
+        ##############
+
+        xml.find("processing/terrainCorrection").text = 'Yes'  # FIXME hardcoded {Yes, No}
+        xml.find("processing/ozoneValue").text = 'NA'  # FIXME {[200-500], NA}
+        xml.find("processing/season").text = 'NA'  # FIXME {summer, winter, NA}
+        xml.find("processing/productFormat").text = 'GeoTIFF+Metadata'  # FIXME hardcoded
+        # {BSQ+Metadata, BIL+Metadata, BIP+Metadata, JPEG2000+Metadata, GeoTiff+Metadata}
+        xml.find("processing/mapProjection").text = 'UTM_Zone_of_Scene_Center'  # FIXME hardcoded
+        # {UTM_Zone_of_Scene_Center, UTM_Zone_of_Scene_Center(-1), UTM_Zone_of_Scene_Center(+1),
+        #  UTM_Zone_of_Datatake_Center, Geographic, European_Projection_LAEA, NA}
+        xml.find("processing/DEMDBVersion").text = 'SRTM-C_v4'  # FIXME hardcoded
+        # {SRTM-C-X_vv.rr, best-of-DEM_vv.rr, DEM-derivedfrom-Tandem-X_vv.rr, ASTER-GDEM_vv.rr, NA}
+        xml.find("processing/correctionType").text = 'NA'  # FIXME hardcoded {Combined, Land_Mode, Water_Mode, NA}
+        xml.find("processing/cirrusHazeRemoval").text = 'NA'  # FIXME hardcoded {Yes, No}
+        xml.find("processing/bandInterpolation").text = 'NA'  # FIXME hardcoded {Yes, No}
+        xml.find("processing/waterType").text = 'NA'  # FIXME hardcoded {Clear, Turbid, Highly_Turbid, NA}
+
+        ########
+        # base #
+        ########
+
+        # TODO update corner coordinates? DLR just uses the same coordinates like in L1B
+        # xml.find("base/spatialCoverage" % lbl).text =
+        xml.find("base/format").text = 'ENMAP_%s' % self.proc_level
+        xml.find("base/level").text = self.proc_level
+        xml.find("base/size").text = 'NA'  # FIXME Size of product. Attribute unit {byte, Kbyte, Mbyte, Gbyte}
+
+        ############
+        # specific #
+        ############
+
+        xml.find("specific/code").text = self.proc_level
+        bi = "specific/bandCharacterisation/bandID/"
+        for ele, gain in zip(xml.findall(bi + "GainOfBand"), self.gains):
+            ele.text = str(gain)
+        for ele, offset in zip(xml.findall(bi + "OffsetOfBand"), self.offsets):
+            ele.text = str(offset)
+
+        ###########
+        # product #
+        ###########
+
+        if not self.fileinfos:
+            raise ValueError('Product file informations must be added before writing metadata. '
+                             'Call add_product_fileinformation() before!')
+
+        for detName, detMetaL1B in zip(['VNIR', 'SWIR'], [self._meta_l1b.vnir, self._meta_l1b.swir]):
+            lbl = L2A_product_props_DLR['xml_detector_label'][detName]
+            # FIXME DLR uses L0 filenames for VNIR/SWIR separately?!
+            xml.find("product/image/%s/name" % lbl).text = detMetaL1B.data_filename
+            # FIXME this is the size of the VNIR/SWIR stack
+            size = [F['size'] for F in self.fileinfos if os.path.splitext(F['name'])[0].endswith('-SPECTRAL_IMAGE')][0]
+            xml.find("product/image/%s/size" % lbl).text = str(size)
+            # FIXME DLR data dimensions equal neither L2A data nor L1B data
+            xml.find("product/image/%s/channels" % lbl).text = str(detMetaL1B.nwvl)
+            xml.find("product/image/%s/dimension/rows" % lbl).text = str(self.nrows)
+            xml.find("product/image/%s/dimension/columns" % lbl).text = str(self.ncols)
+            # xml.find("product/image/%s/dimension/dimensionGeographic/longitude" % lbl).text = 'NA'  # TODO
+            # xml.find("product/image/%s/dimension/dimensionGeographic/latitude" % lbl).text = 'NA'
+
+            fN_quicklook = self.quicklook_filename_vnir if detName == 'VNIR' else self.quicklook_filename_swir
+            size_quicklook = [F['size'] for F in self.fileinfos
+                              if os.path.splitext(F['name'])[0].endswith('-QL_%s' % detName)][0]
+            xml.find("product/quicklook/%s/name" % lbl).text = fN_quicklook
+            xml.find("product/quicklook/%s/size" % lbl).text = str(size_quicklook)
+            xml.find("product/quicklook/%s/dimension/rows" % lbl).text = str(self.nrows)
+            xml.find("product/quicklook/%s/dimension/columns" % lbl).text = str(self.ncols)
+            # xml.find("product/quicklook/%s/dimension/dimensionGeographic/longitude" % lbl).text = 'NA'
+            # xml.find("product/quicklook/%s/dimension/dimensionGeographic/latitude" % lbl).text = 'NA'
+
+        # productFileInformation
+        ########################
+
+        # get L1B product file information
+        l1b_fileinfos = xmlSubtree2dict(xml, 'product/productFileInformation/')
+
+        # clear old L1B file information in XML
+        pFI_root = xml.findall('product/productFileInformation')[0]
+        pFI_root.clear()
+
+        # recreate sub-elements for productFileInformation according to L2A file information
+        for i, fileInfo in enumerate(self.fileinfos):
+            fn_l1b_exp = fileInfo['name'].replace('L2A', '*').replace('-SPECTRAL_IMAGE', '-SPECTRAL_IMAGE_VNIR')
+            l1b_fileInfo = [fI for fI in l1b_fileinfos.values() if fnmatch.fnmatch(fI['name'], fn_l1b_exp)]
+
+            if l1b_fileInfo:
+                # TODO update file size of METADATA.XML (has not been written yet)
+                fileInfo['size'] = fileInfo['size'] or l1b_fileInfo[0]['size']
+                fileInfo['version'] = fileInfo['version'] or l1b_fileInfo[0]['version']
+            else:
+                # FIXME if no L1B equivalent is found for the file to be written, the file version will be empty ('')
+                pass
+
+            sub = ElementTree.SubElement(pFI_root, 'file', number=str(i))
+
+            for k, kw in zip(['name', 'size', 'version', 'format'], [{}, {'unit': 'kbytes'}, {}, {}]):
+                ele = ElementTree.SubElement(sub, k, **kw)
+                ele.text = str(fileInfo[k])
+
+        # TODO update product/ortho/projection
+        #      {UTM_ZoneX_North, UTM_ZoneX_South (where X in {1..60}), Geographic, LAEA-ETRS89, NA}
+        xml.find('product/ortho/resolution').text = str(enmap_xres)
+        xml.find('product/ortho/resampling').text = self.cfg.ortho_resampAlg
+
+        # band statistics
+        #################
+
+        if self.band_means is None or self.band_stds is None:
+            raise ValueError('Band statistics have not yet been computed. Compute them first by calling '
+                             'add_band_statistics()!')
+
+        bs = "specific/bandStatistics/bandID/"
+        for ele, mean in zip(xml.findall(bs + "meanReflectance"), self.band_means):
+            ele.text = str(mean)
+        for ele, std in zip(xml.findall(bs + "stdDeviation"), self.band_stds):
+            ele.text = str(std)
+
+        xml_string = ElementTree.tostring(xml, encoding='unicode', pretty_print=True)
+
+        return xml_string
+
+
+def xmlSubtree2dict(xml_root, path_subtree) -> OrderedDict:
+    outDict = OrderedDict()
+    allEle = xml_root.findall(path_subtree)
+
+    for ele in allEle:
+        eleKey = '%s_%s' % (ele.tag, ele.get('number'))
+        outDict[eleKey] = dict()
+        for subele in ele:
+            outDict[eleKey][subele.tag] = subele.text
+
+    return outDict
diff --git a/enpt/model/srf.py b/enpt/model/srf.py
index d9b47c9330c61f393268b955a11342e66d33ad5d..038fd1869abd4864215c817c5dfb599f9d0358f3 100644
--- a/enpt/model/srf.py
+++ b/enpt/model/srf.py
@@ -9,8 +9,8 @@ from scipy import stats
 
 
 class SRF(object):
-    def __init__(self, wvl_unit: str='nanometers', wvl_min: float=400, wvl_max: float=2500, specres_nm: float=1,
-                 format_bandnames: bool=False, v: bool=False):
+    def __init__(self, wvl_unit: str = 'nanometers', wvl_min: float = 400, wvl_max: float = 2500, specres_nm: float = 1,
+                 format_bandnames: bool = False, v: bool = False):
         """SRF instance provides SRF functions, wavelength positions, etc..
 
         :param wvl_unit:            the wavelengths unit to be used within SRF instance ('nanometers' or 'micrometers)
@@ -38,7 +38,7 @@ class SRF(object):
 
     @staticmethod
     def compute_gaussian_srf(cwl: float, fwhm: float, wvl_min: float, wvl_max: float, wvl_res: float,
-                             normalize: bool=True) -> np.ndarray:
+                             normalize: bool = True) -> np.ndarray:
         """Compute a spectral response function based on center wavelength and band width using on a gaussian curve.
 
         :param cwl:         target center wavelength position
@@ -118,7 +118,7 @@ class SRF(object):
         for band in self.bands:
             yield self[band]
 
-    def plot_srfs(self, figsize: tuple=(15, 5), band: Union[str, List[str]]=None, normalize: bool=True):
+    def plot_srfs(self, figsize: tuple = (15, 5), band: Union[str, List[str]] = None, normalize: bool = True):
         """Show a plot of all spectral response functions.
 
         :param figsize: figure size of the plot
diff --git a/enpt/options/config.py b/enpt/options/config.py
index dcb06d6ca39f472c79c8d544a51badef8b627ad7..14f0f8f8e6f26dfaca24c8fdb959bc5a889213dc 100644
--- a/enpt/options/config.py
+++ b/enpt/options/config.py
@@ -43,11 +43,37 @@ config_for_testing = dict(
     log_level='DEBUG',
     output_dir=os.path.join(path_enptlib,  '..', 'tests', 'data', 'test_outputs'),
     n_lines_to_append=50,
-    disable_progress_bars=True
+    disable_progress_bars=True,
+    is_dlr_dataformat=False,
+    enable_ac=False
 )
 
+
+config_for_testing_dlr = dict(
+    path_l1b_enmap_image=os.path.abspath(
+        os.path.join(path_enptlib, '..', 'tests', 'data', 'EnMAP_Level_1B',
+                     'ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000003_20181214T160003Z__'
+                     'rows0-99.zip')),
+    path_l1b_enmap_image_gapfill=os.path.abspath(
+        os.path.join(path_enptlib, '..', 'tests', 'data', 'EnMAP_Level_1B',
+                     'ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000003_20181214T160003Z__'
+                     'rows100-199.zip')),
+    path_dem=os.path.abspath(
+        os.path.join(path_enptlib, '..', 'tests', 'data', 'DLR_L2A_DEM_UTM32.bsq')),
+    log_level='DEBUG',
+    output_dir=os.path.join(path_enptlib,  '..', 'tests', 'data', 'test_outputs'),
+    n_lines_to_append=50,
+    disable_progress_bars=True,
+    is_dlr_dataformat=True,
+    enable_ac=False,
+    ortho_resampAlg='gauss'
+)
+
+
 enmap_coordinate_grid = dict(x=np.array([0, 30]),
                              y=np.array([0, 30]))
+enmap_xres, enmap_yres = np.ptp(enmap_coordinate_grid['x']), np.ptp(enmap_coordinate_grid['y'])
+assert enmap_xres == enmap_yres, 'Unequal X/Y resolution of the output grid!'
 
 
 class EnPTConfig(object):
@@ -79,6 +105,10 @@ class EnPTConfig(object):
         # general options #
         ###################
 
+        try:
+            self.is_dlr_dataformat = gp('is_dlr_dataformat')
+        except:  # noqa E722  # FIXME
+            self.is_dlr_dataformat = False
         self.CPUs = gp('CPUs', fallback=cpu_count())
         self.log_level = gp('log_level')
         self.create_logfile = gp('create_logfile')
@@ -112,6 +142,7 @@ class EnPTConfig(object):
         self.path_reference_image = gp('path_reference_image')
 
         # atmospheric_correction
+        self.enable_ac = gp('enable_ac')
         self.sicor_cache_dir = gp('sicor_cache_dir', fallback=sicor.__path__[0])
         self.auto_download_ecmwf = gp('auto_download_ecmwf')
         self.enable_cloud_screening = gp('enable_cloud_screening')
@@ -123,7 +154,8 @@ class EnPTConfig(object):
         # dead_pixel
         self.run_deadpix_P = gp('run_deadpix_P')
         self.deadpix_P_algorithm = gp('deadpix_P_algorithm')
-        self.deadpix_P_interp = gp('deadpix_P_interp')
+        self.deadpix_P_interp_spectral = gp('deadpix_P_interp_spectral')
+        self.deadpix_P_interp_spatial = gp('deadpix_P_interp_spatial')
 
         # orthorectification
         self.ortho_resampAlg = gp('ortho_resampAlg')
diff --git a/enpt/options/options_default.json b/enpt/options/options_default.json
index 81316a0bb66cb6641c4a8de459ceae412a80e477..474a2792897b124fc47a3dfad06fe481c736b12a 100644
--- a/enpt/options/options_default.json
+++ b/enpt/options/options_default.json
@@ -36,6 +36,8 @@
         },
 
         "atmospheric_correction": {
+            "enable_ac": true,  /*enable atmospheric correction using SICOR algorithm (default: True). If False, the L2A
+                                  output contains top-of-atmosphere reflectance.*/
             "sicor_cache_dir": "",  /*directory to be used to stored sicor cache files
                                       NOTE: SICOR stores intermediate results there that need to computed only once
                                             for atmospheric correction of multiple EnMAP images. (default: 'auto')*/
@@ -52,13 +54,15 @@
             "run_processor": true,
             "algorithm": "spectral",  /*algorithm how to correct dead pixels
                                         'spectral': interpolate in the spectral domain
-                                        'spatial':  interpolate in the spatial domain
-                                        NOTE: currently, only the 'spectral' algorithm is implemented*/
-            "interpolation_method": "linear"  /*interpolation algorithm ('linear', 'bilinear', 'cubic', 'spline')*/
+                                        'spatial':  interpolate in the spatial domain*/
+            "interp_method_spectral": "linear",  /*spectral interpolation algorithm
+                                                   ('linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', ...)*/
+            "interp_method_spatial": "linear"  /*spatial interpolation algorithm
+                                                 ('linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', ...)*/
         },
 
         "orthorectification": {
-            "resamp_alg": 1  /*resampling algorithm TODO*/
+            "resamp_alg": "bilinear"  /*Ortho-rectification resampling algorithm ('nearest', 'bilinear', 'gauss')*/
         }
     }
 
diff --git a/enpt/options/options_schema.py b/enpt/options/options_schema.py
index fa2b184aee6a3e29cc29b9b9fea3755ffad7211a..f10af6d29c5c5d3fc98e96a094b1e63f0bca7afb 100644
--- a/enpt/options/options_schema.py
+++ b/enpt/options/options_schema.py
@@ -45,6 +45,7 @@ enpt_schema_input = dict(
             atmospheric_correction=dict(
                 type='dict', required=False,
                 schema=dict(
+                    enable_ac=dict(type='boolean', required=False),
                     sicor_cache_dir=dict(type='string', required=False),
                     auto_download_ecmwf=dict(type='boolean', required=False),
                     enable_cloud_screening=dict(type='boolean', required=False),
@@ -61,14 +62,16 @@ enpt_schema_input = dict(
                 schema=dict(
                     run_processor=dict(type='boolean', required=False),
                     algorithm=dict(type='string', required=False, allowed=['spectral', 'spatial']),
-                    interpolation_method=dict(type='string', required=False,
-                                              allowed=['linear', 'bilinear', 'cubic', 'spline']),
+                    interp_method_spectral=dict(type='string', required=False,
+                                                allowed=['linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic']),
+                    interp_method_spatial=dict(type='string', required=False,
+                                               allowed=['linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic']),
                 )),
 
             orthorectification=dict(
                 type='dict', required=False,
                 schema=dict(
-                    resamp_alg=dict(type='integer', required=False),
+                    resamp_alg=dict(type='string', required=False, allowed=['nearest', 'bilinear', 'gauss']),
                 ))
         ))
 )
@@ -102,6 +105,7 @@ parameter_mapping = dict(
     path_reference_image=('processors', 'geometry', 'path_reference_image'),
 
     # processors > atmospheric_correction
+    enable_ac=('processors', 'atmospheric_correction', 'enable_ac'),
     sicor_cache_dir=('processors', 'atmospheric_correction', 'sicor_cache_dir'),
     auto_download_ecmwf=('processors', 'atmospheric_correction', 'auto_download_ecmwf'),
     enable_cloud_screening=('processors', 'atmospheric_correction', 'enable_cloud_screening'),
@@ -113,7 +117,8 @@ parameter_mapping = dict(
     # processors > dead_pixel
     run_deadpix_P=('processors', 'dead_pixel', 'run_processor'),
     deadpix_P_algorithm=('processors', 'dead_pixel', 'algorithm'),
-    deadpix_P_interp=('processors', 'dead_pixel', 'interpolation_method'),
+    deadpix_P_interp_spectral=('processors', 'dead_pixel', 'interp_method_spectral'),
+    deadpix_P_interp_spatial=('processors', 'dead_pixel', 'interp_method_spatial'),
 
     # processors > orthorectification
     ortho_resampAlg=('processors', 'orthorectification', 'resamp_alg'),
diff --git a/enpt/processors/atmospheric_correction/atmospheric_correction.py b/enpt/processors/atmospheric_correction/atmospheric_correction.py
index 8872b4b1366d4b9bba581f0c117d89f9befccb30..541ef6e49c3a2a7006252de0fc2f0912b8206178 100644
--- a/enpt/processors/atmospheric_correction/atmospheric_correction.py
+++ b/enpt/processors/atmospheric_correction/atmospheric_correction.py
@@ -51,8 +51,8 @@ class AtmosphericCorrector(object):
         # run AC
         enmap_ImageL1.logger.info("Starting atmospheric correction for VNIR and SWIR detector. "
                                   "Source radiometric unit code is '%s'." % enmap_ImageL1.meta.vnir.unitcode)
-        enmap_l2a_sens_geo, state, cwv_map, ch4_map = sicor_ac_enmap(enmap_l1b=enmap_ImageL1, options=options,
-                                                                     logger=enmap_ImageL1.logger)
+        enmap_l2a_sens_geo, state = sicor_ac_enmap(enmap_l1b=enmap_ImageL1, options=options,
+                                                   logger=enmap_ImageL1.logger)
 
         # join results
         enmap_ImageL1.logger.info('Joining results of atmospheric correction.')
@@ -68,4 +68,6 @@ class AtmosphericCorrector(object):
             in_detector.detector_meta.unit = '0-%d' % self.cfg.scale_factor_boa_ref
             in_detector.detector_meta.unitcode = 'BOARef'
 
+            # FIXME what about mask_clouds, mask_clouds_confidence, ac_errors?
+
         return enmap_ImageL1
diff --git a/enpt/processors/dead_pixel_correction/__init__.py b/enpt/processors/dead_pixel_correction/__init__.py
index c1548c077158a592a85e23d212b5f0f75a8c1e99..09302d13f7d681d4e9dfbe53f10eb16e34fbaa78 100644
--- a/enpt/processors/dead_pixel_correction/__init__.py
+++ b/enpt/processors/dead_pixel_correction/__init__.py
@@ -1,6 +1,4 @@
 # -*- coding: utf-8 -*-
 """EnPT 'dead pixel correction' module for detecting and correcting dead pixels."""
 
-from .dead_pixel_correction import Dead_Pixel_Corrector
-
-__all__ = ['Dead_Pixel_Corrector']
+from .dead_pixel_correction import *  # noqa: F401,F403  # flake8 unable to detect undefined names
diff --git a/enpt/processors/dead_pixel_correction/dead_pixel_correction.py b/enpt/processors/dead_pixel_correction/dead_pixel_correction.py
index 8bed067e9eb14bcc625c07d2b9d69e6bf4e2ab12..3dae81667e3ca5568554f239e43d823e8b98d025 100644
--- a/enpt/processors/dead_pixel_correction/dead_pixel_correction.py
+++ b/enpt/processors/dead_pixel_correction/dead_pixel_correction.py
@@ -4,130 +4,321 @@
 Performs the Dead Pixel Correction using a linear interpolation in spectral dimension.
 """
 from typing import Union
+from numbers import Number  # noqa: F401
 import logging
-from tqdm import tqdm
 
 import numpy as np
-from scipy.interpolate import griddata
+import numpy_indexed as npi
+from multiprocessing import Pool, cpu_count
+from scipy.interpolate import griddata, interp1d
+from pandas import DataFrame
 from geoarray import GeoArray
 
 
 class Dead_Pixel_Corrector(object):
-    def __init__(self, algorithm: str = 'spectral', interp: str = 'linear', filter_halfwidth: int = 1,
-                 logger: logging.Logger = None):
+    def __init__(self, algorithm: str = 'spectral', interp_spectral: str = 'linear', interp_spatial: str = 'linear',
+                 CPUs: int = None, logger: logging.Logger = None):
         """Get an instance of Dead_Pixel_Corrector.
 
         :param algorithm:           algorithm how to correct dead pixels
                                     'spectral': interpolate in the spectral domain
                                     'spatial':  interpolate in the spatial domain
-        :param interp:              interpolation algorithm ('linear', 'bilinear', 'cubic', 'spline')
-        :param filter_halfwidth:    half width of interpolation filter
-                                    (determines the number of adjacant pixels to be respected in interpation)
+        :param interp_spectral:     spectral interpolation algorithm
+                                    (‘linear’, ‘nearest’, ‘zero’, ‘slinear’, ‘quadratic’, ‘cubic’, etc.)
+        :param interp_spatial:      spatial interpolation algorithm ('linear', 'bilinear', 'cubic', 'spline')
+        :param CPUs:                number of CPUs to use for interpolation (only relevant if algorithm = 'spatial')
         :param logger:
         """
         self.algorithm = algorithm
-        self.interp_alg = interp
-        self.fhw = filter_halfwidth
+        self.interp_alg_spectral = interp_spectral
+        self.interp_alg_spatial = interp_spatial
+        self.CPUs = CPUs or cpu_count()
         self.logger = logger or logging.getLogger()
 
     @staticmethod
-    def _validate_inputs(image2correct: GeoArray, deadcolumn_map: GeoArray):
-        if (image2correct.bands, image2correct.columns) != deadcolumn_map.shape:
-            raise ValueError('The given image to be corrected (shape: %s) requires a dead column map with shape '
-                             '(%s, %s). Received %s.'
-                             % (image2correct.shape, image2correct.bands, image2correct.columns, deadcolumn_map.shape))
-
-    def correct(self, image2correct: Union[np.ndarray, GeoArray], deadcolumn_map: Union[np.ndarray, GeoArray],
-                progress=False):
-        """
+    def _validate_inputs(image2correct: GeoArray, deadpixel_map: GeoArray):
+        if deadpixel_map.ndim == 2:
+            if (image2correct.bands, image2correct.columns) != deadpixel_map.shape:
+                raise ValueError('The given image to be corrected (shape: %s) requires a dead column map with shape '
+                                 '(%s, %s). Received %s.'
+                                 % (image2correct.shape, image2correct.bands,
+                                    image2correct.columns, deadpixel_map.shape))
+        elif deadpixel_map.ndim == 3:
+            if image2correct.shape != deadpixel_map.shape:
+                raise ValueError('The given image to be corrected (shape: %s) requires a dead pixel map with equal '
+                                 'shape. Received %s.' % (image2correct.shape, deadpixel_map.shape))
+        else:
+            raise ValueError('Unexpected number of dimensions of dead column map.')
+
+    def _interpolate_nodata_spectrally(self, image2correct: GeoArray, deadpixel_map: GeoArray):
+        assert deadpixel_map.ndim == 3, "3D dead pixel map expected."
+        if deadpixel_map.shape != image2correct.shape:
+            raise ValueError("Dead pixel map and image to be corrected must have equal shape.")
+
+        image_corrected = interp_nodata_along_axis(image2correct, axis=2, nodata=deadpixel_map[:],
+                                                   method=self.interp_alg_spectral, fill_value='extrapolate')
+
+        return image_corrected
+
+    def _interpolate_nodata_spatially(self, image2correct: GeoArray, deadpixel_map: GeoArray):
+        assert deadpixel_map.ndim == 3, "3D dead pixel map expected."
+        if deadpixel_map.shape != image2correct.shape:
+            raise ValueError("Dead pixel map and image to be corrected must have equal shape.")
 
-        NOTE: dead columns in the first or the last band are unmodified.
+        band_indices_with_nodata = np.argwhere(np.any(np.any(deadpixel_map, axis=0), axis=0)).flatten()
+        image_sub = image2correct[:, :, band_indices_with_nodata]
+        deadpixel_map_sub = deadpixel_map[:, :, band_indices_with_nodata]
 
-        :param image2correct:
-        :param deadcolumn_map:
-        :param progress:        whether to show progress bars
-        :return:
+        kw = dict(method=self.interp_alg_spatial, fill_value=np.nan, implementation='pandas', CPUs=self.CPUs)
+
+        # correct dead columns
+        image_sub_interp = interp_nodata_spatially_3d(image_sub, axis=1, nodata=deadpixel_map_sub, **kw)
+
+        # correct dead rows
+        if np.isnan(image_sub_interp).any():
+            image_sub_interp = interp_nodata_spatially_3d(image_sub_interp, axis=0,
+                                                          nodata=np.isnan(image_sub_interp), **kw)
+
+        image2correct[:, :, band_indices_with_nodata] = image_sub_interp
+
+        # correct remaining nodata by spectral interpolation (e.g., outermost columns)
+        if np.isnan(image2correct).any():
+            image2correct = interp_nodata_along_axis(image2correct, axis=2, nodata=np.isnan(image2correct),
+                                                     method=self.interp_alg_spectral, fill_value='extrapolate')
+
+        return image2correct
+
+    def correct(self, image2correct: Union[np.ndarray, GeoArray], deadpixel_map: Union[np.ndarray, GeoArray]):
+        """Run the dead pixel correction.
+
+        :param image2correct:   image to correct
+        :param deadpixel_map:   dead pixel map (2D (bands x columns) or 3D (rows x columns x bands))
+        :return:    corrected image
         """
         image2correct = GeoArray(image2correct) if not isinstance(image2correct, GeoArray) else image2correct
-        deadcolumn_map = GeoArray(deadcolumn_map) if not isinstance(deadcolumn_map, GeoArray) else deadcolumn_map
 
-        # validate
-        self._validate_inputs(image2correct, deadcolumn_map)
+        self._validate_inputs(image2correct, deadpixel_map)
+
+        if 1 in list(np.unique(deadpixel_map)):
+            if deadpixel_map.ndim == 2:
+                deadcolumn_map = deadpixel_map
+
+                # compute dead pixel percentage
+                prop_dp_anyband = \
+                    np.any(deadcolumn_map, axis=0).sum() * image2correct.shape[0] / np.dot(*image2correct.shape[:2])
+                prop_dp = deadcolumn_map.sum() * image2correct.shape[0] / image2correct.size
+
+                # convert 2D deadcolumn_map to 3D deadpixel_map
+                B, C = deadcolumn_map.shape
+                deadpixel_map = np.empty((image2correct.shape[0], C, B), np.bool)
+                deadpixel_map[:, :, :] = deadcolumn_map.T
+
+            else:
+                # compute dead pixel percentage
+                prop_dp_anyband = np.any(deadpixel_map, axis=2).sum() / np.dot(*image2correct.shape[:2])
+                prop_dp = deadpixel_map.sum() / image2correct.size
+
+            self.logger.info('Percentage of defective pixels: %.2f' % (prop_dp * 100))
+            self.logger.debug('Percentage of pixel with a defect in any band: %.2f' % (prop_dp_anyband * 100))
+
+            # run correction
+            if self.algorithm == 'spectral':
+                return self._interpolate_nodata_spectrally(image2correct, deadpixel_map)
+            else:
+                return self._interpolate_nodata_spatially(image2correct, deadpixel_map)
+
+        else:
+            self.logger.info("Dead pixel correction skipped because dead pixel mask labels no pixels as 'defective'.")
+            return image2correct
+
 
-        #################
-        # Interpolation #
-        #################
+def _get_baddata_mask(data: np.ndarray, nodata: Union[np.ndarray, Number] = np.nan, transpose_inNodata: bool = False):
+    if isinstance(nodata, np.ndarray):
+        badmask = nodata.T if transpose_inNodata else nodata
 
-        if self.algorithm == 'spectral':
-            # set bands where no spectral interpolation is possible -> spatial interpolation
-            band_nbrs_spatial_interp = \
-                list(range(self.fhw)) + list(range(image2correct.bands-1, image2correct.bands-self.fhw-1, -1))
+        if badmask.shape != data.shape:
+            raise ValueError('No-data mask and data must have the same shape.')
 
-            # spatial interpolation (fallback) #
-            ####################################
+    else:
+        badmask = ~np.isfinite(data) if ~np.isfinite(nodata) else data == nodata
 
-            # NOTE: this is done first, because spectral interpolation needs the information on the outermost pixels
+    return badmask
 
-            for band, column in np.argwhere(deadcolumn_map):
 
-                # only first or last bands (number depends on filter half width)
-                if band in band_nbrs_spatial_interp:
-                    if column in [0, image2correct.shape[1] - 1]:
-                        # currently, dead pixels in the outermost bands at the outermost columns are not corrected
-                        self.logger.warning('Unable to correct dead column %s in band %s.' % (column, band))
-                    else:
-                        self.logger.debug('Correcting dead column %s in band %s.' % (column, band))
+def interp_nodata_along_axis_2d(data_2d: np.ndarray, axis: int = 0,
+                                nodata: Union[np.ndarray, Number] = np.nan,
+                                method: str = 'linear', fill_value: Union[float, str] = 'extrapolate'):
+    """Interpolate a 2D array along the given axis (based on scipy.interpolate.interp1d).
 
-                        band_data_orig = image2correct[:, column - 1:column + 2, band]  # target and adjacent columns
-                        band_data_float = band_data_orig.astype(np.float)
-                        band_data_float[:, 1] = np.NaN
+    :param data_2d:         data to interpolate
+    :param axis:            axis to interpolate (0: along columns; 1: along rows)
+    :param nodata:          nodata array in the shape of data or nodata value
+    :param method:          interpolation method (‘linear’, ‘nearest’, ‘zero’, ‘slinear’, ‘quadratic’, ‘cubic’, etc.)
+    :param fill_value:      value to fill into positions where no interpolation is possible
+                            - if 'extrapolate': extrapolate the missing values
+    :return:    interpolated array
+    """
+    if data_2d.ndim != 2:
+        raise ValueError('Expected a 2D array. Received a %dD array.' % data_2d.ndim)
+    if axis > data_2d.ndim:
+        raise ValueError("axis=%d is out of bounds for data with %d dimensions." % (axis, data_2d.ndim))
 
-                        x, y = np.indices(band_data_float.shape)
-                        interp = np.array(band_data_float)
+    data_2d = data_2d if axis == 1 else data_2d.T
 
-                        interp[np.isnan(interp)] = \
-                            griddata(np.array([x[~np.isnan(band_data_float)],
-                                               y[~np.isnan(band_data_float)]]).T,  # points we know
-                                     band_data_float[~np.isnan(band_data_float)],  # values we know
-                                     np.array([x[np.isnan(band_data_float)],
-                                               y[np.isnan(band_data_float)]]).T,  # points to interpolate
-                                     method=self.interp_alg)
+    badmask_full = _get_baddata_mask(data_2d, nodata, transpose_inNodata=axis == 0)
 
-                        # copy corrected columns to image2correct
-                        interp[np.isnan(interp)] = band_data_orig[np.isnan(interp)]
-                        image2correct[:, column - 1:column + 2, band] = interp.astype(image2correct.dtype)
+    # call 1D interpolation vectorized
+    #   => group the dataset by rows that have nodata at the same column position
+    #   => remember the row positions, call the interpolation for these rows at once (vectorized)
+    #      and substitute the original data at the previously grouped row positions
+    groups_unique_rows = npi.group_by(badmask_full).split(np.arange(len(badmask_full)))
 
-            # spectral interpolation #
-            #########################
+    for indices_unique_rows in groups_unique_rows:
+        badmask_grouped_rows = badmask_full[indices_unique_rows, :]
 
-            for band, column in tqdm(np.argwhere(deadcolumn_map), disable=False if progress else True):
-                if band in band_nbrs_spatial_interp:
-                    continue  # already interpolated spatially above
+        if np.any(badmask_grouped_rows[0, :]):
+            badpos = np.argwhere(badmask_grouped_rows[0, :]).flatten()
+            goodpos = np.delete(np.arange(data_2d.shape[1]), badpos)
 
-                # any other band
-                else:
-                    self.logger.debug('Correcting dead column %s in band %s.' % (column, band))
+            if goodpos.size > 1:
+                data_2d_grouped_rows = data_2d[indices_unique_rows]
 
-                    column_data_orig = image2correct[:, column, band-self.fhw:band+self.fhw+1]
-                    column_data_float = column_data_orig.astype(np.float)
-                    column_data_float[:, self.fhw] = np.NaN
+                data_2d_grouped_rows[:, badpos] = \
+                    interp1d(goodpos, data_2d_grouped_rows[:, goodpos],
+                             axis=1, kind=method, fill_value=fill_value, bounds_error=False)(badpos)
 
-                    x, y = np.indices(column_data_float.shape)
-                    interp = np.array(column_data_float)
+                data_2d[indices_unique_rows, :] = data_2d_grouped_rows
 
-                    interp[np.isnan(interp)] = \
-                        griddata(np.array([x[~np.isnan(column_data_float)],
-                                           y[~np.isnan(column_data_float)]]).T,  # points we know
-                                 column_data_float[~np.isnan(column_data_float)],  # values we know
-                                 np.array([x[np.isnan(column_data_float)],
-                                           y[np.isnan(column_data_float)]]).T,  # points to interpolate
-                                 method=self.interp_alg)
+    return data_2d if axis == 1 else data_2d.T
 
-                    # copy corrected columns to image2correct
-                    interp[np.isnan(interp)] = column_data_orig[np.isnan(interp)]
-                    image2correct[:, column, band-self.fhw:band+self.fhw+1] = interp.astype(image2correct.dtype)
+
+def interp_nodata_along_axis(data, axis=0, nodata: Union[np.ndarray, Number] = np.nan,
+                             method: str = 'linear', fill_value: Union[float, str] = 'extrapolate'):
+    """Interpolate a 2D or 3D array along the given axis (based on scipy.interpolate.interp1d).
+
+    :param data:            data to interpolate
+    :param axis:            axis to interpolate (0: along columns; 1: along rows, 2: along bands)
+    :param nodata:          nodata array in the shape of data or nodata value
+    :param method:          interpolation method (‘linear’, ‘nearest’, ‘zero’, ‘slinear’, ‘quadratic’, ‘cubic’, etc.)
+    :param fill_value:      value to fill into positions where no interpolation is possible
+                            - if 'extrapolate': extrapolate the missing values
+    :return:    interpolated array
+    """
+    assert axis <= 2
+    if data.ndim not in [2, 3]:
+        raise ValueError('Expected a 2D or 3D array. Received a %dD array.' % data.ndim)
+    if isinstance(nodata, np.ndarray) and nodata.shape != data.shape:
+        raise ValueError('No-data mask and data must have the same shape.')
+
+    if data.ndim == 2:
+        return interp_nodata_along_axis_2d(data, axis=axis, nodata=nodata, method=method, fill_value=fill_value)
+
+    else:
+        def reshape_input(In):
+            R, C, B = In.shape
+            return \
+                In.reshape(C, R * B) if axis == 0 else \
+                np.transpose(In, axes=[1, 0, 2]).reshape(C, R * B).T if axis == 1 else \
+                In.reshape(R * C, B)
+
+        def reshape_output(out):
+            return \
+                out.reshape(data.shape) if axis in [0, 2] else \
+                np.transpose(out.T.reshape(data.shape), axes=[1, 0, 2])
+
+        return \
+            reshape_output(
+                interp_nodata_along_axis_2d(
+                    data_2d=reshape_input(data),
+                    nodata=reshape_input(nodata) if isinstance(nodata, np.ndarray) else nodata,
+                    axis=axis if axis != 2 else 1,
+                    method=method, fill_value=fill_value))
+
+
+def interp_nodata_spatially_2d(data_2d: np.ndarray, axis: int = 0, nodata: Union[np.ndarray, Number] = np.nan,
+                               method: str = 'linear', fill_value: float = np.nan,
+                               implementation: str = 'pandas') -> np.ndarray:
+    """Interpolate a 2D array spatially.
+
+    NOTE: If data_2d contains NaN values that are unlabelled by a given nodata array,
+          they are also overwritten in the pandas implementation.
+
+    :param data_2d:         data to interpolate
+    :param axis:            axis to interpolate (0: along columns; 1: along rows)
+    :param nodata:          nodata array in the shape of data or nodata value
+    :param method:          interpolation method
+                            - if implementation='scipy': ‘linear’, ‘nearest’, ‘cubic’
+                            - if implementation='pandas': ‘linear’, ‘nearest’, ‘slinear’, ‘quadratic’, ‘cubic’, etc.
+    :param fill_value:      value to fill into positions where no interpolation is possible
+    :param implementation:  'scipy': interpolation based on scipy.interpolate.griddata
+                            'pandas': interpolation based on pandas.core.resample.Resampler.interpolate
+    :return:    interpolated array
+    """
+    assert axis < 2
+    if data_2d.ndim != 2:
+        raise ValueError('Expected a 2D array. Received a %dD array.' % data_2d.ndim)
+
+    badmask_full = _get_baddata_mask(data_2d, nodata)
+
+    if badmask_full.any():
+        if implementation == 'scipy':
+            if axis == 0:
+                y, x = np.indices(data_2d.shape)
+            else:
+                x, y = np.indices(data_2d.shape)
+
+            data_2d[badmask_full] = \
+                griddata(np.array([x[~badmask_full], y[~badmask_full]]).T,  # points we know
+                         data_2d[~badmask_full],  # values we know
+                         np.array([x[badmask_full], y[badmask_full]]).T,  # points to interpolate
+                         method=method, fill_value=fill_value)
+
+        elif implementation == 'pandas':
+            data2int = data_2d.astype(np.float)
+            data2int[badmask_full] = np.nan
+
+            data_2d = np.array(DataFrame(data2int)
+                               .interpolate(method=method, axis=axis)).astype(data_2d.dtype)
+
+            if fill_value:
+                data_2d[np.isnan(data_2d)] = fill_value
 
         else:
-            raise NotImplementedError("Currently only the algorithm 'spectral' is implemented.")
+            raise ValueError(implementation, 'Unknown implementation.')
 
-        return image2correct
+    return data_2d
+
+
+def interp_nodata_spatially_3d(data_3d: np.ndarray, axis: int = 0, nodata: Union[np.ndarray, Number] = np.nan,
+                               method: str = 'linear', fill_value: float = np.nan, implementation: str = 'pandas',
+                               CPUs: int = None) -> np.ndarray:
+    """Interpolate a 3D array spatially, band-for-band.
+
+    :param data_3d:         data to interpolate
+    :param axis:            axis to interpolate (0: along columns; 1: along rows)
+    :param nodata:          nodata array in the shape of data or nodata value
+    :param method:          interpolation method
+                            - if implementation='scipy': ‘linear’, ‘nearest’, ‘cubic’
+                            - if implementation='pandas': ‘linear’, ‘nearest’, ‘slinear’, ‘quadratic’, ‘cubic’, etc.
+    :param fill_value:      value to fill into positions where no interpolation is possible
+    :param implementation:  'scipy': interpolation based on scipy.interpolate.griddata
+                            'pandas': interpolation based on pandas.core.resample.Resampler.interpolate
+    :param CPUs:            number of CPUs to use
+    :return:    interpolated array
+    """
+    assert axis < 2
+
+    badmask_full = _get_baddata_mask(data_3d, nodata)
+
+    if CPUs > 1:
+        with Pool(CPUs or cpu_count()) as pool:
+            args = [[data_3d[:, :, band], axis, badmask_full[:, :, band], method, fill_value, implementation]
+                    for band in range(data_3d.shape[2])]
+            return np.dstack(pool.starmap(interp_nodata_spatially_2d, args))
+
+    else:
+        return \
+            np.dstack([interp_nodata_spatially_2d(data_3d[:, :, band], axis=axis,
+                                                  nodata=badmask_full[:, :, band], method=method,
+                                                  fill_value=fill_value, implementation=implementation)
+                       for band in range(data_3d.shape[2])])
diff --git a/enpt/processors/dem_preprocessing/dem_preprocessing.py b/enpt/processors/dem_preprocessing/dem_preprocessing.py
index 20dd73c85145f0eaf6ea9396272b348bf0547a8c..d5ee932bc2595024681e9b3011cfc69d5627f8ca 100644
--- a/enpt/processors/dem_preprocessing/dem_preprocessing.py
+++ b/enpt/processors/dem_preprocessing/dem_preprocessing.py
@@ -1,26 +1,55 @@
 # -*- coding: utf-8 -*-
 """EnPT pre-processing module for digital elevation models."""
 
-from typing import Union  # noqa: F401
+from typing import Union, Tuple  # noqa: F401
 from multiprocessing import cpu_count
 import numpy as np
 
 from geoarray import GeoArray
+from py_tools_ds.geo.projection import get_proj4info, proj4_to_dict
+from py_tools_ds.geo.coord_trafo import reproject_shapelyGeometry, transform_any_prj
+from py_tools_ds.geo.vector.topology import get_footprint_polygon, get_overlap_polygon
 
 from ..spatial_transform import Geometry_Transformer
 
 
 class DEM_Processor(object):
-    def __init__(self, dem_path_geoarray: Union[str, GeoArray], CPUs: int = None):
+    def __init__(self, dem_path_geoarray: Union[str, GeoArray],
+                 enmapIm_cornerCoords: Tuple[Tuple[float, float]],
+                 CPUs: int = None):
         self.dem = GeoArray(dem_path_geoarray)
+        self.enmapIm_cornerCoords = enmapIm_cornerCoords
         self.CPUs = CPUs or cpu_count()
 
         self._validate_input()
 
     def _validate_input(self):
-        # TODO - check geographic datum
-        #      - check overlap
-        pass
+        # check geocoding of DEM
+        if not self.dem.is_map_geo:
+            raise ValueError((self.dem.gt, self.dem.prj),
+                             'The provided digital elevation model has no valid geo-coding or projection.')
+
+        # check if provided as WGS-84
+        proj4dict = proj4_to_dict(get_proj4info(proj=self.dem.prj))
+        if 'datum' not in proj4dict or proj4dict['datum'] != 'WGS84':
+            raise ValueError(proj4dict,
+                             "The digital elevation model must be provided with 'WGS84' as geographic datum.")
+
+        # check overlap
+        dem_ll_mapPoly = reproject_shapelyGeometry(self.dem.footprint_poly, prj_src=self.dem.epsg, prj_tgt=4326)
+        enmapIm_ll_mapPoly = get_footprint_polygon(self.enmapIm_cornerCoords, fix_invalid=True)
+        overlap_dict = get_overlap_polygon(dem_ll_mapPoly, enmapIm_ll_mapPoly)
+        overlap_perc = round(overlap_dict['overlap percentage'], 4)
+
+        if overlap_perc < 100:
+            # compute minimal extent in user provided projection
+            cornersXY = np.array([transform_any_prj(4326, self.dem.epsg, x, y) for x, y in self.enmapIm_cornerCoords])
+            xmin, xmax = cornersXY[:, 0].min(), cornersXY[:, 0].max()
+            ymin, ymax = cornersXY[:, 1].min(), cornersXY[:, 1].max()
+
+            raise ValueError('The provided digital elevation model covers %.1f percent of the EnMAP image. It must '
+                             'cover it completely. The minimal needed extent in the provided projection is: \n'
+                             'xmin: %s, xmax: %s, ymin: %s, ymax: %s.' % (overlap_perc, xmin, xmax, ymin, ymax))
 
     def fill_gaps(self):
         pass
@@ -33,19 +62,10 @@ class DEM_Processor(object):
         # compute on map geometry (as provided)
         pass
 
-    def to_map_geometry(self,
-                        lons: np.ndarray,
-                        lats: np.ndarray,
-                        tgt_prj: Union[str, int] = None):
-        GT = Geometry_Transformer(self.dem, lons=lons, lats=lats, nprocs=self.CPUs)
-        data_mapgeo, gt, prj = GT.to_map_geometry(tgt_prj=tgt_prj)
-
-        return GeoArray(data_mapgeo, geotransform=gt, projection=prj)
-
     def to_sensor_geometry(self,
                            lons: np.ndarray,
                            lats: np.ndarray):
-        GT = Geometry_Transformer(self.dem, lons=lons, lats=lats, nprocs=self.CPUs)
-        data_sensorgeo = GT.to_sensor_geometry()
+        GT = Geometry_Transformer(lons=lons, lats=lats, nprocs=self.CPUs)
+        data_sensorgeo = GT.to_sensor_geometry(self.dem)
 
         return GeoArray(data_sensorgeo)
diff --git a/enpt/processors/orthorectification/__init__.py b/enpt/processors/orthorectification/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc275f7a4f8173ffef5591e68d01a468303a55e7
--- /dev/null
+++ b/enpt/processors/orthorectification/__init__.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+"""
+EnPT module 'orthorectification' for transforming an EnMAP image from sensor to map geometry
+based on a pixel- and band-wise coordinate-layer (geolayer).
+"""
+
+from .orthorectification import Orthorectifier
+
+__all__ = ['Orthorectifier']
diff --git a/enpt/processors/orthorectification/orthorectification.py b/enpt/processors/orthorectification/orthorectification.py
new file mode 100644
index 0000000000000000000000000000000000000000..f285e4a341edc85a5aab30f2ae005d543bae408b
--- /dev/null
+++ b/enpt/processors/orthorectification/orthorectification.py
@@ -0,0 +1,159 @@
+# -*- coding: utf-8 -*-
+"""
+EnPT module 'orthorectification' for transforming an EnMAP image from sensor to map geometry
+based on a pixel- and band-wise coordinate-layer (geolayer).
+"""
+
+from typing import Tuple  # noqa: F401
+
+import numpy as np
+from geoarray import GeoArray
+from py_tools_ds.geo.coord_trafo import transform_any_prj
+from py_tools_ds.geo.projection import EPSG2WKT, prj_equal
+
+from ...options.config import EnPTConfig
+from ...model.images import EnMAPL1Product_SensorGeo, EnMAPL2Product_MapGeo
+from ...model.metadata import EnMAP_Metadata_L2A_MapGeo
+from ..spatial_transform import \
+    Geometry_Transformer, \
+    Geometry_Transformer_3D, \
+    move_extent_to_EnMAP_grid, \
+    get_UTMEPSG_from_LonLat_cornersXY
+
+
+class Orthorectifier(object):
+    def __init__(self, config: EnPTConfig = None):
+        """Create an instance of Orthorectifier."""
+        self.cfg = config
+
+    @staticmethod
+    def validate_input(enmap_ImageL1):
+        # check type
+        if not isinstance(enmap_ImageL1, EnMAPL1Product_SensorGeo):
+            raise TypeError(enmap_ImageL1, "The Orthorectifier expects an instance of 'EnMAPL1Product_SensorGeo'."
+                                           "Received a '%s' instance." % type(enmap_ImageL1))
+
+        # check geolayer shapes
+        for detector in [enmap_ImageL1.vnir, enmap_ImageL1.swir]:
+            for XY in [detector.detector_meta.lons, detector.detector_meta.lats]:
+                datashape = detector.data.shape
+                if XY.shape not in [datashape, datashape[:2]]:
+                    raise RuntimeError('Expected a %s geolayer shape of %s or %s. Received %s.'
+                                       % (detector.detector_name, str(datashape), str(datashape[:2]), str(XY.shape)))
+
+    def run_transformation(self, enmap_ImageL1: EnMAPL1Product_SensorGeo) -> EnMAPL2Product_MapGeo:
+        self.validate_input(enmap_ImageL1)
+
+        enmap_ImageL1.logger.info('Starting orthorectification...')
+
+        # get a new instance of EnMAPL2Product_MapGeo
+        L2_obj = EnMAPL2Product_MapGeo(config=self.cfg, logger=enmap_ImageL1.logger)
+
+        # geometric transformations #
+        #############################
+
+        lons_vnir, lats_vnir = enmap_ImageL1.vnir.detector_meta.lons, enmap_ImageL1.vnir.detector_meta.lats
+        lons_swir, lats_swir = enmap_ImageL1.swir.detector_meta.lons, enmap_ImageL1.swir.detector_meta.lats
+
+        # get target UTM zone and common extent  # TODO add optionally user defined UTM zone?
+        tgt_epsg = self._get_tgt_UTMepsg(enmap_ImageL1)
+        tgt_extent = self._get_common_extent(enmap_ImageL1, tgt_epsg, enmap_grid=True)
+        kw_init = dict(resamp_alg=self.cfg.ortho_resampAlg,
+                       nprocs=self.cfg.CPUs,
+                       radius_of_influence=30 if not self.cfg.ortho_resampAlg == 'bilinear' else 45)
+        kw_trafo = dict(tgt_prj=tgt_epsg, tgt_extent=tgt_extent)
+
+        # transform VNIR and SWIR to map geometry
+        GeoTransformer = Geometry_Transformer if lons_vnir.ndim == 2 else Geometry_Transformer_3D
+
+        GT_vnir = GeoTransformer(lons=lons_vnir, lats=lats_vnir, **kw_init)
+        vnir_mapgeo, vnir_gt, vnir_prj = GT_vnir.to_map_geometry(enmap_ImageL1.vnir.data[:], **kw_trafo)
+
+        GT_swir = GeoTransformer(lons=lons_swir, lats=lats_swir, **kw_init)
+        swir_mapgeo, swir_gt, swir_prj = GT_swir.to_map_geometry(enmap_ImageL1.swir.data[:], **kw_trafo)
+
+        # combine VNIR and SWIR
+        L2_obj.data = self._get_VNIR_SWIR_stack(vnir_mapgeo, swir_mapgeo, vnir_gt, swir_gt, vnir_prj, swir_prj,
+                                                enmap_ImageL1.meta.vnir.wvl_center, enmap_ImageL1.meta.swir.wvl_center)
+
+        # TODO allow to set geolayer band to be used for warping of 2D arrays
+        GT_2D = Geometry_Transformer(lons=lons_vnir if lons_vnir.ndim == 2 else lons_vnir[:, :, 0],
+                                     lats=lats_vnir if lats_vnir.ndim == 2 else lats_vnir[:, :, 0],
+                                     ** kw_init)
+
+        # FIXME cloud mask applies to BOTH detectors
+        L2_obj.mask_clouds = GeoArray(*GT_2D.to_map_geometry(enmap_ImageL1.vnir.mask_clouds, **kw_trafo))
+
+        # TODO transform mask_clouds_confidence, ac_errors, pixel masks
+
+        # metadata adjustments #
+        ########################
+
+        L2_obj.meta = EnMAP_Metadata_L2A_MapGeo(config=self.cfg,
+                                                meta_l1b=enmap_ImageL1.meta,
+                                                dims_mapgeo=L2_obj.data.shape,
+                                                logger=L2_obj.logger)
+        L2_obj.meta.add_band_statistics(L2_obj.data)
+
+        L2_obj.data.meta.band_meta['wavelength'] = list(L2_obj.meta.wvl_center)
+        L2_obj.data.meta.band_meta['bandwidths'] = list(L2_obj.meta.fwhm)
+        L2_obj.data.meta.global_meta['wavelength_units'] = 'nanometers'
+
+        # Get the paths according information delivered in the metadata
+        L2_obj.paths = L2_obj.get_paths(self.cfg.output_dir)
+
+        return L2_obj
+
+    @staticmethod
+    def _get_tgt_UTMepsg(enmap_ImageL1: EnMAPL1Product_SensorGeo) -> int:
+        return get_UTMEPSG_from_LonLat_cornersXY(lons=enmap_ImageL1.vnir.detector_meta.lon_UL_UR_LL_LR,
+                                                 lats=enmap_ImageL1.vnir.detector_meta.lat_UL_UR_LL_LR)
+
+    @staticmethod
+    def _get_common_extent(enmap_ImageL1: EnMAPL1Product_SensorGeo,
+                           tgt_epsg: int,
+                           enmap_grid: bool = True) -> Tuple[float, float, float, float]:
+        """Get common target extent for VNIR and SWIR.
+
+        :param enmap_ImageL1:
+        :param tgt_epsg:
+        :param enmap_grid:
+        :return:
+        """
+        vnir_coords_utm = np.array([transform_any_prj(EPSG2WKT(4326), EPSG2WKT(tgt_epsg), x, y)
+                                    for x, y in zip(enmap_ImageL1.vnir.detector_meta.lon_UL_UR_LL_LR,
+                                                    enmap_ImageL1.vnir.detector_meta.lat_UL_UR_LL_LR,)])
+        swir_coords_utm = np.array([transform_any_prj(EPSG2WKT(4326), EPSG2WKT(tgt_epsg), x, y)
+                                    for x, y in zip(enmap_ImageL1.swir.detector_meta.lon_UL_UR_LL_LR,
+                                                    enmap_ImageL1.swir.detector_meta.lat_UL_UR_LL_LR, )])
+        all_coords_utm = np.dstack([vnir_coords_utm, swir_coords_utm])
+
+        common_extent = (all_coords_utm[:, 0, :].min(),  # xmin
+                         all_coords_utm[:, 1, :].min(),  # ymin
+                         all_coords_utm[:, 0, :].max(),  # xmax
+                         all_coords_utm[:, 1, :].max())  # ymax
+
+        if enmap_grid:
+            common_extent = move_extent_to_EnMAP_grid(common_extent)
+
+        return common_extent
+
+    @staticmethod
+    def _get_VNIR_SWIR_stack(vnir_data, swir_data, vnir_gt, swir_gt, vnir_prj, swir_prj, wvls_vnir, wvls_swir):
+        """Stack VNIR and SWIR bands with respect to their spectral overlap."""
+        if vnir_gt != swir_gt:
+            raise ValueError((vnir_gt, swir_gt), 'VNIR and SWIR geoinformation should be equal.')
+        if not prj_equal(vnir_prj, swir_prj):
+            raise ValueError((vnir_prj, swir_prj), 'VNIR and SWIR projection should be equal.')
+
+        # get band index order
+        wvls_vnir_plus_swir = np.hstack([wvls_vnir, wvls_swir])
+        wvls_sorted = np.array(sorted(wvls_vnir_plus_swir))
+        bandidx_order = np.array([np.argmin(np.abs(wvls_vnir_plus_swir - cwl)) for cwl in wvls_sorted])
+
+        # stack bands ordered by wavelengths
+        data_stacked = np.dstack([vnir_data, swir_data])[:, :, bandidx_order]
+
+        # TODO implement correction for VNIR/SWIR spectral jump
+
+        return GeoArray(data_stacked, geotransform=vnir_gt, projection=vnir_prj)
diff --git a/enpt/processors/radiometric_transform/radiometric_transform.py b/enpt/processors/radiometric_transform/radiometric_transform.py
index 8f1e57b7efd19f4aa3b22b6a88067995c2c0bddc..e4bbae66eb1052a88e0d6b86d69feb5bb45cade3 100644
--- a/enpt/processors/radiometric_transform/radiometric_transform.py
+++ b/enpt/processors/radiometric_transform/radiometric_transform.py
@@ -15,8 +15,11 @@ from ...options.config import EnPTConfig
 class Radiometric_Transformer(object):
     """Class for performing all kinds of radiometric transformations of EnMAP images."""
 
-    def __init__(self, config: EnPTConfig=None):
-        """Create an instance of Radiometric_Transformer."""
+    def __init__(self, config: EnPTConfig = None):
+        """Create an instance of Radiometric_Transformer.
+
+        :param config: EnPT configuration object
+        """
         self.cfg = config
         self.solarIrr = config.path_solar_irr  # path of model for solar irradiance
         self.earthSunDist = config.path_earthSunDist  # path of model for earth sun distance
@@ -41,8 +44,7 @@ class Radiometric_Transformer(object):
             constant = \
                 self.cfg.scale_factor_toa_ref * math.pi * enmap_ImageL1.meta.earthSunDist ** 2 / \
                 (math.cos(math.radians(enmap_ImageL1.meta.geom_sun_zenith)))
-            solIrr = np.array([detector.detector_meta.solar_irrad[band] for band in detector.detector_meta.srf.bands])\
-                .reshape(1, 1, detector.data.bands)
+            solIrr = detector.detector_meta.solar_irrad.reshape(1, 1, detector.data.bands)
             toaRef = (constant * detector.data[:] / solIrr).astype(np.int16)
 
             # update EnMAP image
diff --git a/enpt/processors/spatial_transform/__init__.py b/enpt/processors/spatial_transform/__init__.py
index a20673e1cf8e25642dac63bb6a240563e2977361..7ed2348df5b27e73cd54107477565d5dc8c3452e 100644
--- a/enpt/processors/spatial_transform/__init__.py
+++ b/enpt/processors/spatial_transform/__init__.py
@@ -1,6 +1,4 @@
 # -*- coding: utf-8 -*-
 """EnPT module 'spatial transform', containing everything related to spatial transformations."""
 
-from .spatial_transform import Geometry_Transformer
-
-__all__ = ['Geometry_Transformer']
+from .spatial_transform import *  # noqa: F401,F403  # flake8 unable to detect undefined names
diff --git a/enpt/processors/spatial_transform/spatial_transform.py b/enpt/processors/spatial_transform/spatial_transform.py
index 408232a7df0b9a2fb18d8370a9bf9e8528e6c1c1..d2f51933eb7021fcfebcce2ab471469cb5590b99 100644
--- a/enpt/processors/spatial_transform/spatial_transform.py
+++ b/enpt/processors/spatial_transform/spatial_transform.py
@@ -1,13 +1,20 @@
 # -*- coding: utf-8 -*-
 """EnPT module 'spatial transform', containing everything related to spatial transformations."""
 
+from typing import Union, Tuple, List  # noqa: F401
+from multiprocessing import Pool, cpu_count
+from collections import OrderedDict
 import numpy as np
-from typing import Union, Tuple  # noqa: F401
+from scipy.interpolate import griddata as interpolate_griddata, interp1d
 from geoarray import GeoArray
 
-from py_tools_ds.geo.raster.reproject import SensorMapGeometryTransformer
-from py_tools_ds.geo.projection import get_proj4info, proj4_to_dict
+from py_tools_ds.geo.raster.reproject import \
+    SensorMapGeometryTransformer, \
+    SensorMapGeometryTransformer3D, \
+    AreaDefinition
+from py_tools_ds.geo.projection import get_proj4info, proj4_to_dict, prj_equal, EPSG2WKT, WKT2EPSG, proj4_to_WKT
 from py_tools_ds.geo.coord_grid import find_nearest
+from py_tools_ds.geo.coord_trafo import transform_any_prj, transform_coordArray
 
 from ...options.config import enmap_coordinate_grid
 
@@ -16,30 +23,91 @@ class Geometry_Transformer(SensorMapGeometryTransformer):
     # use Sentinel-2 grid (30m grid with origin at 0/0)
     # EnMAP geolayer contains pixel center coordinate
 
-    def __init__(self, path_or_geoarray, lons, lats, **opts):
-        self._data2transform = GeoArray(path_or_geoarray)
+    def to_sensor_geometry(self,
+                           path_or_geoarray_mapgeo: Union[str, GeoArray],
+                           src_prj: Union[str, int] = None,
+                           src_extent: Tuple[float, float, float, float] = None):
+        data_mapgeo = GeoArray(path_or_geoarray_mapgeo)
+
+        if not data_mapgeo.is_map_geo:
+            raise RuntimeError('The dataset to be transformed into sensor geometry already represents sensor geometry.')
+
+        return super(Geometry_Transformer, self).to_sensor_geometry(
+            data_mapgeo[:],
+            src_prj=src_prj or data_mapgeo.prj,
+            src_extent=src_extent or list(np.array(data_mapgeo.box.boundsMap)[[0, 2, 1, 3]]))
+
+    def to_map_geometry(self,
+                        path_or_geoarray_sensorgeo: Union[str, GeoArray],
+                        tgt_prj:  Union[str, int] = None,
+                        tgt_extent: Tuple[float, float, float, float] = None,
+                        tgt_res: Tuple[float, float] = None,
+                        area_definition: AreaDefinition = None):
+        data_sensorgeo = GeoArray(path_or_geoarray_sensorgeo)
+
+        if data_sensorgeo.is_map_geo:
+            raise RuntimeError('The dataset to be transformed into map geometry already represents map geometry.')
+
+        if area_definition:
+            self.area_definition = area_definition
+        else:
+            if not tgt_prj:
+                raise ValueError(tgt_prj, 'Target projection must be given if area_definition is not given.')
+
+            # compute target resolution and extent (according to EnMAP grid)
+            proj4dict = proj4_to_dict(get_proj4info(proj=tgt_prj))
 
-        # TODO check if lons/lats represent pixel coordinate centers (needed as UL coordinates)
+            if 'units' in proj4dict and proj4dict['units'] == 'm':
+                if not tgt_res:
+                    tgt_res = (np.ptp(enmap_coordinate_grid['x']), np.ptp(enmap_coordinate_grid['x']))
 
-        super(Geometry_Transformer, self).__init__(self._data2transform[:], lons, lats, **opts)
+                if not tgt_extent:
+                    # use the extent computed by compute_output_shape and move it to the EnMAP coordinate grid
+                    area_definition = self.compute_areadefinition_sensor2map(
+                        data_sensorgeo[:], tgt_prj, tgt_res=tgt_res)
+
+                    tgt_extent = move_extent_to_EnMAP_grid(tuple(area_definition.area_extent))
+
+        out_data, out_gt, out_prj = \
+            super(Geometry_Transformer, self).to_map_geometry(data_sensorgeo[:], tgt_prj=tgt_prj,
+                                                              tgt_extent=tgt_extent, tgt_res=tgt_res,
+                                                              area_definition=self.area_definition)
+
+        return out_data, out_gt, out_prj
+
+
+class Geometry_Transformer_3D(SensorMapGeometryTransformer3D):
+    # use Sentinel-2 grid (30m grid with origin at 0/0)
+    # EnMAP geolayer contains pixel center coordinate
 
     def to_sensor_geometry(self,
+                           path_or_geoarray_mapgeo: Union[str, GeoArray],
                            src_prj: Union[str, int] = None,
                            src_extent: Tuple[float, float, float, float] = None):
-        if not self._data2transform.is_map_geo:
+        data_mapgeo = GeoArray(path_or_geoarray_mapgeo)
+
+        if not data_mapgeo.is_map_geo:
             raise RuntimeError('The dataset to be transformed into sensor geometry already represents sensor geometry.')
 
-        return super(Geometry_Transformer, self).to_sensor_geometry(
-            src_prj=src_prj or self._data2transform.prj,
-            src_extent=src_extent or list(np.array(self._data2transform.box.boundsMap)[[0, 2, 1, 3]]))
+        return super(Geometry_Transformer_3D, self).to_sensor_geometry(
+            data_mapgeo[:],
+            src_prj=src_prj or data_mapgeo.prj,
+            src_extent=src_extent or list(np.array(data_mapgeo.box.boundsMap)[[0, 2, 1, 3]]))
 
     def to_map_geometry(self,
-                        tgt_prj:  Union[str, int],
+                        path_or_geoarray_sensorgeo: Union[str, GeoArray],
+                        tgt_prj:  Union[str, int] = None,
                         tgt_extent: Tuple[float, float, float, float] = None,
                         tgt_res: Tuple[float, float] = None):
-        if self._data2transform.is_map_geo:
+        data_sensorgeo = GeoArray(path_or_geoarray_sensorgeo)
+
+        if data_sensorgeo.is_map_geo:
             raise RuntimeError('The dataset to be transformed into map geometry already represents map geometry.')
 
+        if not tgt_prj:
+            raise ValueError(tgt_prj, 'Target projection must be given if area_definition is not given.')
+
+        # compute target resolution and extent (according to EnMAP grid)
         proj4dict = proj4_to_dict(get_proj4info(proj=tgt_prj))
 
         if 'units' in proj4dict and proj4dict['units'] == 'm':
@@ -48,19 +116,426 @@ class Geometry_Transformer(SensorMapGeometryTransformer):
 
             if not tgt_extent:
                 # use the extent computed by compute_output_shape and move it to the EnMAP coordinate grid
-                x_size, y_size, out_gt, out_prj, out_extent = \
-                    super(Geometry_Transformer, self).compute_output_shape(tgt_prj=tgt_prj)
-
-                xmin, ymin, xmax, ymax = out_extent
-                tgt_xgrid = enmap_coordinate_grid['x']
-                tgt_ygrid = enmap_coordinate_grid['y']
-                tgt_xmin = find_nearest(tgt_xgrid, xmin, roundAlg='off', extrapolate=True)
-                tgt_xmax = find_nearest(tgt_xgrid, xmax, roundAlg='on', extrapolate=True)
-                tgt_ymin = find_nearest(tgt_ygrid, ymin, roundAlg='off', extrapolate=True)
-                tgt_ymax = find_nearest(tgt_ygrid, ymax, roundAlg='on', extrapolate=True)
-                tgt_extent = list((tgt_xmin, tgt_ymin, tgt_xmax, tgt_ymax))
+                tgt_epsg = WKT2EPSG(proj4_to_WKT(get_proj4info(proj=tgt_prj)))
+                common_extent = self._get_common_target_extent(tgt_epsg)
+
+                tgt_extent = move_extent_to_EnMAP_grid(tuple(common_extent))
 
         out_data, out_gt, out_prj = \
-            super(Geometry_Transformer, self).to_map_geometry(tgt_prj, tgt_extent=tgt_extent, tgt_res=tgt_res)
+            super(Geometry_Transformer_3D, self).to_map_geometry(data_sensorgeo[:], tgt_prj=tgt_prj,
+                                                                 tgt_extent=tgt_extent, tgt_res=tgt_res)
 
         return out_data, out_gt, out_prj
+
+
+def move_extent_to_EnMAP_grid(extent_utm: Tuple[float, float, float, float]) -> Tuple[float, float, float, float]:
+    """Move a UTM coordinate extent to the EnMAP coordinate grid (30m x 30m, origin at 0/0).
+
+    :param extent_utm:  xmin, ymin, xmax, ymax coordinates
+    """
+    xmin, ymin, xmax, ymax = extent_utm
+    tgt_xgrid = enmap_coordinate_grid['x']
+    tgt_ygrid = enmap_coordinate_grid['y']
+    tgt_xmin = find_nearest(tgt_xgrid, xmin, roundAlg='off', extrapolate=True)
+    tgt_xmax = find_nearest(tgt_xgrid, xmax, roundAlg='on', extrapolate=True)
+    tgt_ymin = find_nearest(tgt_ygrid, ymin, roundAlg='off', extrapolate=True)
+    tgt_ymax = find_nearest(tgt_ygrid, ymax, roundAlg='on', extrapolate=True)
+
+    return tgt_xmin, tgt_ymin, tgt_xmax, tgt_ymax
+
+
+class RPC_Geolayer_Generator(object):
+    """
+    Class for creating pixel-wise longitude/latitude arrays based on rational polynomial coefficients (RPC).
+    """
+    def __init__(self,
+                 rpc_coeffs: dict,
+                 dem: Union[str, GeoArray],
+                 enmapIm_cornerCoords: Tuple[Tuple[float, float]],
+                 enmapIm_dims_sensorgeo: Tuple[int, int]):
+        """Get an instance of RPC_Geolayer_Generator.
+
+        :param rpc_coeffs:              RPC coefficients for a single EnMAP band
+        :param dem:                     digital elevation model in map geometry (file path or instance of GeoArray)
+        :param enmapIm_cornerCoords:    corner coordinates as tuple of lon/lat pairs
+        :param enmapIm_dims_sensorgeo:  dimensions of the EnMAP image in sensor geometry (rows, columns)
+        """
+        self.row_off = rpc_coeffs['row_off']
+        self.col_off = rpc_coeffs['col_off']
+        self.lat_off = rpc_coeffs['lat_off']
+        self.lon_off = rpc_coeffs['long_off']
+        self.height_off = rpc_coeffs['height_off']
+        self.row_scale = rpc_coeffs['row_scale']
+        self.col_scale = rpc_coeffs['col_scale']
+        self.lat_scale = rpc_coeffs['lat_scale']
+        self.lon_scale = rpc_coeffs['long_scale']
+        self.height_scale = rpc_coeffs['height_scale']
+        self.row_num_coeffs = rpc_coeffs['row_num_coeffs']
+        self.row_den_coeffs = rpc_coeffs['row_den_coeffs']
+        self.col_num_coeffs = rpc_coeffs['col_num_coeffs']
+        self.col_den_coeffs = rpc_coeffs['col_den_coeffs']
+
+        self.dem = GeoArray(dem)
+        self.enmapIm_cornerCoords = enmapIm_cornerCoords
+        self.enmapIm_dims_sensorgeo = enmapIm_dims_sensorgeo
+
+    def _normalize_map_coordinates(self,
+                                   lon: np.ndarray,
+                                   lat: np.ndarray,
+                                   height: np.ndarray) -> (np.ndarray, np.ndarray, np.ndarray):
+        """Normalize map coordinates to [-1, +1] to improve numerical precision.
+
+        :param lon:     longitude array
+        :param lat:     latitude array
+        :param height:  elevation array
+        """
+        if not lon.shape == lat.shape == height.shape:
+            raise ValueError((lon.shape, lat.shape, height.shape),
+                             'Longitude, latitude and height arrays are expected to have the same dimensions.')
+
+        lon_norm = (lon - self.lon_off) / self.lon_scale  # longitude
+        lat_norm = (lat - self.lat_off) / self.lat_scale  # latitude
+        height_norm = (height - self.height_off) / self.height_scale  # elevation
+
+        msg = 'Coordinate normalization yields significantly out-of-range values for %s. ' \
+              'Check the coordinates and RPC coefficients.'
+        for llh, name in zip([lon_norm, lat_norm, height_norm], ['longitudes', 'latitudes', 'heights']):
+            if llh.min() < -1.1 or llh.max() > 1.1:
+                raise RuntimeError((llh.min(), llh.max()), msg % name)
+
+        return lon_norm, lat_norm, height_norm
+
+    def _compute_normalized_image_coordinates(self,
+                                              lon_norm: np.ndarray,
+                                              lat_norm: np.ndarray,
+                                              height_norm: np.ndarray) -> (np.ndarray, np.ndarray):
+        """Compute normalized sensor geometry coordinates for the given lon/lat/height positions.
+
+        :param lon_norm:    normalized longitudes
+        :param lat_norm:    normalized latitudes
+        :param height_norm: normalized elevations
+        :return:
+        """
+        P = lat_norm.flatten()
+        L = lon_norm.flatten()
+        H = height_norm.flatten()
+
+        u = np.zeros((P.size, 20))
+
+        u_data = (ui for ui in [
+            1,
+            L,
+            P,
+            H,
+            L * P,
+            L * H,
+            P * H,
+            L ** 2,
+            P ** 2,
+            H ** 2,
+            P * L * H,
+            L ** 3,
+            L * P ** 2,
+            L * H ** 2,
+            L ** 2 * P,
+            P ** 3,
+            P * H ** 2,
+            L ** 2 * H,
+            P ** 2 * H,
+            H ** 3
+        ])
+
+        for i, ud in enumerate(u_data):
+            u[:, i] = ud
+
+        num_row_norm = np.sum(self.row_num_coeffs * u, axis=1)
+        den_row_norm = np.sum(self.row_den_coeffs * u, axis=1)
+        num_col_norm = np.sum(self.col_num_coeffs * u, axis=1)
+        den_col_norm = np.sum(self.col_den_coeffs * u, axis=1)
+
+        row_norm = num_row_norm / den_row_norm
+        col_norm = num_col_norm / den_col_norm
+
+        return row_norm, col_norm
+
+    def _denormalize_image_coordinates(self,
+                                       row_norm: np.ndarray,
+                                       col_norm: np.ndarray) -> (np.ndarray, np.ndarray):
+        """De-normalize normalized sensor geometry coordinates to get valid image coordinates.
+
+        :param row_norm:    normalized rows
+        :param col_norm:    normalized columns
+        :return:
+        """
+        rows = row_norm * self.row_scale + self.row_off
+        cols = col_norm * self.col_scale + self.col_off
+
+        return rows, cols
+
+    def transform_LonLatHeight_to_RowCol(self,
+                                         lon: np.ndarray,
+                                         lat: np.ndarray,
+                                         height: np.ndarray) -> (np.ndarray, np.ndarray):
+        """Get sensor geometry image coordinates for the given 3D map coordinate positions using RPC coefficients.
+
+        :param lon:     longitude array
+        :param lat:     latitude array
+        :param height:  elevation array
+        :return:
+        """
+        # TODO add reshaping
+
+        lon_norm, lat_norm, height_norm = \
+            self._normalize_map_coordinates(lon=lon, lat=lat, height=height)
+        row_norm, col_norm = \
+            self._compute_normalized_image_coordinates(lon_norm=lon_norm, lat_norm=lat_norm, height_norm=height_norm)
+        rows, cols = \
+            self._denormalize_image_coordinates(row_norm=row_norm, col_norm=col_norm)
+
+        return rows, cols
+
+    @staticmethod
+    def _fill_nans_at_corners(arr: np.ndarray, along_axis: int = 0) -> np.ndarray:
+        if not arr.ndim == 2:
+            raise ValueError(arr.ndim, '2D numpy array expected.')
+        if along_axis not in [0, 1]:
+            raise ValueError(along_axis, "The 'axis' parameter must be set to 0 or 1")
+
+        kw = dict(kind='linear', fill_value='extrapolate')
+
+        if along_axis == 0:
+            # extrapolate in top/bottom direction
+            cols_with_nan = np.arange(arr.shape[1])[~np.all(np.isnan(arr), axis=0)]
+
+            for col in cols_with_nan:  # FIXME iterating over columns is slow
+                data = arr[:, col]
+                idx_goodvals = np.argwhere(~np.isnan(data)).flatten()
+                arr[:, col] = interp1d(idx_goodvals, data[idx_goodvals], **kw)(range(data.size))
+        else:
+            # extrapolate in left/right direction
+            rows_with_nan = np.arange(arr.shape[0])[~np.all(np.isnan(arr), axis=1)]
+
+            for row in rows_with_nan:  # FIXME iterating over rows is slow
+                data = arr[row, :]
+                idx_goodvals = np.argwhere(~np.isnan(data)).flatten()
+                arr[row, :] = interp1d(idx_goodvals, data[idx_goodvals], **kw)(range(data.size))
+
+        return arr
+
+    def compute_geolayer(self) -> (np.ndarray, np.ndarray):
+        """Compute pixel-wise lon/lat arrays based on RPC coefficients, corner coordinates and image dimensions.
+
+        :return: (2D longitude array, 2D latitude array)
+        """
+        # transform corner coordinates of EnMAP image to UTM
+        grid_utm_epsg = get_UTMEPSG_from_LonLat(*get_center_coord(self.enmapIm_cornerCoords))
+        cornerCoordsUTM = np.array([transform_any_prj(4326, grid_utm_epsg, lon, lat)
+                                    for lon, lat in self.enmapIm_cornerCoords])
+        xmin, xmax = cornerCoordsUTM[:, 0].min(), cornerCoordsUTM[:, 0].max()
+        ymin, ymax = cornerCoordsUTM[:, 1].min(), cornerCoordsUTM[:, 1].max()
+
+        # get UTM bounding box and move it to the EnMAP grid
+        xmin, ymin, xmax, ymax = move_extent_to_EnMAP_grid((xmin, ymin, xmax, ymax))
+
+        # set up a regular grid of UTM points with a specific mesh width
+        meshwidth = 300  # 10 EnMAP pixels
+        y_grid_utm, x_grid_utm = np.meshgrid(np.arange(ymax, ymin - meshwidth, -meshwidth),
+                                             np.arange(xmin, xmax + meshwidth, meshwidth),
+                                             indexing='ij')
+
+        # transform UTM grid to DEM projection
+        x_grid_demPrj, y_grid_demPrj = (x_grid_utm, y_grid_utm) if prj_equal(grid_utm_epsg, self.dem.epsg) else \
+            transform_coordArray(EPSG2WKT(grid_utm_epsg), EPSG2WKT(self.dem.epsg), x_grid_utm, y_grid_utm)
+
+        # retrieve corresponding heights from DEM
+        # -> resample DEM to EnMAP grid?
+        xy_pairs_demPrj = np.vstack([x_grid_demPrj.flatten(), y_grid_demPrj.flatten()]).T
+        heights = self.dem.read_pointData(xy_pairs_demPrj).flatten()
+
+        # transform UTM points to lon/lat
+        lon_grid, lat_grid = transform_coordArray(EPSG2WKT(grid_utm_epsg), EPSG2WKT(4326), x_grid_utm, y_grid_utm)
+        lons = lon_grid.flatten()
+        lats = lat_grid.flatten()
+
+        # compute floating point EnMAP image coordinates for the selected UTM points
+        rows, cols = self.transform_LonLatHeight_to_RowCol(lon=lons, lat=lats, height=heights)
+
+        # interpolate lon/lats to get lon/lat coordinates at integer image coordinates of EnMAP image
+        rows_im, cols_im = self.enmapIm_dims_sensorgeo
+        out_rows_grid, out_cols_grid = np.meshgrid(range(rows_im), range(cols_im), indexing='ij')
+        out_xy_pairs = np.vstack([out_cols_grid.flatten(), out_rows_grid.flatten()]).T
+
+        lons_interp = interpolate_griddata(np.array((cols, rows)).T, lons, out_xy_pairs,
+                                           method='linear').reshape(*out_cols_grid.shape)
+        lats_interp = interpolate_griddata(np.array((cols, rows)).T, lats, out_xy_pairs,
+                                           method='linear').reshape(*out_rows_grid.shape)
+
+        # lons_interp / lats_interp may contain NaN values in case xmin, ymin, xmax, ymax has been set too small
+        # to account for RPC transformation errors
+        # => fix that by extrapolation at NaN value positions
+
+        lons_interp = self._fill_nans_at_corners(lons_interp, along_axis=1)  # extrapolate in left/right direction
+        lats_interp = self._fill_nans_at_corners(lats_interp, along_axis=1)
+
+        # return a geolayer in the exact dimensions like the EnMAP detector array
+        return lons_interp, lats_interp
+
+    def __call__(self, *args, **kwargs):
+        return self.compute_geolayer()
+
+
+global_dem_sensorgeo = None  # type: GeoArray
+
+
+def mp_initializer_for_RPC_3D_Geolayer_Generator(dem_sensorgeo):
+    """Declare global variables needed for RPC_3D_Geolayer_Generator._compute_geolayer_for_band()
+
+    :param dem_sensorgeo:   DEM in sensor geometry
+    """
+    global global_dem_sensorgeo
+    global_dem_sensorgeo = dem_sensorgeo
+
+
+class RPC_3D_Geolayer_Generator(object):
+    """
+    Class for creating band- AND pixel-wise longitude/latitude arrays based on rational polynomial coefficients (RPC).
+    """
+    def __init__(self,
+                 rpc_coeffs_per_band: dict,
+                 dem: Union[str, GeoArray],
+                 enmapIm_cornerCoords: Tuple[Tuple[float, float]],
+                 enmapIm_dims_sensorgeo: Tuple[int, int],
+                 CPUs: int = None):
+        """Get an instance of RPC_3D_Geolayer_Generator.
+
+        :param rpc_coeffs_per_band:     RPC coefficients for each EnMAP band ({'band_1': <rpc_coeffs_dict>,
+                                                                                   'band_2': <rpc_coeffs_dict>,
+                                                                                   ...})
+        :param dem:                     digital elevation model in map geometry (file path or instance of GeoArray)
+        :param enmapIm_cornerCoords:    corner coordinates as tuple of lon/lat pairs
+        :param enmapIm_dims_sensorgeo:  dimensions of the EnMAP image in sensor geometry (rows, columns)
+        :param CPUs:                    number of CPUs to use
+        """
+        self.rpc_coeffs_per_band = OrderedDict(sorted(rpc_coeffs_per_band.items()))
+        self.dem = dem
+        self.enmapIm_cornerCoords = enmapIm_cornerCoords
+        self.enmapIm_dims_sensorgeo = enmapIm_dims_sensorgeo
+        self.CPUs = CPUs or cpu_count()
+
+        # get validated DEM in map geometry
+        # self.logger.debug('Verifying DEM...')  # TODO
+        from ..dem_preprocessing import DEM_Processor
+        self.dem = DEM_Processor(dem_path_geoarray=dem,
+                                 enmapIm_cornerCoords=enmapIm_cornerCoords).dem
+        # TODO clip DEM to needed area
+        self.dem.to_mem()
+
+    @staticmethod
+    def _compute_geolayer_for_band(rpc_coeffs, enmapIm_cornerCoords, enmapIm_dims_sensorgeo, band_idx):
+        lons, lats = \
+            RPC_Geolayer_Generator(rpc_coeffs=rpc_coeffs,
+                                   dem=global_dem_sensorgeo,
+                                   enmapIm_cornerCoords=enmapIm_cornerCoords,
+                                   enmapIm_dims_sensorgeo=enmapIm_dims_sensorgeo) \
+            .compute_geolayer()
+
+        return lons, lats, band_idx
+
+    def compute_geolayer(self):
+        rows, cols = self.enmapIm_dims_sensorgeo
+        bands = len(self.rpc_coeffs_per_band)
+        lons = np.empty((rows, cols, bands), dtype=np.float)
+        lats = np.empty((rows, cols, bands), dtype=np.float)
+
+        band_inds = list(range(len(self.rpc_coeffs_per_band)))
+        rpc_coeffs_list = list(self.rpc_coeffs_per_band.values())
+
+        if self.CPUs > 1:
+            # multiprocessing
+            args = [(coeffs, self.enmapIm_cornerCoords, self.enmapIm_dims_sensorgeo, idx)
+                    for coeffs, idx in zip(rpc_coeffs_list, band_inds)]
+
+            with Pool(self.CPUs,
+                      initializer=mp_initializer_for_RPC_3D_Geolayer_Generator,
+                      initargs=(self.dem,)) as pool:
+                results = pool.starmap(self._compute_geolayer_for_band, args)
+
+            for res in results:
+                band_lons, band_lats, band_idx = res
+                lons[:, :, band_idx] = band_lons
+                lats[:, :, band_idx] = band_lats
+
+        else:
+            # singleprocessing
+            global global_dem_sensorgeo
+            global_dem_sensorgeo = self.dem
+
+            for band_idx in band_inds:
+                lons[:, :, band_idx], lats[:, :, band_idx] = \
+                    self._compute_geolayer_for_band(rpc_coeffs=rpc_coeffs_list[band_idx],
+                                                    enmapIm_cornerCoords=self.enmapIm_cornerCoords,
+                                                    enmapIm_dims_sensorgeo=self.enmapIm_dims_sensorgeo,
+                                                    band_idx=band_idx)[:2]
+
+        return lons, lats
+
+
+def compute_mapCoords_within_sensorGeoDims(rpc_coeffs: dict,
+                                           dem: Union[str, GeoArray],
+                                           enmapIm_cornerCoords: Tuple[Tuple[float, float]],
+                                           enmapIm_dims_sensorgeo: Tuple[int, int],
+                                           sensorgeoCoords_YX: List[Tuple[float, float]]
+                                           ) -> List[Tuple[float, float]]:
+    """
+
+    :param rpc_coeffs:
+    :param dem:                     digital elevation model in MAP geometry
+    :param enmapIm_cornerCoords:
+    :param enmapIm_dims_sensorgeo:  (rows, columns)
+    :param sensorgeoCoords_YX:
+    :return:
+    """
+    # compute coordinate array
+    RPCGG = RPC_Geolayer_Generator(rpc_coeffs=rpc_coeffs,
+                                   dem=dem,
+                                   enmapIm_cornerCoords=enmapIm_cornerCoords,  # order does not matter
+                                   enmapIm_dims_sensorgeo=enmapIm_dims_sensorgeo)
+    lons, lats = RPCGG.compute_geolayer()
+
+    # extract the new corner coordinates from the coordinate arrays computed via RPCs
+    rows, cols = enmapIm_dims_sensorgeo
+
+    ul, ur, ll, lr = enmapIm_cornerCoords
+
+    lonlats = []
+    for imYX in sensorgeoCoords_YX:
+        lonlat = \
+            ul if imYX == (0, 0) else \
+            ur if imYX == (0, cols - 1) else \
+            ll if imYX == (rows - 1, 0) else \
+            lr if imYX == (rows - 1, cols - 1) else \
+            (lons[imYX], lats[imYX])
+
+        lonlats.append(lonlat)
+
+    return lonlats
+
+
+def get_UTMEPSG_from_LonLat(lon: float, lat: float) -> int:
+    zoneNr = int(1 + (lon + 180.0) / 6.0)
+    isNorth = lat >= 0
+
+    return int('326' + str(zoneNr)) if isNorth else int('327' + str(zoneNr))
+
+
+def get_center_coord(cornerCoordsXY):
+    # FIXME center coord is not equal to center of bounding box
+    cornerCoordsXY = np.array(cornerCoordsXY)
+    x_center = float(np.mean([cornerCoordsXY[:, 0].min(), cornerCoordsXY[:, 0].max()]))
+    y_center = float(np.mean([cornerCoordsXY[:, 1].min(), cornerCoordsXY[:, 1].max()]))
+
+    return x_center, y_center
+
+
+def get_UTMEPSG_from_LonLat_cornersXY(lons: List[float], lats: List[float]):
+    return get_UTMEPSG_from_LonLat(*get_center_coord(list(zip(lons, lats))))
diff --git a/enpt/version.py b/enpt/version.py
index 4d31b0340ff45d360f1a4b503fb3cc8be27b8a7a..ec828856c552f9b2128c3578fc12ad6fada3b3fb 100644
--- a/enpt/version.py
+++ b/enpt/version.py
@@ -1,2 +1,2 @@
-__version__ = '0.6.1'
-__versionalias__ = '20181214.01'
+__version__ = '0.7.0'
+__versionalias__ = '20190121.01'
diff --git a/history.csv b/history.csv
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/requirements.txt b/requirements.txt
index e42f65ea04c031b99a7d0035c4e01281742f37df..26262868a1fb4904bb9bce579d8eaee2653bd507 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,13 +1,13 @@
 numpy
+pandas
 scipy
 geoarray>=0.8.9
-py_tools_ds>=0.14.2
-spectral>=0.16
+py_tools_ds>=0.14.8
 cerberus
 jsmin
 matplotlib
 tqdm
 utm
+git+https://gitext.gfz-potsdam.de/EnMAP/sicor.git
 lxml
-imageio
-git+https://gitext.gfz-potsdam.de/EnMAP/sicor.git
\ No newline at end of file
+numpy-indexed
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 4e04f6061cda3e5f7e8411584e5b5c6907f5146d..b5aa6c8e1a0e1333622d60d9fb0691bb3c289539 100644
--- a/setup.py
+++ b/setup.py
@@ -14,8 +14,8 @@ with open("enpt/version.py") as version_file:
     exec(version_file.read(), version)
 
 requirements = [  # put package requirements here
-    'numpy', 'scipy', 'geoarray>=0.8.9', 'py_tools_ds>=0.14.2', 'spectral>=0.16', 'cerberus', 'jsmin', 'matplotlib',
-    'tqdm', 'utm', 'lxml', 'imageio'
+    'numpy', 'pandas', 'scipy', 'geoarray>=0.8.9', 'py_tools_ds>=0.14.8', 'cerberus', 'jsmin', 'matplotlib', 'tqdm',
+    'utm', 'lxml', 'numpy-indexed'
     # 'sicor', # pip install git+https://gitext.gfz-potsdam.de/EnMAP/sicor.git
 ]
 
diff --git a/test.json b/test.json
new file mode 100644
index 0000000000000000000000000000000000000000..e0ee12a0d26940f2790ae7185599615e237ebe11
--- /dev/null
+++ b/test.json
@@ -0,0 +1,253 @@
+{
+    "EnMAP": {
+        "solar_model": "Thullier2002",
+        "wvl_rsp_sampling": 1.0,
+        "buffer_dir": "/home/gfz-fe/scheffler/python_deployed/sicor/sicor",
+        "keep_defaults_for": ["spr", "cwv"],
+        "default_values": {
+            "spr": 1020.0,
+            "coz": 400.0,
+            "cwv": 20.0,
+            "tmp": 0,
+            "tau_a": 0.2,
+            "ch4": 3.0,
+            "vza": 0.0,
+            "sza": 0.0,
+            "azi": 0.0},
+        "lon_lat_smpl": [10, 10],
+        "aerosol_model": "aerosol_0",
+        "aerosol_default": "aerosol_0",
+        "scene_detection_flags_to_process": [
+            0.0
+        ],
+        "use_only_rtfo": [
+            "aerosol_0",
+            "ch4"
+        ]
+    },
+    "ECMWF": {
+        "mapping": {
+            "tau_a": "fc_total_AOT_550nm",
+            "coz": "fc_O3",
+            "cwv": "fc_TCWV",
+            "spr": "fc_SLP"
+        },
+        "conversion": {
+            "tau_a": 1.0,
+            "coz": 71524.3,
+            "cwv": 1.0,
+            "spr": 0.01
+        },
+        "path_db": "./ecmwf",
+        "target_resolution": 20.0,
+        "max_delta_day": 10,
+        "variables_aerosol": [
+            "fc_total_AOT_550nm",
+            "fc_sulphate_AOT_550nm",
+            "fc_black_carbon_AOT_550nm",
+            "fc_dust_AOT_550nm",
+            "fc_organic_matter_AOT_550nm",
+            "fc_sea_salt_AOT_550nm"
+        ],
+        "var2type": {
+            "fc_organic_matter_AOT_550nm": "aerosol_2",
+            "fc_sulphate_AOT_550nm": "aerosol_2",
+            "fc_sea_salt_AOT_550nm": "aerosol_1",
+            "fc_black_carbon_AOT_550nm": "aerosol_0",
+            "fc_dust_AOT_550nm": "aerosol_3"
+        }
+    },
+    "RTFO": {
+        "aerosol_1": {
+            "flag": 10,
+            "table_path": "/table_aerosol/type_1",
+            "dim_scat": [
+                "tau_a"
+            ],
+            "atm_tables_fn": "/home/gfz-fe/scheffler/python_deployed/sicor/sicor/tables/linear_atm_functions_ncwv_5_npre_4_ncoz_2_ntmp_2_wvl_350.0_2550.0_1.00_pca.h5",
+            "dim_atm": [
+                "spr",
+                "coz",
+                "cwv",
+                "tmp"
+            ],
+            "only_toa": "true",
+            "hash_formats": {
+                "spr": "%.0f",
+                "coz": "%.0f,",
+                "cwv": "%.0f,",
+                "tmp": "%0f,",
+                "tau_a": "%.2f,",
+                "vza": "%.0f,"
+            }
+        },
+        "aerosol_0": {
+            "flag": 10,
+            "table_path": "/table_aerosol/type_0",
+            "dim_scat": [
+                "tau_a"
+            ],
+            "atm_tables_fn": "/home/gfz-fe/scheffler/python_deployed/sicor/sicor/tables/linear_atm_functions_ncwv_5_npre_4_ncoz_2_ntmp_2_wvl_350.0_2550.0_1.00_pca.h5",
+            "dim_atm": [
+                "spr",
+                "coz",
+                "cwv",
+                "tmp"
+            ],
+            "only_toa": "true",
+            "hash_formats": {
+                "spr": "%.0f",
+                "coz": "%.0f,",
+                "cwv": "%.0f,",
+                "tmp": "%0f,",
+                "tau_a": "%.2f,",
+                "vza": "%.0f,"
+            }
+        },
+        "aerosol_2": {
+            "flag": 10,
+            "table_path": "/table_aerosol/type_2",
+            "dim_scat": [
+                "tau_a"
+            ],
+            "atm_tables_fn": "/home/gfz-fe/scheffler/python_deployed/sicor/sicor/tables/linear_atm_functions_ncwv_5_npre_4_ncoz_2_ntmp_2_wvl_350.0_2550.0_1.00_pca.h5",
+            "dim_atm": [
+                "spr",
+                "coz",
+                "cwv",
+                "tmp"
+            ],
+            "only_toa": "true",
+            "hash_formats": {
+                "spr": "%.0f",
+                "coz": "%.0f,",
+                "cwv": "%.0f,",
+                "tmp": "%0f,",
+                "tau_a": "%.2f,",
+                "vza": "%.0f,"
+            }
+        },
+        "aerosol_3": {
+            "flag": 10,
+            "table_path": "/table_aerosol/type_3",
+            "dim_scat": [
+                "tau_a"
+            ],
+            "atm_tables_fn": "/home/gfz-fe/scheffler/python_deployed/sicor/sicor/tables/linear_atm_functions_ncwv_5_npre_4_ncoz_2_ntmp_2_wvl_350.0_2550.0_1.00_pca.h5",
+            "dim_atm": [
+                "spr",
+                "coz",
+                "cwv",
+                "tmp"
+            ],
+            "only_toa": "true",
+            "hash_formats": {
+                "spr": "%.0f",
+                "coz": "%.0f,",
+                "cwv": "%.0f,",
+                "tmp": "%0f,",
+                "tau_a": "%.2f,",
+                "vza": "%.0f,"
+            }
+        },
+        "ch4": {
+            "atm_tables_fn": "/home/gfz-fe/scheffler/python_deployed/sicor/sicor/tables/linear_atm_functions_ncwv_4_npre_2_ncoz_2_ntmp_1_nch4_4_wvl_350.0_2550.0_1.00_pca.h5",
+            "dim_atm": ["spr", "coz", "cwv", "ch4"],
+            "dim_scat": [
+                "tau_a"
+            ],
+            "flag": 10,
+            "only_toa": "true",
+            "table_path": "/table_aerosol/type_0_tmp_0",
+            "hash_formats": {
+                "spr": "%.0f",
+                "coz": "%.0f,",
+                "cwv": "%.0f,",
+                "tmp": "%0f,",
+                "tau_a": "%.2f,",
+                "vza": "%.0f,"
+            }
+        }
+    },
+    "output": [
+        {
+            "type": "none"
+        }
+    ],
+    "cld_mask": {
+        "target_resolution": 20.0,
+        "nodata_value_mask": 255,
+        "persistence_file": "/home/gfz-fe/scheffler/python_deployed/sicor/sicor/tables/cld_mask_S2_classi_20170412_v20170412_11:43:14.h5",
+        "processing_tiles": 4,
+        "novelty_detector": "/home/gfz-fe/scheffler/python_deployed/sicor/sicor/tables/noclear_novelty_detector_channel2_difference9_0_index10_1_channel12_index1_8.retrain.pkl",
+        "majority_mask_filter": [
+            {"block_replace": "true", "block_replace_params": [[60, 2]], "majority_width": 0},
+            {"block_replace": "true", "block_replace_params": [[50, 3]], "majority_width": 0},
+            {"block_replace": "true", "block_replace_params": [[50, 10], [30, 10]], "majority_width": 10}
+        ]
+    },
+    "processing": {
+        "Exception": "None",
+        "interface": {
+            "args": [],
+            "kwargs": {}
+        },
+        "Exception_type": "",
+        "clear_fraction": "None",
+        "status": 1,
+        "tIO": 0.0,
+        "tRT": 0.0,
+        "uncertainties": {}
+    },
+    "settings": {
+        "fo_instances": {
+            "aerosol_0": {
+                "flag": 10
+            },
+            "ch4": {
+                "flag": 10
+            }
+        },
+        "atm_fields": [
+            "tmp",
+            "tau_a",
+            "cwv",
+            "ch4",
+            "coz",
+            "spr"
+        ],
+        "pt_names": [
+            "spr",
+            "coz",
+            "cwv",
+            "tmp",
+            "tau_a",
+            "vza",
+            "sza",
+            "azi",
+            "ch4"
+        ],
+        "pt_indexes": {
+            "aerosol_0": [
+                0,
+                1,
+                2,
+                3,
+                4,
+                5,
+                6,
+                7
+            ],
+            "ch4": [
+                0,
+                1,
+                2,
+                8,
+                4,
+                5,
+                6,
+                7
+            ]
+        }
+    }
+}
\ No newline at end of file
diff --git a/tests/data/DLR_L2A_DEM_UTM32.bsq b/tests/data/DLR_L2A_DEM_UTM32.bsq
new file mode 100644
index 0000000000000000000000000000000000000000..7ef66b7fdddb2220f2cf25774f13df645e1b36ea
Binary files /dev/null and b/tests/data/DLR_L2A_DEM_UTM32.bsq differ
diff --git a/tests/data/DLR_L2A_DEM_UTM32.hdr b/tests/data/DLR_L2A_DEM_UTM32.hdr
new file mode 100644
index 0000000000000000000000000000000000000000..e4632656c281a3e92b9994896ef02de59bf1033d
--- /dev/null
+++ b/tests/data/DLR_L2A_DEM_UTM32.hdr
@@ -0,0 +1,15 @@
+ENVI
+description = {
+/home/gfz-fe/scheffler/temp/enpt_testing/dlr_l1b_test_data/DEM_UTM32.bsq}
+samples = 1564
+lines   = 1759
+bands   = 1
+header offset = 0
+file type = ENVI Standard
+data type = 2
+interleave = bsq
+byte order = 0
+map info = {UTM, 1, 1, 622274.84071595, 5302469.31172718, 30, 30, 32, North,WGS-84}
+coordinate system string = {PROJCS["WGS_1984_UTM_Zone_32N",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]]}
+band names = {
+Band 1}
diff --git a/tests/data/EnMAP_Level_1B/ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000003_20181214T160003Z__rows0-99.zip b/tests/data/EnMAP_Level_1B/ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000003_20181214T160003Z__rows0-99.zip
new file mode 100644
index 0000000000000000000000000000000000000000..8d5bee5139f8363b239c31c747aa813b57d9acb4
--- /dev/null
+++ b/tests/data/EnMAP_Level_1B/ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000003_20181214T160003Z__rows0-99.zip
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:414187f15426150478da2932cebf45570f5926a4f9f06a1bfce2340687d80154
+size 46721157
diff --git a/tests/data/EnMAP_Level_1B/ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000003_20181214T160003Z__rows100-199.zip b/tests/data/EnMAP_Level_1B/ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000003_20181214T160003Z__rows100-199.zip
new file mode 100644
index 0000000000000000000000000000000000000000..36bc08dfd3f9e2650fb32e42dc00c1f676ca90b1
--- /dev/null
+++ b/tests/data/EnMAP_Level_1B/ENMAP01-____L1B-DT000000987_20130205T105307Z_001_V000003_20181214T160003Z__rows100-199.zip
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:37c7038d39195ba3ff2bc17cf026b875317c9fd82cf127635f62140d88ff474a
+size 46629876
diff --git a/tests/data/rpc_coeffs_B200.pkl b/tests/data/rpc_coeffs_B200.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..8e97eaa0c9047ad8ebe5dea6c1f0d3e0df141d8e
Binary files /dev/null and b/tests/data/rpc_coeffs_B200.pkl differ
diff --git a/tests/gitlab_CI_docker/build_enpt_testsuite_image.sh b/tests/gitlab_CI_docker/build_enpt_testsuite_image.sh
index f6a46a60db0b6644685c025d31b782f97925487c..ec2072ff1593075c8910ecd520e3a108086ba4b4 100755
--- a/tests/gitlab_CI_docker/build_enpt_testsuite_image.sh
+++ b/tests/gitlab_CI_docker/build_enpt_testsuite_image.sh
@@ -2,7 +2,7 @@
 
 context_dir="./context"
 dockerfile="enpt_ci.docker"
-tag="enpt_ci:0.4.0"
+tag="enpt_ci:0.7.0"
 gitlab_runner="enpt_gitlab_CI_runner"
 
 # get sicor project
@@ -18,6 +18,7 @@ git clone https://gitext.gfz-potsdam.de/EnMAP/sicor.git ./context/sicor
 echo "#### Build runner docker image"
 sudo docker rmi ${tag}
 sudo docker build -f ${context_dir}/${dockerfile} -m 20G -t ${tag} ${context_dir}
+# sudo docker build -f ./context/enpt_ci.docker -m 20G -t enpt_ci:0.7.0 ./context --no-cache
 
 echo "#### Create gitlab-runner (daemon) container with tag; ${tag}"
 sudo docker stop ${gitlab_runner}
diff --git a/tests/gitlab_CI_docker/context/enpt_ci.docker b/tests/gitlab_CI_docker/context/enpt_ci.docker
index a64bdeda24ee687a02cee66661871702b9643dca..fe09f8c42e1c87b4fcf87ba604a8cf6b5d8c0abb 100644
--- a/tests/gitlab_CI_docker/context/enpt_ci.docker
+++ b/tests/gitlab_CI_docker/context/enpt_ci.docker
@@ -9,7 +9,7 @@ ENV miniconda_dl 'Miniconda3-latest-Linux-x86_64.sh'
 # use miniconda 4.3.31 as workaround for "IsADirectoryError(21, 'Is a directory')" with version 4.4.10 (currently latest)
 # ENV miniconda_dl 'Miniconda3-4.3.31-Linux-x86_64.sh'
 ENV envconfig 'environment_enpt.yml'
-ENV git_lfs_v='2.4.1'
+ENV git_lfs_v='2.6.1'
 
 RUN /bin/bash -i -c "cd /root; wget https://repo.continuum.io/miniconda/$miniconda_dl ; \
     bash -i /root/$miniconda_dl -b ; \
@@ -26,9 +26,9 @@ RUN /bin/bash -i -c "source /root/miniconda3/bin/activate ; \
 RUN /bin/bash -i -c "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.rpm.sh | bash"
 
 # install git large file support, see here: https://git-lfs.github.com/
-RUN /bin/bash -i -c "wget https://github.com/git-lfs/git-lfs/releases/download/v${git_lfs_v}/git-lfs-linux-amd64-${git_lfs_v}.tar.gz; \
-    tar -zxvf git-lfs-linux-amd64-${git_lfs_v}.tar.gz; \
-    cd git-lfs-${git_lfs_v}; \
+RUN /bin/bash -i -c "wget https://github.com/git-lfs/git-lfs/releases/download/v${git_lfs_v}/git-lfs-linux-amd64-v${git_lfs_v}.tar.gz; \
+    tar -zxvf git-lfs-linux-amd64-v${git_lfs_v}.tar.gz; \
+    cd git-lfs-v${git_lfs_v}; \
     bash ./install.sh"
 
 # copy sicor code to /tmp
diff --git a/tests/gitlab_CI_docker/context/environment_enpt.yml b/tests/gitlab_CI_docker/context/environment_enpt.yml
index 8361974aef619c172f2f663d0db6b74711026ce2..00aa1bef807d9f1c9b8a48100da3bc04b6deea78 100644
--- a/tests/gitlab_CI_docker/context/environment_enpt.yml
+++ b/tests/gitlab_CI_docker/context/environment_enpt.yml
@@ -8,18 +8,21 @@ channels: &id1
 dependencies:
   - python=3.*.*
   - numpy
+  - pandas
+  - lxml
 
   # geoarray / py_tools_ds
   - pyqt
   - gdal
+  - conda-forge::libgdal  # force to use conda-forge for libgdal to avoid package version incompatibilities due to mixed channels
   - scikit-image
   - rasterio
   - pyproj
-  # - lxml
   - geopandas
   - ipython
   - matplotlib
   - basemap
+  # - geos>3.7.0   # pinning should not be needed anymore - https://github.com/conda-forge/basemap-feedstock/issues/43
   - shapely
   - holoviews
   - bokeh
@@ -35,21 +38,20 @@ dependencies:
   - h5py
   - pytables
   - llvmlite
+  - numba  # force install via conda-forge to get rid of llvm incompatibilities
   - scikit-learn
 
   - pip:
     - scipy
     - geoarray>=0.8.9
-    - py_tools_ds>=0.14.2
-    - spectral>=0.16
+    - py_tools_ds>=0.14.8
     - cerberus
     - jsmin
     - tqdm
     - utm
-    - lxml
-    - imageio
+    - numpy-indexed
     - sphinx-argparse
-    - pycodestyle<2.4.0  # version 2.4.0 causes ImportError: module 'pycodestyle' has no attribute 'break_around_binary_operator'
+    - pycodestyle
     - flake8
     - pylint
     - pydocstyle
diff --git a/tests/linting/pydocstyle.log b/tests/linting/pydocstyle.log
index ae3ac5afabc08aa087d4eb376f4de0db79cef4f4..6c091c5d33f93714ea59e499e91d8cedacb5fc8b 100644
--- a/tests/linting/pydocstyle.log
+++ b/tests/linting/pydocstyle.log
@@ -4,50 +4,76 @@ enpt/io/reader.py:40 in public method `read_inputdata`:
         D205: 1 blank line required between summary line and description (found 0)
 enpt/io/reader.py:40 in public method `read_inputdata`:
         D400: First line should end with a period (not ']')
-enpt/model/images.py:340 in public method `get_paths`:
-        D205: 1 blank line required between summary line and description (found 0)
-enpt/model/images.py:340 in public method `get_paths`:
-        D400: First line should end with a period (not 'o')
-enpt/model/images.py:386 in public method `generate_quicklook`:
-        D205: 1 blank line required between summary line and description (found 0)
-enpt/model/images.py:386 in public method `generate_quicklook`:
+enpt/model/images.py:303 in private method `generate_quicklook`:
         D400: First line should end with a period (not 'e')
-enpt/model/images.py:454 in public method `get_paths`:
+enpt/model/images.py:367 in public method `get_paths`:
         D205: 1 blank line required between summary line and description (found 0)
-enpt/model/images.py:454 in public method `get_paths`:
+enpt/model/images.py:367 in public method `get_paths`:
         D400: First line should end with a period (not 'o')
-enpt/model/images.py:565 in public method `run_AC`:
+enpt/model/images.py:401 in public method `get_preprocessed_dem`:
         D102: Missing docstring in public method
-enpt/model/images.py:572 in public method `save`:
+enpt/model/images.py:437 in public method `append_new_image`:
         D205: 1 blank line required between summary line and description (found 0)
-enpt/model/images.py:572 in public method `save`:
-        D400: First line should end with a period (not 't')
-enpt/model/images.py:614 in public method `append_new_image`:
+enpt/model/images.py:640 in public method `get_paths`:
+        D400: First line should end with a period (not 'o')
+enpt/model/images.py:752 in public method `get_preprocessed_dem`:
+        D102: Missing docstring in public method
+enpt/model/images.py:756 in public method `run_AC`:
+        D102: Missing docstring in public method
+enpt/model/images.py:889 in public class `EnMAPL2Product_MapGeo`:
+        D204: 1 blank line required after class docstring (found 0)
+enpt/model/images.py:889 in public class `EnMAPL2Product_MapGeo`:
+        D413: Missing blank line after last section ('Attributes')
+enpt/model/images.py:900 in public method `__init__`:
+        D102: Missing docstring in public method
+enpt/model/images.py:967 in public method `from_L1B_sensorgeo`:
+        D102: Missing docstring in public method
+enpt/model/images.py:973 in public method `get_paths`:
         D205: 1 blank line required between summary line and description (found 0)
-enpt/model/metadata.py:82 in public method `read_metadata`:
+enpt/model/images.py:973 in public method `get_paths`:
+        D400: First line should end with a period (not 'o')
+enpt/model/images.py:993 in public method `save`:
         D205: 1 blank line required between summary line and description (found 0)
-enpt/model/metadata.py:82 in public method `read_metadata`:
+enpt/model/images.py:993 in public method `save`:
+        D400: First line should end with a period (not 't')
+enpt/model/metadata.py:119 in public method `read_metadata`:
         D400: First line should end with a period (not 'y')
-enpt/model/metadata.py:217 in public method `interpolate_corners`:
-        D205: 1 blank line required between summary line and description (found 0)
-enpt/model/metadata.py:240 in public method `calc_solar_irradiance_CWL_FWHM_per_band`:
+enpt/model/metadata.py:379 in public method `compute_geolayer_for_cube`:
+        D102: Missing docstring in public method
+enpt/model/metadata.py:392 in public method `calc_solar_irradiance_CWL_FWHM_per_band`:
         D102: Missing docstring in public method
-enpt/model/metadata.py:261 in public class `EnMAP_Metadata_L1B_SensorGeo`:
+enpt/model/metadata.py:410 in public class `EnMAP_Metadata_L1B_SensorGeo`:
         D413: Missing blank line after last section ('Attributes')
-enpt/model/metadata.py:299 in public method `read_common_meta`:
+enpt/model/metadata.py:455 in public method `scene_basename`:
+        D102: Missing docstring in public method
+enpt/model/metadata.py:462 in public method `read_common_meta`:
         D202: No blank lines allowed after function docstring (found 1)
-enpt/model/metadata.py:299 in public method `read_common_meta`:
+enpt/model/metadata.py:462 in public method `read_common_meta`:
         D205: 1 blank line required between summary line and description (found 0)
-enpt/model/metadata.py:299 in public method `read_common_meta`:
+enpt/model/metadata.py:462 in public method `read_common_meta`:
         D400: First line should end with a period (not 'o')
-enpt/model/metadata.py:324 in public method `get_earth_sun_distance`:
+enpt/model/metadata.py:515 in public method `get_earth_sun_distance`:
         D400: First line should end with a period (not ')')
-enpt/model/metadata.py:346 in public method `read_metadata`:
+enpt/model/metadata.py:537 in public method `read_metadata`:
         D202: No blank lines allowed after function docstring (found 1)
-enpt/model/metadata.py:346 in public method `read_metadata`:
-        D205: 1 blank line required between summary line and description (found 0)
-enpt/model/metadata.py:346 in public method `read_metadata`:
+enpt/model/metadata.py:537 in public method `read_metadata`:
         D400: First line should end with a period (not 'y')
+enpt/model/metadata.py:555 in public method `to_XML`:
+        D200: One-line docstring should fit on one line with quotes (found 3)
+enpt/model/metadata.py:581 in public class `EnMAP_Metadata_L2A_MapGeo`:
+        D101: Missing docstring in public class
+enpt/model/metadata.py:582 in public method `__init__`:
+        D403: First word of the first line should be properly capitalized ('Enmap', not 'EnMAP')
+enpt/model/metadata.py:680 in public method `get_common_UL_UR_LL_LR`:
+        D102: Missing docstring in public method
+enpt/model/metadata.py:692 in public method `add_band_statistics`:
+        D102: Missing docstring in public method
+enpt/model/metadata.py:698 in public method `add_product_fileinformation`:
+        D102: Missing docstring in public method
+enpt/model/metadata.py:723 in public method `to_XML`:
+        D200: One-line docstring should fit on one line with quotes (found 3)
+enpt/model/metadata.py:873 in public function `xmlSubtree2dict`:
+        D103: Missing docstring in public function
 enpt/model/srf.py:11 in public class `SRF`:
         D101: Missing docstring in public class
 enpt/model/srf.py:96 in public method `instrument`:
@@ -72,55 +98,115 @@ enpt/utils/logging.py:141 in public method `__enter__`:
         D105: Missing docstring in magic method
 enpt/utils/logging.py:144 in public method `__exit__`:
         D105: Missing docstring in magic method
-enpt/processors/spatial_transform/spatial_transform.py:1 at module level:
-        D100: Missing docstring in public module
-enpt/processors/spatial_transform/spatial_transform.py:5 in public class `Geometry_Transformer`:
+enpt/processors/spatial_transform/spatial_transform.py:22 in public class `Geometry_Transformer`:
         D101: Missing docstring in public class
-enpt/processors/dead_pixel_correction/dead_pixel_correction.py:15 in public class `Dead_Pixel_Corrector`:
+enpt/processors/spatial_transform/spatial_transform.py:26 in public method `to_sensor_geometry`:
+        D102: Missing docstring in public method
+enpt/processors/spatial_transform/spatial_transform.py:40 in public method `to_map_geometry`:
+        D102: Missing docstring in public method
+enpt/processors/spatial_transform/spatial_transform.py:79 in public class `Geometry_Transformer_3D`:
+        D101: Missing docstring in public class
+enpt/processors/spatial_transform/spatial_transform.py:83 in public method `to_sensor_geometry`:
+        D102: Missing docstring in public method
+enpt/processors/spatial_transform/spatial_transform.py:97 in public method `to_map_geometry`:
+        D102: Missing docstring in public method
+enpt/processors/spatial_transform/spatial_transform.py:147 in public class `RPC_Geolayer_Generator`:
+        D200: One-line docstring should fit on one line with quotes (found 3)
+enpt/processors/spatial_transform/spatial_transform.py:147 in public class `RPC_Geolayer_Generator`:
+        D204: 1 blank line required after class docstring (found 0)
+enpt/processors/spatial_transform/spatial_transform.py:383 in public method `__call__`:
+        D102: Missing docstring in public method
+enpt/processors/spatial_transform/spatial_transform.py:390 in public function `mp_initializer_for_RPC_3D_Geolayer_Generator`:
+        D400: First line should end with a period (not ')')
+enpt/processors/spatial_transform/spatial_transform.py:399 in public class `RPC_3D_Geolayer_Generator`:
+        D200: One-line docstring should fit on one line with quotes (found 3)
+enpt/processors/spatial_transform/spatial_transform.py:399 in public class `RPC_3D_Geolayer_Generator`:
+        D204: 1 blank line required after class docstring (found 0)
+enpt/processors/spatial_transform/spatial_transform.py:444 in public method `compute_geolayer`:
+        D102: Missing docstring in public method
+enpt/processors/spatial_transform/spatial_transform.py:483 in public function `compute_mapCoords_within_sensorGeoDims`:
+        D205: 1 blank line required between summary line and description (found 0)
+enpt/processors/spatial_transform/spatial_transform.py:483 in public function `compute_mapCoords_within_sensorGeoDims`:
+        D400: First line should end with a period (not ':')
+enpt/processors/spatial_transform/spatial_transform.py:524 in public function `get_UTMEPSG_from_LonLat`:
+        D103: Missing docstring in public function
+enpt/processors/spatial_transform/spatial_transform.py:531 in public function `get_center_coord`:
+        D103: Missing docstring in public function
+enpt/processors/spatial_transform/spatial_transform.py:540 in public function `get_UTMEPSG_from_LonLat_cornersXY`:
+        D103: Missing docstring in public function
+enpt/processors/dem_preprocessing/dem_preprocessing.py:16 in public class `DEM_Processor`:
+        D101: Missing docstring in public class
+enpt/processors/dem_preprocessing/dem_preprocessing.py:17 in public method `__init__`:
+        D102: Missing docstring in public method
+enpt/processors/dem_preprocessing/dem_preprocessing.py:54 in public method `fill_gaps`:
+        D102: Missing docstring in public method
+enpt/processors/dem_preprocessing/dem_preprocessing.py:57 in public method `compute_slopes`:
+        D102: Missing docstring in public method
+enpt/processors/dem_preprocessing/dem_preprocessing.py:61 in public method `compute_aspect`:
+        D102: Missing docstring in public method
+enpt/processors/dem_preprocessing/dem_preprocessing.py:65 in public method `to_sensor_geometry`:
+        D102: Missing docstring in public method
+enpt/processors/dead_pixel_correction/dead_pixel_correction.py:18 in public class `Dead_Pixel_Corrector`:
         D101: Missing docstring in public class
 enpt/processors/atmospheric_correction/atmospheric_correction.py:24 in public method `get_ac_options`:
         D102: Missing docstring in public method
 enpt/processors/atmospheric_correction/atmospheric_correction.py:47 in public method `run_ac`:
         D102: Missing docstring in public method
-enpt/execution/controller.py:88 in public method `run_geometry_processor`:
+enpt/processors/orthorectification/__init__.py:1 at module level:
+        D205: 1 blank line required between summary line and description (found 0)
+enpt/processors/orthorectification/__init__.py:1 at module level:
+        D400: First line should end with a period (not 'y')
+enpt/processors/orthorectification/orthorectification.py:1 at module level:
+        D205: 1 blank line required between summary line and description (found 0)
+enpt/processors/orthorectification/orthorectification.py:1 at module level:
+        D400: First line should end with a period (not 'y')
+enpt/processors/orthorectification/orthorectification.py:24 in public class `Orthorectifier`:
+        D101: Missing docstring in public class
+enpt/processors/orthorectification/orthorectification.py:30 in public method `validate_input`:
+        D102: Missing docstring in public method
+enpt/processors/orthorectification/orthorectification.py:44 in public method `run_transformation`:
+        D102: Missing docstring in public method
+enpt/execution/controller.py:91 in public method `run_dem_processor`:
+        D102: Missing docstring in public method
+enpt/execution/controller.py:94 in public method `run_geometry_processor`:
         D102: Missing docstring in public method
-enpt/execution/controller.py:95 in public method `write_output`:
+enpt/execution/controller.py:106 in public method `write_output`:
         D102: Missing docstring in public method
-enpt/options/config.py:48 in public class `EnPTConfig`:
+enpt/options/config.py:79 in public class `EnPTConfig`:
         D101: Missing docstring in public class
-enpt/options/config.py:49 in public method `__init__`:
+enpt/options/config.py:80 in public method `__init__`:
         D202: No blank lines allowed after function docstring (found 1)
-enpt/options/config.py:131 in public method `absPath`:
+enpt/options/config.py:170 in public method `absPath`:
         D102: Missing docstring in public method
-enpt/options/config.py:134 in public method `get_parameter`:
+enpt/options/config.py:173 in public method `get_parameter`:
         D102: Missing docstring in public method
-enpt/options/config.py:197 in public method `to_dict`:
+enpt/options/config.py:236 in public method `to_dict`:
         D202: No blank lines allowed after function docstring (found 1)
-enpt/options/config.py:211 in public method `to_jsonable_dict`:
+enpt/options/config.py:250 in public method `to_jsonable_dict`:
         D102: Missing docstring in public method
-enpt/options/config.py:222 in public method `__repr__`:
+enpt/options/config.py:261 in public method `__repr__`:
         D105: Missing docstring in magic method
-enpt/options/config.py:226 in public function `json_to_python`:
+enpt/options/config.py:265 in public function `json_to_python`:
         D103: Missing docstring in public function
-enpt/options/config.py:259 in public function `python_to_json`:
+enpt/options/config.py:298 in public function `python_to_json`:
         D103: Missing docstring in public function
-enpt/options/config.py:281 in public class `EnPTValidator`:
+enpt/options/config.py:320 in public class `EnPTValidator`:
         D101: Missing docstring in public class
-enpt/options/config.py:282 in public method `__init__`:
+enpt/options/config.py:321 in public method `__init__`:
         D205: 1 blank line required between summary line and description (found 0)
-enpt/options/config.py:282 in public method `__init__`:
+enpt/options/config.py:321 in public method `__init__`:
         D400: First line should end with a period (not 'r')
-enpt/options/config.py:290 in public method `validate`:
+enpt/options/config.py:329 in public method `validate`:
         D102: Missing docstring in public method
-enpt/options/config.py:295 in public function `get_options`:
+enpt/options/config.py:334 in public function `get_options`:
         D202: No blank lines allowed after function docstring (found 1)
 enpt/options/__init__.py:1 at module level:
         D104: Missing docstring in public package
-enpt/options/options_schema.py:119 in public function `get_updated_schema`:
+enpt/options/options_schema.py:128 in public function `get_updated_schema`:
         D103: Missing docstring in public function
-enpt/options/options_schema.py:120 in private nested function `deep_update`:
+enpt/options/options_schema.py:129 in private nested function `deep_update`:
         D202: No blank lines allowed after function docstring (found 1)
-enpt/options/options_schema.py:120 in private nested function `deep_update`:
+enpt/options/options_schema.py:129 in private nested function `deep_update`:
         D400: First line should end with a period (not 'e')
-enpt/options/options_schema.py:139 in public function `get_param_from_json_config`:
+enpt/options/options_schema.py:148 in public function `get_param_from_json_config`:
         D103: Missing docstring in public function
diff --git a/tests/test_controller.py b/tests/test_controller.py
index 2c53f8350308e0f8d2d3442f188e86356ef5fcc4..8448fa18fa4e5b03b24f4b8ca2b95e90a54ff19a 100644
--- a/tests/test_controller.py
+++ b/tests/test_controller.py
@@ -12,7 +12,7 @@ from unittest import TestCase, main
 import shutil
 
 from enpt.execution.controller import EnPT_Controller
-from enpt.options.config import config_for_testing
+from enpt.options.config import config_for_testing, config_for_testing_dlr
 
 
 class Test_EnPT_Controller(TestCase):
@@ -27,5 +27,17 @@ class Test_EnPT_Controller(TestCase):
         self.CTR.run_all_processors()
 
 
+class Test_EnPT_Controller_DLR_testdata(TestCase):
+    def setUp(self):
+        self.CTR = EnPT_Controller(**config_for_testing_dlr)
+
+    def tearDown(self):
+        # NOTE: ignore_errors deletes the folder, regardless of whether it contains read-only files
+        shutil.rmtree(self.CTR.cfg.output_dir, ignore_errors=True)
+
+    def test_run_all_processors(self):
+        self.CTR.run_all_processors()
+
+
 if __name__ == '__main__':
     main()
diff --git a/tests/test_dead_pixel_correction.py b/tests/test_dead_pixel_correction.py
index 38f55a691a0edf9c25dffde09dbfb1c66a616196..c6470fe21d37f911790e181d4aa5c145c32347a1 100644
--- a/tests/test_dead_pixel_correction.py
+++ b/tests/test_dead_pixel_correction.py
@@ -1,47 +1,215 @@
 # -*- coding: utf-8 -*-
 
-import unittest
+from unittest import TestCase
 
 import numpy as np
 from geoarray import GeoArray
 
-from enpt.processors.dead_pixel_correction import Dead_Pixel_Corrector
+from enpt.processors.dead_pixel_correction import \
+    Dead_Pixel_Corrector, \
+    interp_nodata_along_axis_2d, \
+    interp_nodata_along_axis, \
+    interp_nodata_spatially_2d
 
 
-class Test_Dead_Pixel_Corrector(unittest.TestCase):
+class Test_Dead_Pixel_Corrector(TestCase):
 
     def setUp(self):
         """Set up the needed test data"""
-        self.DPC = Dead_Pixel_Corrector(algorithm='spectral', interp='linear', filter_halfwidth=2)
-
         # create test data
-        self.im = np.random.randint(0, 10, (50, 1000, 88), np.int16)  # VNIR size
-        self.deadpixelmap = np.zeros((self.im.shape[2], self.im.shape[1]))
+        self.im = np.random.randint(1, 10, (50, 1000, 88)).astype(np.float)  # VNIR test size
 
+        # create 2D dead pixel map
+        self.deadpixelmap_2D = np.zeros((self.im.shape[2], self.im.shape[1]), np.bool)
         for band, column in \
             [[0, 0],  # first band, first column
              [0, 2],  # first band, any column
-             [1, 2],  # first band, same column
+             [1, 2],  # second band, same column
              [50, 4],  # 2 adjacent bands
              [51, 4],  # 2 adjacent bands
              [60, 20],  # single dead column
-             [87, 50],  # second last band, same column
-             [86, 50],  # last band, same column
+             [86, 50],  # second last band, same column
+             [87, 50],  # last band, same column
              [87, 2]]:  # single dead column, last band
-            self.im[:, column, band] = 0
-            self.deadpixelmap[band, column] = 1
+            self.im[:, column, band] = np.nan
+            self.deadpixelmap_2D[band, column] = 1
+
+        # create 3D dead pixel map
+        self.deadpixelmap_3D = np.isnan(self.im)
+
+    def validate_output_spectral_interp(self, output):
+        self.assertIsInstance(output, (GeoArray, np.ndarray))
+        self.assertEqual(self.im.shape, output.shape)
+        self.assertNotEqual(np.mean(output[:, 0, 0]), 0)  # first band, first column
+        self.assertNotEqual(np.mean(output[:, 2, 0]), 0)  # first band, any column
+        self.assertNotEqual(np.mean(output[:, 2, 1]), 0)  # second band, same column
+        self.assertNotEqual(np.mean(output[:, 4, 50]), 0)  # 2 adjacent bands
+        self.assertNotEqual(np.mean(output[:, 4, 10]), 0)  # 2 adjacent bands
+        self.assertNotEqual(np.mean(output[:, 20, 60]), 0)  # single dead column
+        self.assertNotEqual(np.mean(output[:, 50, 86]), 0)  # second last band, same column
+        self.assertNotEqual(np.mean(output[:, 50, 87]), 0)  # last band, same column
+        self.assertNotEqual(np.mean(output[:, 2, 87]), 0)  # single dead column, last band
+
+    def test_correct_using_2D_deadpixelmap_spectral(self):
+        DPC = Dead_Pixel_Corrector(algorithm='spectral', interp_spectral='linear')
+        corrected = DPC.correct(self.im, self.deadpixelmap_2D)
+
+        # output assertions
+        self.validate_output_spectral_interp(corrected)
 
-    def test_correct(self):
-        corrected = self.DPC.correct(self.im, self.deadpixelmap)
+    def test_correct_using_3D_deadpixelmap_spectral(self):
+        DPC = Dead_Pixel_Corrector(algorithm='spectral', interp_spectral='linear')
+        corrected = DPC.correct(self.im, self.deadpixelmap_3D)
 
         # output assertions
-        self.assertIsInstance(corrected, (GeoArray, np.ndarray))
-        self.assertEqual(np.mean(self.im[:, 0, 0]), 0)  # first band, first column (currently not corrected)
-        self.assertNotEqual(np.mean(self.im[:, 2, 0]), 0)  # first band, any column
-        self.assertNotEqual(np.mean(self.im[:, 2, 1]), 0)  # first band, same column
-        self.assertNotEqual(np.mean(self.im[:, 4, 50]), 0)  # 2 adjacent bands
-        self.assertNotEqual(np.mean(self.im[:, 4, 10]), 0)  # 2 adjacent bands
-        self.assertNotEqual(np.mean(self.im[:, 20, 60]), 0)  # single dead column
-        self.assertNotEqual(np.mean(self.im[:, 50, 86]), 0)  # second last band, same column
-        self.assertNotEqual(np.mean(self.im[:, 50, 87]), 0)  # last band, same column
-        self.assertNotEqual(np.mean(self.im[:, 2, 87]), 0)  # single dead column, last band
+        self.validate_output_spectral_interp(corrected)
+
+    def test_correct_using_2D_deadpixelmap_spatial(self):
+        DPC = Dead_Pixel_Corrector(algorithm='spatial', interp_spatial='linear')
+        corrected = DPC.correct(self.im, self.deadpixelmap_2D)
+
+        # output assertions
+        self.validate_output_spectral_interp(corrected)
+
+    def test_correct_using_3D_deadpixelmap_spatial(self):
+        DPC = Dead_Pixel_Corrector(algorithm='spatial', interp_spatial='linear')
+        corrected = DPC.correct(self.im, self.deadpixelmap_3D)
+
+        # output assertions
+        self.validate_output_spectral_interp(corrected)
+
+
+class Test_interp_nodata_along_axis_2d(TestCase):
+    @staticmethod
+    def get_data2d():
+        return np.array([[0, 0, 2],
+                         [3, np.nan, 5],
+                         [np.nan, 10, 8]])
+
+    def test_axis_0(self):
+        data_int = interp_nodata_along_axis_2d(self.get_data2d(), axis=0, method='linear')
+        arr_exp = np.array([[0, 0, 2], [3, 5, 5], [6, 10, 8]])
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+        mask_nodata = ~np.isfinite(self.get_data2d())
+        data_int = interp_nodata_along_axis_2d(self.get_data2d(), axis=0, nodata=mask_nodata, method='linear')
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+        data_int = interp_nodata_along_axis_2d(self.get_data2d(), axis=0, method='linear', fill_value=-1)
+        arr_exp = np.array([[0, 0, 2], [3, 5, 5], [-1, 10, 8]])
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+    def test_axis_1(self):
+        data_int = interp_nodata_along_axis_2d(self.get_data2d(), axis=1, method='linear')
+        arr_exp = np.array([[0, 0, 2], [3, 4, 5], [12, 10, 8]])
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+        mask_nodata = ~np.isfinite(self.get_data2d())
+        data_int = interp_nodata_along_axis_2d(self.get_data2d(), axis=1, nodata=mask_nodata, method='linear')
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+        data_int = interp_nodata_along_axis_2d(self.get_data2d(), axis=1, method='linear', fill_value=-1)
+        arr_exp = np.array([[0, 0, 2], [3, 4, 5], [-1, 10, 8]])
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+    def test_bad_args(self):
+        with self.assertRaises(ValueError):
+            interp_nodata_along_axis_2d(self.get_data2d(), axis=3)
+        with self.assertRaises(ValueError):
+            interp_nodata_along_axis_2d(np.dstack([self.get_data2d(), self.get_data2d()]))
+
+
+class Test_interp_nodata_along_axis(TestCase):
+    @staticmethod
+    def get_data3d():
+        data3d = np.zeros((3, 3, 3))
+        data3d[:, :, 0] = [[0, 0, 2],
+                           [3, np.nan, 5],
+                           [np.nan, 10, 8]]
+        data3d[:, :, 1] = [[10, 10, 12],
+                           [13, np.nan, 15],
+                           [16, 110, np.nan]]
+        data3d[:, :, 2] = [[20, 20, 22],
+                           [23, np.nan, 25],
+                           [np.nan, 210, 20]]
+
+        return data3d
+
+    def test_3d_axis_0(self):
+        data_int = interp_nodata_along_axis(self.get_data3d(), axis=0, method='linear')
+        arr_exp = np.zeros((3, 3, 3))
+        arr_exp[:, :, 0] = [[0, 0, 2], [3, 5, 5], [6, 10, 8]]
+        arr_exp[:, :, 1] = [[10, 10, 12], [13, 60, 15], [16, 110, 18]]
+        arr_exp[:, :, 2] = [[20, 20, 22], [23, 115, 25], [26, 210, 20]]
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+        mask_nodata = ~np.isfinite(self.get_data3d())
+        data_int = interp_nodata_along_axis(self.get_data3d(), axis=0, nodata=mask_nodata, method='linear')
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+    def test_3d_axis_1(self):
+        data_int = interp_nodata_along_axis(self.get_data3d(), axis=1, method='linear')
+        arr_exp = np.zeros((3, 3, 3))
+        arr_exp[:, :, 0] = [[0, 0, 2], [3, 4, 5], [12, 10, 8]]
+        arr_exp[:, :, 1] = [[10, 10, 12], [13, 14, 15], [16, 110, 204]]
+        arr_exp[:, :, 2] = [[20, 20, 22], [23, 24, 25], [400, 210, 20]]
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+        mask_nodata = ~np.isfinite(self.get_data3d())
+        data_int = interp_nodata_along_axis(self.get_data3d(), axis=1, nodata=mask_nodata, method='linear')
+        self.assertTrue(np.array_equal(data_int, arr_exp), 'Computed %s.' % data_int)
+
+    def test_3d_axis_2(self):
+        data_int = interp_nodata_along_axis(self.get_data3d(), axis=2, method='linear')
+        arr_exp = np.zeros((3, 3, 3))
+        arr_exp[:, :, 0] = [[0, 0, 2], [3, np.nan, 5], [np.nan, 10, 8]]
+        arr_exp[:, :, 1] = [[10, 10, 12], [13, np.nan, 15], [16, 110, 14]]
+        arr_exp[:, :, 2] = [[20, 20, 22], [23, np.nan, 25], [np.nan, 210, 20]]
+        np.testing.assert_array_equal(data_int, arr_exp, 'Computed %s.' % data_int)
+
+        mask_nodata = ~np.isfinite(self.get_data3d())
+        data_int = interp_nodata_along_axis(self.get_data3d(), axis=2, nodata=mask_nodata, method='linear')
+        np.testing.assert_array_equal(data_int, arr_exp, 'Computed %s.' % data_int)
+
+    def test_2d(self):
+        data_int = interp_nodata_along_axis(Test_interp_nodata_along_axis_2d.get_data2d())
+        self.assertTrue(np.array_equal(data_int, np.array([[0, 0, 2],
+                                                           [3, 5, 5],
+                                                           [6, 10, 8]])), 'Computed %s.' % data_int)
+
+    def test_bad_args(self):
+        with self.assertRaises(ValueError):
+            interp_nodata_along_axis(np.array([1, 2, 3]))
+
+
+class Test_interp_nodata_spatially_2d(TestCase):
+    @staticmethod
+    def get_data2d():
+        return np.array([[0, 0, 2, 12],
+                         [3, np.nan, 5, np.nan],
+                         [np.nan, 20, 8, 3]])
+
+    def test_interpolation_scipy(self):
+        data_int_scipy = interp_nodata_spatially_2d(self.get_data2d(), nodata=np.nan, method='linear',
+                                                    fill_value=np.nan, implementation='scipy')
+        arr_exp_scipy = np.array([[0, 0, 2, 12], [3, 10, 5, 7.5], [np.nan, 20, 8, 3]])
+        np.testing.assert_array_equal(data_int_scipy, arr_exp_scipy, 'Computed %s.' % data_int_scipy)
+
+        mask_nodata = ~np.isfinite(self.get_data2d())
+        data_int_scipy = interp_nodata_spatially_2d(self.get_data2d(), nodata=mask_nodata, method='linear',
+                                                    fill_value=np.nan, implementation='scipy')
+        np.testing.assert_array_equal(data_int_scipy, arr_exp_scipy, 'Computed %s.' % data_int_scipy)
+
+    def test_interpolation_pandas(self):
+        data_int_pandas = interp_nodata_spatially_2d(self.get_data2d(), nodata=np.nan, method='linear',
+                                                     fill_value=np.nan, implementation='pandas')
+        arr_exp_pandas = np.array([[0, 0, 2, 12], [3, 10, 5, 7.5], [3, 20, 8, 3]])
+        np.testing.assert_array_equal(data_int_pandas, arr_exp_pandas, 'Computed %s.' % data_int_pandas)
+
+    def test_bad_args(self):
+        with self.assertRaises(ValueError):
+            interp_nodata_spatially_2d(np.array([1, 2, 3]))
+        with self.assertRaises(ValueError):
+            interp_nodata_spatially_2d(self.get_data2d(), nodata=np.array([1, 2, 3]))
+        with self.assertRaises(ValueError):
+            interp_nodata_spatially_2d(self.get_data2d(), implementation='invalid')
diff --git a/tests/test_dem_preprocessing.py b/tests/test_dem_preprocessing.py
index ce88a297d585cf3f127caaa4103f35ec8565a691..6b9edf166f0a11ddc03b26874181366086f2f3d0 100644
--- a/tests/test_dem_preprocessing.py
+++ b/tests/test_dem_preprocessing.py
@@ -10,7 +10,8 @@ Tests for `processors.dem_preprocessing` module.
 
 from unittest import TestCase
 
-from py_tools_ds.geo.projection import prj_equal
+import numpy as np
+from py_tools_ds.geo.projection import EPSG2WKT
 from geoarray import GeoArray
 
 from enpt.processors.dem_preprocessing import DEM_Processor
@@ -21,14 +22,45 @@ from enpt.model.metadata import EnMAP_Metadata_L1B_Detector_SensorGeo
 class Test_DEM_Processor(TestCase):
     def setUp(self):
         self.path_demfile = config_for_testing['path_dem']
-        self.DP_mapgeo = DEM_Processor(self.path_demfile)
 
         # get lons/lats
         lat_UL_UR_LL_LR = [47.545359963328366, 47.48153190433143, 47.505282507365813, 47.441546248160961]
         lon_UL_UR_LL_LR = [10.701359191637021, 11.072698329235017, 10.686064194247395, 11.056744608586392]
+        self.ll_cornerCoords = tuple(zip(lon_UL_UR_LL_LR, lat_UL_UR_LL_LR))
         self.lats = EnMAP_Metadata_L1B_Detector_SensorGeo.interpolate_corners(*lat_UL_UR_LL_LR, 1000, 100)
         self.lons = EnMAP_Metadata_L1B_Detector_SensorGeo.interpolate_corners(*lon_UL_UR_LL_LR, 1000, 100)
 
+        self.DP_mapgeo = DEM_Processor(self.path_demfile, enmapIm_cornerCoords=self.ll_cornerCoords)
+
+    def test_init_nomapinfo(self):
+        dem = GeoArray(np.array([1, 2]))
+
+        # no map info, no projection
+        with self.assertRaises(ValueError):
+            DEM_Processor(dem, enmapIm_cornerCoords=self.ll_cornerCoords)
+
+        # no projection
+        dem.gt = (10.6, 0.00036, -0.0, 47.5, -0.0, -0.00036)  # can be anything
+        with self.assertRaises(ValueError):
+            DEM_Processor(dem, enmapIm_cornerCoords=self.ll_cornerCoords)
+
+    def test_init_noWGS84(self):
+        # NAD83 projection ({'proj': 'longlat', 'ellps': 'GRS80', 'towgs84': '0,0,0,0,0,0,0'})
+        with self.assertRaises(ValueError):
+            dem = GeoArray(np.array([1, 2]),
+                           geotransform=(10.6, 0.00036, -0.0, 47.5, -0.0, -0.00036),  # can be anything
+                           projection=EPSG2WKT(4269))  # NAD83
+            DEM_Processor(dem, enmapIm_cornerCoords=self.ll_cornerCoords)
+
+    def test_init_demTooSmall(self):
+        # covers only 100x100 px in the upper left (<5%)
+        dem = GeoArray(np.random.randint(0, 500, (100, 100)),
+                       geotransform=(626938.928052, 30.0, 0, 5267203.56579, 0, -30.0),
+                       projection=EPSG2WKT(32632))
+
+        with self.assertRaises(ValueError):
+            DEM_Processor(dem, enmapIm_cornerCoords=self.ll_cornerCoords)
+
     def test_fill_gaps(self):
         pass
 
@@ -42,11 +74,3 @@ class Test_DEM_Processor(TestCase):
         dem_sensor_geo = self.DP_mapgeo.to_sensor_geometry(lons=self.lons, lats=self.lats)
 
         self.assertEquals(dem_sensor_geo.shape, (100, 1000))
-
-    def test_to_map_geometry(self):
-        dem_sensor_geo = self.DP_mapgeo.to_sensor_geometry(lons=self.lons, lats=self.lats)
-
-        DP_sensorgeo = DEM_Processor(GeoArray(dem_sensor_geo))
-        dem_map_geo_gA = DP_sensorgeo.to_map_geometry(lons=self.lons, lats=self.lats, tgt_prj=32632)  # UTM32
-
-        self.assertTrue(prj_equal(dem_map_geo_gA.prj, 32632))
diff --git a/tests/test_l1b_reader.py b/tests/test_l1b_reader.py
index 68506fd102ac949d0c800747232d8d6295cb51b8..e3603ab783bbc14cd8ec7eb22578bc45e61d4ce7 100644
--- a/tests/test_l1b_reader.py
+++ b/tests/test_l1b_reader.py
@@ -15,7 +15,9 @@ import tempfile
 import zipfile
 import shutil
 
-from enpt.options.config import EnPTConfig, config_for_testing
+from enpt.io.reader import L1B_Reader
+from enpt.model.images import EnMAPL1Product_SensorGeo
+from enpt.options.config import EnPTConfig, config_for_testing, config_for_testing_dlr
 
 
 class Test_L1B_Reader(unittest.TestCase):
@@ -32,9 +34,6 @@ class Test_L1B_Reader(unittest.TestCase):
         shutil.rmtree(self.config.output_dir)
 
     def test_read_and_save_inputdata(self):
-        from enpt.io.reader import L1B_Reader
-        from enpt.model.images import EnMAPL1Product_SensorGeo
-
         print("")
         print("################################################")
         print("#                                              #")
@@ -113,7 +112,7 @@ class Test_L1B_Reader(unittest.TestCase):
         print("")
 
         for k_prod1, k_prod2 in ((0, 1), (1, 0)):
-            for n_lines in (-1, 10, 50, 80, 100, 150):
+            for n_lines in (-1, 10, 50, 80, 100, 150):  # TODO isolate the test for different number of lines
                 tempdir = tempfile.mkdtemp(dir=self.config.working_dir)
                 if n_lines is -1:
                     n_lines = "all"
@@ -154,5 +153,28 @@ class Test_L1B_Reader(unittest.TestCase):
         return
 
 
+class Test_L1B_Reader_DLR(unittest.TestCase):
+    """Tests for L1B_Reader class."""
+
+    def setUp(self):
+        self.config = EnPTConfig(**config_for_testing_dlr)
+        self.pathList_testimages = [self.config.path_l1b_enmap_image,
+                                    self.config.path_l1b_enmap_image_gapfill]
+        self.tmpdir = tempfile.mkdtemp(dir=self.config.working_dir)
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
+        shutil.rmtree(self.config.output_dir)
+
+    def test_read_inputdata(self):
+        with zipfile.ZipFile(self.pathList_testimages[0], "r") as zf:
+            zf.extractall(self.tmpdir)
+
+        RD = L1B_Reader(config=self.config)
+
+        L1_obj = RD.read_inputdata(self.tmpdir, compute_snr=False)
+        L1_obj.save(path.join(self.config.output_dir, "no_snr"))
+
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/test_orthorectification.py b/tests/test_orthorectification.py
new file mode 100644
index 0000000000000000000000000000000000000000..a8a14ce13072bfe0e91f8060e4a57a8ab01c189a
--- /dev/null
+++ b/tests/test_orthorectification.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+test_orthorectification
+-----------------------
+
+Tests for `processors.orthorectification.orthorectification` module.
+"""
+
+import os
+from unittest import TestCase
+from zipfile import ZipFile
+import tempfile
+import shutil
+
+from enpt.processors.orthorectification import Orthorectifier
+from enpt.options.config import config_for_testing, EnPTConfig
+from enpt.io.reader import L1B_Reader
+from enpt.model.images import EnMAPL2Product_MapGeo
+
+
+class Test_Orthorectifier(TestCase):
+    def setUp(self):
+        self.config = EnPTConfig(**config_for_testing)
+
+        # create a temporary directory
+        # NOTE: This must exist during the whole runtime of Test_Orthorectifier, otherwise
+        #       Orthorectifier.run_transformation will fail to read some files.
+        self.tmpdir = tempfile.mkdtemp(dir=self.config.working_dir)
+
+        # get lons / lats
+        with ZipFile(self.config.path_l1b_enmap_image, "r") as zf:
+            zf.extractall(self.tmpdir)
+            self.L1_obj = L1B_Reader(config=self.config).read_inputdata(
+                root_dir_main=os.path.join(self.tmpdir,
+                                           os.path.splitext(os.path.basename(self.config.path_l1b_enmap_image))[0]),
+                compute_snr=False)
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
+
+    def test_run_transformation(self):
+        # FIXME td does not exist here anymore
+        OR = Orthorectifier(config=self.config)
+        L2_obj = OR.run_transformation(self.L1_obj)
+
+        self.assertIsInstance(L2_obj, EnMAPL2Product_MapGeo)
+        self.assertTrue(L2_obj.data.is_map_geo)
diff --git a/tests/test_spatial_transform.py b/tests/test_spatial_transform.py
index d53ee7c1ddeb7f3c4607040154840659c6f0e520..b93c256f1e739fc05ad862ff38112b1d55536915 100644
--- a/tests/test_spatial_transform.py
+++ b/tests/test_spatial_transform.py
@@ -9,20 +9,25 @@ Tests for `processors.spatial_transform.spatial_transform` module.
 """
 
 import os
+from typing import Tuple  # noqa: F401
 from unittest import TestCase
 from tempfile import TemporaryDirectory
 from zipfile import ZipFile
+import pickle
 import numpy as np
 
 from geoarray import GeoArray
 from py_tools_ds.geo.coord_grid import is_point_on_grid
 
-from enpt.processors.spatial_transform import Geometry_Transformer
+from enpt.processors.spatial_transform import Geometry_Transformer, RPC_Geolayer_Generator
 from enpt.options.config import config_for_testing, EnPTConfig
 from enpt.io.reader import L1B_Reader
 from enpt.options.config import enmap_coordinate_grid
 
 
+path_testdata = os.path.abspath(os.path.join(__file__, '..', 'data'))
+
+
 class Test_Geometry_Transformer(TestCase):
     def setUp(self):
         config = EnPTConfig(**config_for_testing)
@@ -42,30 +47,80 @@ class Test_Geometry_Transformer(TestCase):
         self.gA2transform_mapgeo = GeoArray(config.path_dem)  # a DEM in map geometry given by the user
 
     def test_to_map_geometry(self):
+        GT = Geometry_Transformer(lons=self.lons_vnir, lats=self.lats_vnir)
+
         # transforming map to map geometry must raise a RuntimeError
         with self.assertRaises(RuntimeError):
-            GT = Geometry_Transformer(self.gA2transform_mapgeo, lons=self.lons_vnir, lats=self.lats_vnir)
-            GT.to_map_geometry(tgt_prj=32632)
+            GT.to_map_geometry(self.gA2transform_mapgeo, tgt_prj=32632)
 
         # test transformation to UTM zone 32
-        GT = Geometry_Transformer(self.gA2transform_sensorgeo, lons=self.lons_vnir, lats=self.lats_vnir)
-        data_mapgeo, gt, prj = GT.to_map_geometry(tgt_prj=32632)
+        data_mapgeo, gt, prj = GT.to_map_geometry(self.gA2transform_sensorgeo, tgt_prj=32632)
         self.assertEqual((gt[1], -gt[5]), (np.ptp(enmap_coordinate_grid['x']),
                                            np.ptp(enmap_coordinate_grid['x'])))  # 30m output
         self.assertTrue(is_point_on_grid((gt[0], gt[3]),
                                          xgrid=enmap_coordinate_grid['x'], ygrid=enmap_coordinate_grid['y']))
 
         # test transformation to LonLat
-        GT = Geometry_Transformer(self.gA2transform_sensorgeo, lons=self.lons_vnir, lats=self.lats_vnir)
-        GT.to_map_geometry(tgt_prj=4326)
+        GT.to_map_geometry(self.gA2transform_sensorgeo, tgt_prj=4326)
 
     def test_to_sensor_geometry(self):
+        GT = Geometry_Transformer(lons=self.lons_vnir, lats=self.lats_vnir)
+
         # transforming sensor to sensor geometry must raise a RuntimeError
         with self.assertRaises(RuntimeError):
-            GT = Geometry_Transformer(self.gA2transform_sensorgeo, lons=self.lons_vnir, lats=self.lats_vnir)
-            GT.to_sensor_geometry()
+            GT.to_sensor_geometry(self.gA2transform_sensorgeo)
 
-        GT = Geometry_Transformer(self.gA2transform_mapgeo, lons=self.lons_vnir, lats=self.lats_vnir)
-        data_sensorgeo = GT.to_sensor_geometry()
+        data_sensorgeo = GT.to_sensor_geometry(self.gA2transform_mapgeo)
 
         self.assertEqual(data_sensorgeo.shape, self.gA2transform_sensorgeo.shape)
+
+
+class Test_RPC_Geolayer_Generator(TestCase):
+    def setUp(self):
+        with open(os.path.join(path_testdata, 'rpc_coeffs_B200.pkl'), 'rb') as pklF:
+            self.rpc_coeffs = pickle.load(pklF)
+
+        # bounding polygon DLR test data
+        self.lats = np.array([47.7872236, 47.7232358, 47.5195676, 47.4557831])
+        self.lons = np.array([10.7966311, 11.1693436, 10.7111131, 11.0815993])
+        corner_coords = tuple(zip(self.lons, self.lats))  # type: Tuple[Tuple[float, float]]
+
+        # spatial coverage of datatake DLR test data
+        # self.lats = np.array([47.7870358956, 47.723060779, 46.9808418244, 46.9174014681])
+        # self.lons = np.array([10.7968099213, 11.1693752478, 10.5262233116, 10.8932492494])
+
+        self.heights = np.array([764, 681, 842, 1539])  # values from ASTER DEM
+        # TODO validate dem before passing to RPC_Geolayer_Generator
+        self.dem = os.path.join(path_testdata, 'DLR_L2A_DEM_UTM32.bsq')
+        self.dims_sensorgeo = (1024, 1000)
+
+        self.RPCGG = RPC_Geolayer_Generator(self.rpc_coeffs,
+                                            dem=self.dem,
+                                            enmapIm_cornerCoords=corner_coords,
+                                            enmapIm_dims_sensorgeo=self.dims_sensorgeo)
+
+    def test_normalize_coordinates(self):
+        lon_norm, lat_norm, height_norm = \
+            self.RPCGG._normalize_map_coordinates(lon=self.lons, lat=self.lats, height=self.heights)
+        self.assertEquals(lon_norm.shape, self.lons.shape)
+        self.assertEquals(lat_norm.shape, self.lats.shape)
+        self.assertEquals(height_norm.shape, self.heights.shape)
+
+    def test_compute_normalized_image_coordinates(self):
+        row_norm, col_norm = self.RPCGG._compute_normalized_image_coordinates(
+            lon_norm=np.array([-0.61827327,  1.02025641, -0.99423002,  0.63451233]),
+            lat_norm=np.array([0.97834101,  0.59053179, -0.64383482, -1.0304119]),
+            height_norm=np.array([-0.85741862, -0.79074519, -0.72407176, -0.65739833]),
+        )
+
+        rows, cols = self.RPCGG._denormalize_image_coordinates(row_norm, col_norm)
+
+    def test_transform_LonLatHeight_to_RowCol(self):
+        rows, cols = self.RPCGG.transform_LonLatHeight_to_RowCol(lon=self.lons, lat=self.lats, height=self.heights)
+
+    def test_compute_geolayer(self):
+        lons_interp, lats_interp = self.RPCGG.compute_geolayer()
+        self.assertEquals(lons_interp.shape, lats_interp.shape)
+        self.assertEquals(lons_interp.shape, self.dims_sensorgeo)
+        self.assertFalse(np.isnan(lons_interp).any())
+        self.assertFalse(np.isnan(lats_interp).any())