diff --git a/CMakeLists.txt b/CMakeLists.txt
index d3d714388..4705872a3 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -21,6 +21,7 @@ set(CMAKE_CXX_STANDARD 11)
 option(ALE_BUILD_LOAD "If the C++ Python load interface should be built." ON)
 option(ALE_USE_EXTERNAL_JSON "If an external nlohmann JSON library should be used" ON)
 option(ALE_USE_EXTERNAL_EIGEN "If an external EIGEN library should be used" ON)
+option(ALE_USE_EXTERNAL_HIGHFIVE "If an external HIGHFIVE library should be used" ON)
 
 # Third Party Dependencies
 if(ALE_USE_EXTERNAL_JSON)
@@ -39,6 +40,12 @@ else()
                       ${CMAKE_CURRENT_SOURCE_DIR}/eigen)
 endif()
 
+if(ALE_USE_EXTERNAL_HIGHFIVE)
+  find_package(HDF5 COMPONENTS C CXX)
+  find_package(highfive REQUIRED)
+endif()
+
+
 if(ALE_BUILD_LOAD)
   # If there is an Anaconda environment activated, search that for Python first
   if(EXISTS $ENV{CONDA_PREFIX})
@@ -141,4 +148,4 @@ if(ALE_BUILD_DOCS)
   add_subdirectory ("docs")
 else()
   message(STATUS "Skipping Docs")
-endif()
\ No newline at end of file
+endif()
diff --git a/environment.yml b/environment.yml
index 1703afda5..a1790a943 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,13 +1,13 @@
 name: ale
 channels:
   - conda-forge
-  - default
 dependencies:
   - cmake>=3.15
   - pytest
   - eigen
   - gdal
+  - highfive
   - jupyter
   - nlohmann_json
   - numpy
@@ -23,3 +23,5 @@ dependencies:
   - networkx
   - breathe
   - brotli
+  - h5py
+  - pip
diff --git a/include/ale/Isd.h b/include/ale/Isd.h
index 41e489c24..35471b28d 100644
--- a/include/ale/Isd.h
+++ b/include/ale/Isd.h
@@ -5,6 +5,7 @@
 #include <vector>
 #include <map>
+#include <highfive/H5File.hpp>
 #include <nlohmann/json.hpp>
 
 #include "ale/Distortion.h"
@@ -68,6 +69,63 @@ namespace ale {
     Orientations inst_pointing;
     Orientations body_rotation;
   };
+
+
+  class MultiSpectralIsd {
+    public:
+
+    MultiSpectralIsd(HighFive::File);
+
+    std::vector<std::string> usgscsm_name_model;
+    std::vector<std::string> name_platform;
+    std::vector<std::string> image_id;
+    std::vector<std::string> name_sensor;
+
+    std::vector<double> semi_major;
+    std::vector<double> semi_minor;
+
+    std::vector<double> detector_sample_summing;
+    std::vector<double> detector_line_summing;
+
+    std::vector<double> focal_length;
+    std::vector<double> focal_uncertainty;
+
+    std::vector<double> detector_center_line;
+    std::vector<double> detector_center_sample;
+
+    // should probably be ints
+    std::vector<double> starting_detector_line;
+    std::vector<double> starting_detector_sample;
+
+    std::vector<std::vector<double>> focal2pixel_line;
+    std::vector<std::vector<double>> focal2pixel_sample;
+
+    // maybe change
+    std::vector<DistortionType> distortion_model;
+    std::vector<std::vector<double>> distortion_coefficients;
+
+    std::vector<int> image_lines;
+    std::vector<int> image_samples;
+
+    std::vector<double> max_reference_height;
+    std::vector<double> min_reference_height;
+
+    std::vector<std::vector<std::vector<double>>> line_scan_rate;
+
+    std::vector<double> starting_ephemeris_time;
+    std::vector<double> center_ephemeris_time;
+
+    std::vector<nlohmann::json> naif_keywords;
+
+    std::vector<PositionInterpolation> interpMethod;
+
+    std::vector<States> inst_pos;
+    std::vector<States> sun_pos;
+
+    std::vector<Orientations> inst_pointing;
+    std::vector<Orientations> body_rotation;
+  };
 }
 
 #endif
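Reviewer note: MultiSpectralIsd stores one entry per band in parallel vectors (a struct-of-arrays layout), so a single index selects one band's full parameter set. A minimal sketch of band-wise access, assuming an already-constructed object; the helper function and printed fields are illustrative, not part of the patch:

```cpp
#include <cstddef>
#include <iostream>

#include "ale/Isd.h"

// Illustrative only: every member of MultiSpectralIsd is a parallel
// per-band vector, so one index selects a whole band's parameters.
void printBandSummary(const ale::MultiSpectralIsd &isd) {
  for (size_t band = 0; band < isd.focal_length.size(); band++) {
    std::cout << "band " << band
              << ": focal length " << isd.focal_length[band]
              << ", image size " << isd.image_samples[band]
              << "x" << isd.image_lines[band] << "\n";
  }
}
```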
diff --git a/include/ale/Util.h b/include/ale/Util.h
index dbc50be69..3db39a73a 100644
--- a/include/ale/Util.h
+++ b/include/ale/Util.h
@@ -1,6 +1,7 @@
 #ifndef ALE_UTIL_H
 #define ALE_UTIL_H
 
+#include <highfive/H5File.hpp>
 #include <string>
 #include <nlohmann/json.hpp>
@@ -25,6 +26,17 @@ namespace ale {
     return positions;
   }
 
+  template <typename T>
+  std::vector<std::vector<T>> getMultiSpectralArray(HighFive::DataSet ds){
+    std::vector<std::vector<T>> data;
+    try {
+      ds.read(data);
+    } catch (...) {
+      throw std::runtime_error("Could not parse the hdf5 dataset");
+    }
+    return data;
+  }
+
   PositionInterpolation getInterpolationMethod(nlohmann::json isd);
 
   double getMinHeight(nlohmann::json isd);
@@ -65,9 +77,43 @@ namespace ale {
   States getInstrumentPosition(nlohmann::json isd);
 
   States getSunPosition(nlohmann::json isd);
 
-  Orientations getBodyRotation(nlohmann::json isd);
   Orientations getInstrumentPointing(nlohmann::json isd);
+
+
+  std::vector<std::string> getSensorModelName(HighFive::File hdf5);
+  std::vector<std::string> getImageId(HighFive::File hdf5);
+  std::vector<std::string> getPlatformName(HighFive::File hdf5);
+  std::vector<std::string> getSensorName(HighFive::File hdf5);
+  std::vector<int> getTotalLines(HighFive::File hdf5);
+  std::vector<int> getTotalSamples(HighFive::File hdf5);
+  std::vector<double> getStartingTime(HighFive::File hdf5);
+  std::vector<double> getCenterTime(HighFive::File hdf5);
+  std::vector<std::vector<std::vector<double>>> getLineScanRate(HighFive::File hdf5);
+  std::vector<double> getSampleSumming(HighFive::File hdf5);
+  std::vector<double> getLineSumming(HighFive::File hdf5);
+  std::vector<double> getFocalLength(HighFive::File hdf5);
+  std::vector<double> getFocalLengthUncertainty(HighFive::File hdf5);
+  std::vector<std::vector<double>> getFocal2PixelLines(HighFive::File hdf5);
+  std::vector<std::vector<double>> getFocal2PixelSamples(HighFive::File hdf5);
+  std::vector<double> getDetectorCenterLine(HighFive::File hdf5);
+  std::vector<double> getDetectorCenterSample(HighFive::File hdf5);
+  std::vector<double> getDetectorStartingLine(HighFive::File hdf5);
+  std::vector<double> getDetectorStartingSample(HighFive::File hdf5);
+  std::vector<double> getMinHeight(HighFive::File hdf5);
+  std::vector<double> getMaxHeight(HighFive::File hdf5);
+  std::vector<double> getSemiMajorRadius(HighFive::File hdf5);
+  std::vector<double> getSemiMinorRadius(HighFive::File hdf5);
+  std::vector<DistortionType> getDistortionModel(HighFive::File hdf5);
+  std::vector<std::vector<double>> getDistortionCoeffs(HighFive::File hdf5);
+  std::vector<PositionInterpolation> getInterpolationMethod(HighFive::File hdf5);
+  std::vector<Orientations> getBodyRotation(HighFive::File hdf5);
+  std::vector<std::vector<Vec3d>> getVec3dArray(HighFive::DataSet ds);
+  std::vector<std::vector<Rotation>> getQuatArray(HighFive::DataSet ds);
+
+  std::vector<States> getInstrumentPosition(HighFive::File hdf5);
+  std::vector<States> getSunPosition(HighFive::File hdf5);
+  std::vector<Orientations> getInstrumentPointing(HighFive::File hdf5);
 }
 
 #endif
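Reviewer note: getMultiSpectralArray is the generic entry point for 2-D `[band][value]` datasets, with the caller supplying the element type explicitly. A sketch under the assumption that the file contains the instrument_position/ephemeris_times dataset written by the notebook below; the wrapper function name is illustrative:

```cpp
#include <vector>

#include <highfive/H5File.hpp>

#include "ale/Util.h"

// Sketch: read a [band][sample] dataset with an explicit element type.
std::vector<std::vector<double>> readTimes(HighFive::File hdf5) {
  HighFive::DataSet ds = hdf5.getDataSet("instrument_position/ephemeris_times");
  // T = double here; the same helper reads int frame lists elsewhere.
  return ale::getMultiSpectralArray<double>(ds);
}
```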
"metadata": {}, + "outputs": [], + "source": [ + "isds_altered = isds.copy()" + ] + }, + { + "cell_type": "code", + "execution_count": 335, + "id": "30e9317f-8622-435d-aa04-6384c63f30f0", + "metadata": {}, + "outputs": [], + "source": [ + "for i, altered in enumerate(isds_altered):\n", + " diff = (1 + (i/100000000))\n", + " altered['instrument_pointing']['ephemeris_times'] = [ diff * num for num in altered['instrument_pointing']['ephemeris_times']]\n", + " altered['instrument_pointing']['quaternions'] = [[diff * num for num in quats] for quats in altered['instrument_pointing']['quaternions']]\n", + " altered['instrument_pointing']['angular_velocities'] = [[diff * num for num in quats] for quats in altered['instrument_pointing']['angular_velocities']]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 402, + "id": "00b302f3-83ea-4879-ac47-49a0aa370e5c", + "metadata": {}, + "outputs": [], + "source": [ + "def compress_multispectral_isd(outfile, json_str_list, alg='hdf5'):\n", + " if alg==\"hdf5\":\n", + " with h5py.File(outfile, 'w') as f:\n", + " def store_recursive(key, val, group):\n", + " chain = (group.name +'/'+ key).split('/')\n", + " # Drop empty elements from chain. Should find a better way to to do this.\n", + " chain = [item for item in chain if item]\n", + " if isinstance(val, dict):\n", + " # Create nested group in hdf5, i.e. f['group/subgroup']\n", + " subgroup = group.create_group(key)\n", + " for k, v in val.items():\n", + " store_recursive(k,v, subgroup)\n", + " elif isinstance(val, str):\n", + " data = np.array([val], dtype='S')\n", + " try:\n", + " # Use the list of keys to access same nested location in all the dictionaries\n", + " data = [reduce(lambda d, k: d[k], chain, item) for item in json_str_list]\n", + " except KeyError:\n", + " # If key isn't found, replicate a placeholder string for all isds.\n", + " # Should probably use data where it's available instead of replacing all values with empty string\n", + " data = [str(\"empty\")] * len(json_str_list)\n", + " # Encode all data to strings\n", + " data = list(map(lambda val: np.array([val], dtype='S'), data))\n", + " group.create_dataset(key, data=data)\n", + " else:\n", + " data = np.array([val])\n", + " if data.size > 1:\n", + " data = [reduce(lambda d, k: d.get(k, {'empty': []}), chain, item) for item in json_str_list]\n", + " group.create_dataset(key, data=data, compression='gzip', compression_opts=9)\n", + " else:\n", + " if '/' in key:\n", + " return\n", + " data = [reduce(lambda d, k: d.get(k, {'empty': []}), chain, item) for item in json_str_list]\n", + " group.create_dataset(key, data=data)\n", + " for k,v in json_str_list[0].items():\n", + " store_recursive(k,v, f)\n", + " elif alg==\"brotli\":\n", + " json_bytes = np.frombuffer(json_str.encode('utf-8'), dtype='uint8')\n", + " with open(outfile, 'wb') as f:\n", + " f.write(brotli.compress(json_bytes))\n", + " return True\n", + " elif alg==\"sozip\":\n", + " with szip.ZipFile(outfile, 'w', compression=szip.ZIP_DEFLATED) as f:\n", + " f.write(json_str, os.path.basename(outfile))\n", + " return True\n", + " elif alg==\"gdal\":\n", + " with gdal.Open('out.tif', gdal.GA_Update) as df:\n", + " def store_recursive(key, val, keyword):\n", + " chain = (keyword +'/'+ key).split('/')\n", + " # Drop empty elements from chain. Should find a better way to to do this.\n", + " chain = [item for item in chain if item]\n", + " if isinstance(val, dict):\n", + " # Create nested group in hdf5, i.e. 
+  {
+   "cell_type": "code",
+   "execution_count": 402,
+   "id": "00b302f3-83ea-4879-ac47-49a0aa370e5c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def compress_multispectral_isd(outfile, json_str_list, alg='hdf5'):\n",
+    "    if alg==\"hdf5\":\n",
+    "        with h5py.File(outfile, 'w') as f:\n",
+    "            def store_recursive(key, val, group):\n",
+    "                chain = (group.name +'/'+ key).split('/')\n",
+    "                # Drop empty elements from chain. Should find a better way to do this.\n",
+    "                chain = [item for item in chain if item]\n",
+    "                if isinstance(val, dict):\n",
+    "                    # Create nested group in hdf5, i.e. f['group/subgroup']\n",
+    "                    subgroup = group.create_group(key)\n",
+    "                    for k, v in val.items():\n",
+    "                        store_recursive(k,v, subgroup)\n",
+    "                elif isinstance(val, str):\n",
+    "                    data = np.array([val], dtype='S')\n",
+    "                    try:\n",
+    "                        # Use the list of keys to access the same nested location in all the dictionaries\n",
+    "                        data = [reduce(lambda d, k: d[k], chain, item) for item in json_str_list]\n",
+    "                    except KeyError:\n",
+    "                        # If key isn't found, replicate a placeholder string for all isds.\n",
+    "                        # Should probably use data where it's available instead of replacing all values with empty string\n",
+    "                        data = [str(\"empty\")] * len(json_str_list)\n",
+    "                    # Encode all data to strings\n",
+    "                    data = list(map(lambda val: np.array([val], dtype='S'), data))\n",
+    "                    group.create_dataset(key, data=data)\n",
+    "                else:\n",
+    "                    data = np.array([val])\n",
+    "                    if data.size > 1:\n",
+    "                        data = [reduce(lambda d, k: d.get(k, {'empty': []}), chain, item) for item in json_str_list]\n",
+    "                        group.create_dataset(key, data=data, compression='gzip', compression_opts=9)\n",
+    "                    else:\n",
+    "                        if '/' in key:\n",
+    "                            return\n",
+    "                        data = [reduce(lambda d, k: d.get(k, {'empty': []}), chain, item) for item in json_str_list]\n",
+    "                        group.create_dataset(key, data=data)\n",
+    "            for k,v in json_str_list[0].items():\n",
+    "                store_recursive(k,v, f)\n",
+    "    elif alg==\"brotli\":\n",
+    "        json_bytes = np.frombuffer(json.dumps(json_str_list).encode('utf-8'), dtype='uint8')\n",
+    "        with open(outfile, 'wb') as f:\n",
+    "            f.write(brotli.compress(json_bytes))\n",
+    "        return True\n",
+    "    elif alg==\"sozip\":\n",
+    "        with szip.ZipFile(outfile, 'w', compression=szip.ZIP_DEFLATED) as f:\n",
+    "            f.writestr(os.path.basename(outfile), json.dumps(json_str_list))\n",
+    "        return True\n",
+    "    elif alg==\"gdal\":\n",
+    "        with gdal.Open('out.tif', gdal.GA_Update) as df:\n",
+    "            def store_recursive(key, val, keyword):\n",
+    "                chain = (keyword +'/'+ key).split('/')\n",
+    "                # Drop empty elements from chain. Should find a better way to do this.\n",
+    "                chain = [item for item in chain if item]\n",
+    "                if isinstance(val, dict):\n",
+    "                    # Build the nested key path, i.e. 'group/subgroup'\n",
+    "                    subgroup = keyword + '/' + key\n",
+    "                    for k, v in val.items():\n",
+    "                        store_recursive(k,v, subgroup)\n",
+    "                elif isinstance(val, str):\n",
+    "                    data = np.array([val], dtype='S')\n",
+    "                    try:\n",
+    "                        # Use the list of keys to access the same nested location in all the dictionaries\n",
+    "                        data = [reduce(lambda d, k: d[k], chain, item) for item in json_str_list]\n",
+    "                    except KeyError:\n",
+    "                        # If key isn't found, replicate a placeholder string for all isds.\n",
+    "                        # Should probably use data where it's available instead of replacing all values with empty string\n",
+    "                        data = [str(\"empty\")] * len(json_str_list)\n",
+    "                    # Encode all data to strings\n",
+    "                    data = json.dumps(data)\n",
+    "                    df.SetMetadataItem('/'.join(chain), data)\n",
+    "                else:\n",
+    "                    data = np.array([val])\n",
+    "                    if data.size > 10:\n",
+    "                        data = [reduce(lambda d, k: d.get(k, {'empty': []}), chain, item) for item in json_str_list]\n",
+    "                        # Uncomment these lines to compress \n",
+    "                        # list of lists -> list of ndarrays\n",
+    "                        #data = list(map(np.array, data))\n",
+    "                        # list of ndarrays -> list of bytes\n",
+    "                        #data = list(map(np.ndarray.tobytes, data))\n",
+    "                        # encode strings as utf-8\n",
+    "                        #data = [base64.b64encode(byte_data).decode('utf-8') for byte_data in data]\n",
+    "                        # dump to json \n",
+    "                        data = json.dumps(data)\n",
+    "                        df.SetMetadataItem('/'.join(chain), data)\n",
+    "                    else:\n",
+    "                        if '/' in key:\n",
+    "                            return\n",
+    "                        data = [reduce(lambda d, k: d.get(k, {'empty': []}), chain, item) for item in json_str_list]\n",
+    "                        #group.create_dataset(key, data=data)\n",
+    "                        try:\n",
+    "                            data = json.dumps(data.astype(list))\n",
+    "                        except:\n",
+    "                            data = json.dumps(data)\n",
+    "                        df.SetMetadataItem('/'.join(chain), data)\n",
+    "            for k,v in json_str_list[0].items():\n",
+    "                store_recursive(k,v, '')\n",
+    "    else:\n",
+    "        return False\n",
+    "\n",
+    "\n",
+    "\n",
+    "    "
+   ]
+  },
"cell_type": "code", + "execution_count": 405, + "id": "ce2b798c-ce71-4dd2-9e6a-a663b91851df", + "metadata": {}, + "outputs": [], + "source": [ + "def read_sozip(zip_file):\n", + " with szip.ZipFile(zip_file, 'r') as f:\n", + " data = f.read(\"lroc.zip\")\n", + " isd = json.loads(data.decode('utf-8'))\n", + " return isd" + ] + }, + { + "cell_type": "code", + "execution_count": 411, + "id": "23746aad-8306-4eca-b369-3daaec29fc44", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "# Test that compression/decompression yields identical isds\n", + "#%timeit compress_isd(\"lroc.h5\", original_isd, \"hdf5\")\n", + "#compress_multispectral_isd(\"lroc_transpose.h5\", isds, \"hdf5\")\n", + "\n", + "compress_multispectral_isd(\"out.tif\", isds_altered, \"gdal\")\n", + "\n", + "\n", + "#print(\"compress brotli: \")\n", + "#%timeit compress_isd(\"lroc.br\", json.dumps(original_isd), \"brotli\")\n", + "#print(\"compress sozip: \")\n", + "#%timeit compress_isd(\"lroc.zip\", filename, \"sozip\")\n", + "#%timeit hdf5_isd = read_hdf5(\"lroc.h5\")\n", + "\n", + "#print(\"decompress brotli: \")\n", + "#%timeit brotli_isd = read_brotli(\"lroc.br\")\n", + "\n", + "#print(\"decompress sozip: \")\n", + "#%timeit sozip_isd = read_sozip(\"lroc.zip\")\n", + "\n", + "\n", + "#print(f\"HDF5: {hdf5_isd == original_isd}\")\n", + "#print(f\"brotli: {brotli_isd == original_isd}\")\n", + "#print(f\"sozip: {sozip_isd == original_isd}\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 356, + "id": "1ba5af5d-8d85-475a-84fb-d9f33f00b03b", + "metadata": {}, + "outputs": [], + "source": [ + "#compress_multispectral_isd(\"lroc_altered_transpose.h5\", isds_altered, \"hdf5\")" + ] + }, + { + "cell_type": "code", + "execution_count": 357, + "id": "0f27ad0c-357d-45f6-879e-0f998923e2a4", + "metadata": {}, + "outputs": [], + "source": [ + "hdf5_isd = read_hdf5('lroc_transpose.h5')" + ] + }, + { + "cell_type": "code", + "execution_count": 414, + "id": "9f2bf3a4-8b4b-4fda-8900-b3158cf28bc3", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "[[[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]\n", + "\n", + " [[ 6.22675585e-08 -1.02491422e-06 2.45531633e-06]\n", + " [ 6.22675607e-08 -1.02491422e-06 2.45531633e-06]]]\n", + "\n", + "[[[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 
9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]\n", + "\n", + " [[ 5.00000000e-01 -2.58657795e+01 9.90570599e-04]]]\n" + ] + } + ], + "source": [ + "with h5py.File('lroc_transpose.h5', 'r') as f:\n", + " # Start the recursive reading from the root of the file\n", + " print(f.keys())\n", + " print(f['body_rotation']['angular_velocities'][:])\n", + " print(f['isis_camera_version'])\n", + " print(f['line_scan_rate'][:])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b13d6b48-4087-4238-b68f-bf605c7bdcc6", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/Isd.cpp b/src/Isd.cpp index 855074dfd..4967353cd 100644 --- a/src/Isd.cpp +++ b/src/Isd.cpp @@ -59,3 +59,59 @@ ale::Isd::Isd(std::string isd_file) { inst_pointing = getInstrumentPointing(isd); body_rotation = getBodyRotation(isd); } + + + +ale::MultiSpectralIsd::MultiSpectralIsd(HighFive::File hdf5) { + usgscsm_name_model = getSensorModelName(hdf5); + image_id = getImageId(hdf5); + name_platform = getPlatformName(hdf5); + name_sensor = getSensorName(hdf5); + + image_lines = getTotalLines(hdf5); + image_samples = getTotalSamples(hdf5); + + starting_ephemeris_time = getStartingTime(hdf5); + center_ephemeris_time = getCenterTime(hdf5); + + try { + line_scan_rate = getLineScanRate(hdf5); + } catch (...) 
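Reviewer note: the transposed layout the notebook writes can be opened directly from C++ with HighFive. A small smoke-test sketch, assuming a file produced by compress_multispectral_isd above and that the fixed-length strings it stores convert to std::string on read; the filename is the notebook's, not a fixed convention:

```cpp
#include <iostream>
#include <string>
#include <vector>

#include <highfive/H5File.hpp>

int main() {
  // Open the HDF5 file written by the notebook (name is illustrative).
  HighFive::File hdf5("lroc_transpose.h5", HighFive::File::ReadOnly);

  // Each top-level key holds one dataset (or group) with a leading
  // band dimension, mirroring the JSON ISD keys.
  std::vector<std::string> models;
  hdf5.getDataSet("name_model").read(models);
  std::cout << "bands: " << models.size() << "\n";
  return 0;
}
```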
diff --git a/src/Isd.cpp b/src/Isd.cpp
index 855074dfd..4967353cd 100644
--- a/src/Isd.cpp
+++ b/src/Isd.cpp
@@ -59,3 +59,59 @@ ale::Isd::Isd(std::string isd_file) {
   inst_pointing = getInstrumentPointing(isd);
   body_rotation = getBodyRotation(isd);
 }
+
+
+
+ale::MultiSpectralIsd::MultiSpectralIsd(HighFive::File hdf5) {
+  usgscsm_name_model = getSensorModelName(hdf5);
+  image_id = getImageId(hdf5);
+  name_platform = getPlatformName(hdf5);
+  name_sensor = getSensorName(hdf5);
+
+  image_lines = getTotalLines(hdf5);
+  image_samples = getTotalSamples(hdf5);
+
+  starting_ephemeris_time = getStartingTime(hdf5);
+  center_ephemeris_time = getCenterTime(hdf5);
+
+  try {
+    line_scan_rate = getLineScanRate(hdf5);
+  } catch (...) {
+    // Framers do not deal with line scan rates
+    // This is assuming that we may be dealing with a framer rather than
+    // a malformed ISD
+    line_scan_rate = std::vector<std::vector<std::vector<double>>>();
+  }
+
+  detector_sample_summing = getSampleSumming(hdf5);
+  detector_line_summing = getLineSumming(hdf5);
+
+  focal_length = getFocalLength(hdf5);
+  focal_uncertainty = getFocalLengthUncertainty(hdf5);
+
+  focal2pixel_line = getFocal2PixelLines(hdf5);
+  focal2pixel_sample = getFocal2PixelSamples(hdf5);
+
+  detector_center_line = getDetectorCenterLine(hdf5);
+  detector_center_sample = getDetectorCenterSample(hdf5);
+
+  starting_detector_line = getDetectorStartingLine(hdf5);
+  starting_detector_sample = getDetectorStartingSample(hdf5);
+
+  min_reference_height = getMinHeight(hdf5);
+  max_reference_height = getMaxHeight(hdf5);
+
+  semi_major = getSemiMajorRadius(hdf5);
+  semi_minor = getSemiMinorRadius(hdf5);
+
+  distortion_model = getDistortionModel(hdf5);
+  distortion_coefficients = getDistortionCoeffs(hdf5);
+
+  interpMethod = getInterpolationMethod(hdf5);
+
+  inst_pos = getInstrumentPosition(hdf5);
+  sun_pos = getSunPosition(hdf5);
+
+  inst_pointing = getInstrumentPointing(hdf5);
+  body_rotation = getBodyRotation(hdf5);
+}
diff --git a/src/Util.cpp b/src/Util.cpp
index 8afcfa164..84135471e 100644
--- a/src/Util.cpp
+++ b/src/Util.cpp
@@ -686,4 +686,777 @@ Orientations getBodyRotation(json isd) {
   }
 }
 
+
+std::vector<std::string> getSensorModelName(HighFive::File hdf5) {
+  std::vector<std::string> name;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("name_model");
+    ds.read(name);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the sensor model name.");
+  }
+  return name;
+}
+
+std::vector<std::string> getImageId(HighFive::File hdf5) {
+  std::vector<std::string> id;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("image_identifier");
+    ds.read(id);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the image identifier.");
+  }
+  return id;
+}
+
+
+std::vector<std::vector<double>> getGeoTransform(HighFive::File hdf5) {
+  std::vector<std::vector<double>> transform;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("geotransform");
+    ds.read(transform);
+  } catch (std::exception &e) {
+    std::string originalError = e.what();
+    std::string msg = "Could not parse the geo_transform. ERROR: \n" + originalError;
+    throw std::runtime_error(msg);
+  }
+  return transform;
+}
+
+
+std::vector<std::string> getProjection(HighFive::File hdf5) {
+  std::vector<std::string> projection;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("projection");
+    ds.read(projection);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the projection string.");
+  }
+  return projection;
+}
+
+
+std::vector<std::string> getSensorName(HighFive::File hdf5) {
+  std::vector<std::string> name;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("name_sensor");
+    ds.read(name);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the sensor name.");
+  }
+  return name;
+}
+
+std::vector<int> getIsisCameraVersion(HighFive::File hdf5) {
+  std::vector<int> version;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("IsisCameraVersion");
+    ds.read(version);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the IsisCameraVersion.");
+  }
+  return version;
+}
+
+
+std::vector<std::string> getPlatformName(HighFive::File hdf5) {
+  std::vector<std::string> platformName;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("name_platform");
+    ds.read(platformName);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the platform name.");
+  }
+  return platformName;
+}
+
+std::vector<std::string> getLogFile(HighFive::File hdf5) {
+  std::vector<std::string> logFile;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("log_file");
+    ds.read(logFile);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the log filename.");
+  }
+  return logFile;
+}
+
+std::vector<int> getTotalLines(HighFive::File hdf5) {
+  std::vector<int> lines;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("image_lines");
+    ds.read(lines);
+  } catch (...) {
+    throw std::runtime_error(
+        "Could not parse the number of lines in the image.");
+  }
+  return lines;
+}
+
+std::vector<int> getTotalSamples(HighFive::File hdf5) {
+  std::vector<int> samples;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("image_samples");
+    ds.read(samples);
+  } catch (...) {
+    throw std::runtime_error(
+        "Could not parse the number of samples in the image.");
+  }
+  return samples;
+}
+
+std::vector<double> getStartingTime(HighFive::File hdf5) {
+  std::vector<double> time;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("starting_ephemeris_time");
+    ds.read(time);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the image start time.");
+  }
+  return time;
+}
+
+std::vector<double> getCenterTime(HighFive::File hdf5) {
+  std::vector<double> time;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("center_ephemeris_time");
+    ds.read(time);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the center image time.");
+  }
+  return time;
+}
+
+std::vector<PositionInterpolation> getInterpolationMethod(HighFive::File hdf5) {
+  std::vector<std::string> interpMethodStrings;
+  std::vector<PositionInterpolation> interpMethod;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("interpolation_method");
+    ds.read(interpMethodStrings);
+
+    for (auto &it : interpMethodStrings){
+      if (iequals(it, "linear")) {
+        interpMethod.push_back(PositionInterpolation::LINEAR);
+      }
+      else if (iequals(it, "spline")){
+        interpMethod.push_back(PositionInterpolation::SPLINE);
+      }
+      else if (iequals(it, "lagrange")) {
+        interpMethod.push_back(PositionInterpolation::LAGRANGE);
+      }
+      else {
+        // Default to linear so the per-band vectors stay aligned
+        interpMethod.push_back(PositionInterpolation::LINEAR);
+      }
+    }
+  } catch (...) {
+    throw std::runtime_error("Could not parse the interpolation method.");
+  }
+
+  return interpMethod;
+}
+
+std::vector<std::vector<std::vector<double>>> getLineScanRate(HighFive::File hdf5) {
+  std::vector<std::vector<std::vector<double>>> lines;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("line_scan_rate");
+    ds.read(lines);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the line scan rate from the isd.");
+  }
+  return lines;
+}
+
+
+std::vector<double> getSampleSumming(HighFive::File hdf5) {
+  std::vector<double> summing;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("detector_sample_summing");
+    ds.read(summing);
+  } catch (...) {
+    throw std::runtime_error(
+        "Could not parse the sample direction detector pixel summing.");
+  }
+  return summing;
+}
+
+std::vector<double> getLineSumming(HighFive::File hdf5) {
+  std::vector<double> summing;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("detector_line_summing");
+    ds.read(summing);
+  } catch (...) {
+    throw std::runtime_error(
+        "Could not parse the line direction detector pixel summing.");
+  }
+  return summing;
+}
+
+std::vector<double> getFocalLength(HighFive::File hdf5) {
+  std::vector<double> focalLength;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("focal_length_model/focal_length");
+    ds.read(focalLength);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the focal length.");
+  }
+  return focalLength;
+}
+
+std::vector<double> getFocalLengthUncertainty(HighFive::File hdf5) {
+  std::vector<double> focalUncertainty;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("focal_length_model/focal_uncertainty");
+    ds.read(focalUncertainty);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the focal length uncertainty.");
+  }
+  return focalUncertainty;
+}
+
+std::vector<std::vector<double>> getFocal2PixelLines(HighFive::File hdf5) {
+  std::vector<std::vector<double>> transformation;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("focal2pixel_lines");
+    ds.read(transformation);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the focal plane coordinate to "
+                             "detector lines transformation.");
+  }
+  return transformation;
+}
+
+std::vector<std::vector<double>> getFocal2PixelSamples(HighFive::File hdf5) {
+  std::vector<std::vector<double>> transformation;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("focal2pixel_samples");
+    ds.read(transformation);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the focal plane coordinate to "
+                             "detector samples transformation.");
+  }
+  return transformation;
+}
+
+std::vector<double> getDetectorCenterLine(HighFive::File hdf5) {
+  std::vector<double> line;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("detector_center/line");
+    ds.read(line);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the detector center line.");
+  }
+  return line;
+}
+
+std::vector<double> getDetectorCenterSample(HighFive::File hdf5) {
+  std::vector<double> sample;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("detector_center/sample");
+    ds.read(sample);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the detector center sample.");
+  }
+  return sample;
+}
+
+std::vector<double> getDetectorStartingLine(HighFive::File hdf5) {
+  std::vector<double> line;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("starting_detector_line");
+    ds.read(line);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the detector starting line.");
+  }
+  return line;
+}
+
+std::vector<double> getDetectorStartingSample(HighFive::File hdf5) {
+  std::vector<double> sample;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("starting_detector_sample");
+    ds.read(sample);
+  } catch (...) {
+    throw std::runtime_error("Could not parse the detector starting sample.");
+  }
+  return sample;
+}
+
+std::vector<double> getMinHeight(HighFive::File hdf5) {
+  std::vector<double> height;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("reference_height/minheight");
+    ds.read(height);
+  } catch (...) {
+    throw std::runtime_error(
+        "Could not parse the minimum height above the reference ellipsoid.");
+  }
+  return height;
+}
+
+std::vector<double> getMaxHeight(HighFive::File hdf5) {
+  std::vector<double> height;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("reference_height/maxheight");
+    ds.read(height);
+  } catch (...) {
+    throw std::runtime_error(
+        "Could not parse the maximum height above the reference ellipsoid.");
+  }
+  return height;
+}
+
+std::vector<double> getSemiMajorRadius(HighFive::File hdf5) {
+  std::vector<double> radius;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("radii/semimajor");
+    ds.read(radius);
+  } catch (...) {
+    throw std::runtime_error(
+        "Could not parse the reference ellipsoid semimajor radius.");
+  }
+  return radius;
+}
+
+std::vector<double> getSemiMinorRadius(HighFive::File hdf5) {
+  std::vector<double> radius;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("radii/semiminor");
+    ds.read(radius);
+
+  } catch (...) {
+    throw std::runtime_error(
+        "Could not parse the reference ellipsoid semiminor radius.");
+  }
+  return radius;
+}
+
+// Converts the distortion model name from the ISD (string) to the enumeration
+// type.
+std::vector<DistortionType> getDistortionModel(HighFive::File hdf5) {
+  std::vector<DistortionType> distortions;
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("optical_distortion");
+    std::vector<std::string> distortionStrings;
+    ds.read(distortionStrings);
+
+    for (auto &dist : distortionStrings){
+      if (dist.compare("transverse") == 0) {
+        distortions.push_back(DistortionType::TRANSVERSE);
+      } else if (dist.compare("radial") == 0) {
+        distortions.push_back(DistortionType::RADIAL);
+      } else if (dist.compare("kaguyalism") == 0) {
+        distortions.push_back(DistortionType::KAGUYALISM);
+      } else if (dist.compare("dawnfc") == 0) {
+        distortions.push_back(DistortionType::DAWNFC);
+      } else if (dist.compare("lrolrocnac") == 0) {
+        distortions.push_back(DistortionType::LROLROCNAC);
+      } else if (dist.compare("cahvor") == 0) {
+        distortions.push_back(DistortionType::CAHVOR);
+      } else if (dist.compare("lunarorbiter") == 0) {
+        distortions.push_back(DistortionType::LUNARORBITER);
+      } else if (dist.compare("radtan") == 0) {
+        distortions.push_back(DistortionType::RADTAN);
+      } else {
+        // Fall back to transverse so the per-band vectors stay aligned
+        distortions.push_back(DistortionType::TRANSVERSE);
+      }
+    }
+  } catch (...) {
+    throw std::runtime_error("Could not parse the distortion model.");
+  }
+  return distortions;
+}
+
+std::vector<std::vector<double>> getDistortionCoeffs(HighFive::File hdf5) {
+  std::vector<std::vector<double>> coefficients;
+
+  std::vector<DistortionType> distortion = getDistortionModel(hdf5);
+
+  //TODO handle different distortion types per band...?
+  switch (distortion[0]) {
+    case DistortionType::TRANSVERSE: {
+      try {
+        std::vector<std::vector<double>> coefficientsX, coefficientsY;
+
+        HighFive::DataSet ds = hdf5.getDataSet("optical_distortion/transverse/x");
+        ds.read(coefficientsX);
+
+        ds = hdf5.getDataSet("optical_distortion/transverse/y");
+        ds.read(coefficientsY);
+
+        for (size_t i = 0; i < coefficientsX.size(); i++) {
+          // Concatenate this band's x and y coefficient vectors
+          std::vector<double> combinedCoeff;
+          combinedCoeff.insert(combinedCoeff.end(), coefficientsX[i].begin(), coefficientsX[i].end());
+          combinedCoeff.insert(combinedCoeff.end(), coefficientsY[i].begin(), coefficientsY[i].end());
+
+          // Add the combined vector to the result
+          coefficients.push_back(combinedCoeff);
+        }
+
+        return coefficients;
+      } catch (...) {
+        throw std::runtime_error(
+            "Could not parse a set of transverse distortion model coefficients.");
+        /* TODO ???
+        coefficients = std::vector<double>(20, 0.0);
+        coefficients[1] = 1.0;
+        coefficients[12] = 1.0;
+        */
+      }
+    } break;
+    case DistortionType::RADIAL: {
+      try {
+        HighFive::DataSet ds = hdf5.getDataSet("optical_distortion/radial/coefficients");
+        ds.read(coefficients);
+        return coefficients;
+      } catch (...) {
+        throw std::runtime_error(
+            "Could not parse the radial distortion model coefficients.");
+        /* TODO ???
+        coefficients = std::vector<double>(3, 0.0);
+        */
+      }
+    } break;
+    case DistortionType::KAGUYALISM: {
+      try {
+        std::vector<std::vector<double>> coefficientsX, coefficientsY;
+        std::vector<double> boresightX, boresightY;
+        HighFive::DataSet ds = hdf5.getDataSet("optical_distortion/kaguyalism/x");
+        ds.read(coefficientsX);
+        ds = hdf5.getDataSet("optical_distortion/kaguyalism/y");
+        ds.read(coefficientsY);
+
+        ds = hdf5.getDataSet("optical_distortion/kaguyalism/boresight_x");
+        ds.read(boresightX);
+        ds = hdf5.getDataSet("optical_distortion/kaguyalism/boresight_y");
+        ds.read(boresightY);
+
+        for (size_t i = 0 ; i < coefficientsX.size(); i++){
+          coefficientsX[i].insert(coefficientsX[i].begin(), boresightX[i]);
+          coefficientsY[i].insert(coefficientsY[i].begin(), boresightY[i]);
+          coefficientsX[i].insert(coefficientsX[i].end(), coefficientsY[i].begin(),
+                                  coefficientsY[i].end());
+        }
+
+        return coefficientsX;
+      } catch (...) {
+        throw std::runtime_error("Could not parse a set of Kaguya LISM "
+                                 "distortion model coefficients.");
+        /* TODO
+        coefficients = std::vector<double>(8, 0.0);
+        */
+      }
+    } break;
+    case DistortionType::DAWNFC: {
+      try {
+        HighFive::DataSet ds = hdf5.getDataSet("optical_distortion/dawnfc/coefficients");
+        ds.read(coefficients);
+        return coefficients;
+      } catch (...) {
+        throw std::runtime_error(
+            "Could not parse the dawn radial distortion model coefficients.");
+        /* TODO
+        coefficients = std::vector<double>(1, 0.0);
+        */
+      }
+    } break;
+    case DistortionType::LROLROCNAC: {
+      try {
+        HighFive::DataSet ds = hdf5.getDataSet("optical_distortion/lrolrocnac/coefficients");
+        ds.read(coefficients);
+        return coefficients;
+      } catch (...) {
+        throw std::runtime_error(
+            "Could not parse the lrolrocnac distortion model coefficients.");
+        /* TODO
+        coefficients = std::vector<double>(1, 0.0);
+        */
+      }
+    } break;
+    case DistortionType::CAHVOR:
+    {
+      try
+      {
+        HighFive::DataSet ds = hdf5.getDataSet("optical_distortion/cahvor/coefficients");
+        ds.read(coefficients);
+        return coefficients;
+      }
+      catch (...)
+      {
+        throw std::runtime_error(
+            "Could not parse the cahvor distortion model coefficients.");
+        /* TODO
+        coefficients = std::vector<double>(5, 0.0);
+        */
+      }
+    } break;
+    case DistortionType::LUNARORBITER:
+    {
+      try
+      {
+        std::vector<double> perspectiveX, perspectiveY, centerPointX, centerPointY;
+        HighFive::DataSet ds = hdf5.getDataSet("optical_distortion/lunarorbiter/perspective_x");
+        ds.read(perspectiveX);
+        ds = hdf5.getDataSet("optical_distortion/lunarorbiter/perspective_y");
+        ds.read(perspectiveY);
+        ds = hdf5.getDataSet("optical_distortion/lunarorbiter/center_point_x");
+        ds.read(centerPointX);
+        ds = hdf5.getDataSet("optical_distortion/lunarorbiter/center_point_y");
+        ds.read(centerPointY);
+
+        for (size_t i = 0 ; i < perspectiveX.size(); i++){
+          coefficients.push_back({perspectiveX[i],perspectiveY[i],centerPointX[i],centerPointY[i]});
+        }
+
+        return coefficients;
+      }
+      catch (...)
+      {
+        throw std::runtime_error(
+            "Could not parse the Lunar Orbiter distortion model coefficients.");
+        /* TODO
+        coefficients = std::vector<double>(4, 0.0);
+        */
+      }
+    } break;
+    case DistortionType::RADTAN: {
+      try {
+        HighFive::DataSet ds = hdf5.getDataSet("optical_distortion/radtan/coefficients");
+        ds.read(coefficients);
+        return coefficients;
+      } catch (...) {
+        throw std::runtime_error(
+            "Could not parse the radtan distortion model coefficients.");
+        /* TODO
+        coefficients = std::vector<double>(5, 0.0);
+        */
+      }
+    } break;
+  }
+  throw std::runtime_error(
+      "Could not parse the distortion model coefficients.");
+
+  return coefficients;
+}
+
+std::vector<std::vector<Vec3d>> getVec3dArray(HighFive::DataSet ds) {
+  std::vector<std::vector<Vec3d>> positions;
+  // yikes.
+  // Each band in the multispectral isd has a 2d vector [band][position][x,y,z]
+  std::vector<std::vector<std::vector<double>>> data;
+  try {
+    ds.read(data);
+    for(auto &band : data){
+      // Create a vector of 3d coords for this band
+      std::vector<Vec3d> bandVec;
+      for (auto &item: band) {
+        // Push all the 3d coords in this band into one vector
+        Vec3d vec(item[0],item[1],item[2]);
+        bandVec.push_back(vec);
+      }
+      // Add the vector to the multispectral vector
+      positions.push_back(bandVec);
+    }
+  } catch (...) {
+    throw std::runtime_error("Could not parse the 3D vector array.");
+  }
+  return positions;
+}
+
+
+std::vector<std::vector<Rotation>> getQuatArray(HighFive::DataSet ds) {
+  std::vector<std::vector<Rotation>> quats;
+  std::vector<std::vector<std::vector<double>>> data;
+  try {
+    ds.read(data);
+    for (auto &band : data){
+      std::vector<Rotation> bandQuats;
+      for (auto &row : band) {
+        Rotation vec(row[0], row[1], row[2], row[3]);
+        bandQuats.push_back(vec);
+      }
+      quats.push_back(bandQuats);
+    }
+  } catch (...) {
+    throw std::runtime_error("Could not parse the quaternion hdf5 object.");
+  }
+  return quats;
+}
+
+
+std::vector<States> getInstrumentPosition(HighFive::File hdf5) {
+  try {
+    std::vector<States> statesVec;
+    HighFive::DataSet ds = hdf5.getDataSet("instrument_position/positions");
+    std::vector<std::vector<Vec3d>> positions = getVec3dArray(ds);
+
+    ds = hdf5.getDataSet("instrument_position/ephemeris_times");
+    std::vector<std::vector<double>> times = getMultiSpectralArray<double>(ds);
+
+    ds = hdf5.getDataSet("instrument_position/reference_frame");
+    std::vector<int> refFrames;
+    ds.read(refFrames);
+
+    try{
+      ds = hdf5.getDataSet("instrument_position/velocities");
+      std::vector<std::vector<Vec3d>> velocities = getVec3dArray(ds);
+      for (size_t i = 0; i < positions.size(); i++){
+        statesVec.push_back(States(times[i], positions[i], velocities[i], refFrames[i]));
+      }
+      return statesVec;
+    }catch(const HighFive::Exception&){
+      // Intentionally left blank. If no velocities, move on.
+    }
+
+    for (size_t i = 0; i < positions.size(); i++){
+      statesVec.push_back(States(times[i], positions[i], refFrames[i]));
+    }
+
+    return statesVec;
+  } catch (...) {
+    throw std::runtime_error("Could not parse the instrument position");
+  }
+}
+
+
+std::vector<States> getSunPosition(HighFive::File hdf5) {
+  try {
+    std::vector<States> statesVec;
+    HighFive::DataSet ds = hdf5.getDataSet("sun_position/positions");
+    std::vector<std::vector<Vec3d>> positions = getVec3dArray(ds);
+
+    ds = hdf5.getDataSet("sun_position/ephemeris_times");
+    std::vector<std::vector<double>> times = getMultiSpectralArray<double>(ds);
+
+
+    ds = hdf5.getDataSet("sun_position/reference_frame");
+    std::vector<int> refFrames;
+    ds.read(refFrames);
+
+    // If there are velocities, read them
+    try{
+      ds = hdf5.getDataSet("sun_position/velocities");
+      std::vector<std::vector<Vec3d>> velocities = getVec3dArray(ds);
+      for (size_t i = 0; i < positions.size(); i++){
+        statesVec.push_back(States(times[i], positions[i], velocities[i], refFrames[i]));
+      }
+      return statesVec;
+    }catch (const HighFive::Exception&){
+      // Intentionally left blank. If there are no velocities, just move on.
+    }
+
+    for (size_t i = 0; i < positions.size(); i++){
+      statesVec.push_back(States(times[i], positions[i], refFrames[i]));
+    }
+    return statesVec;
+
+  } catch (...) {
+    throw std::runtime_error("Could not parse the sun position");
+  }
+}
+
+std::vector<Orientations> getInstrumentPointing(HighFive::File hdf5) {
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("instrument_pointing/quaternions");
+    std::vector<std::vector<Rotation>> rotations = getQuatArray(ds);
+
+    ds = hdf5.getDataSet("instrument_pointing/ephemeris_times");
+    std::vector<std::vector<double>> times = getMultiSpectralArray<double>(ds);
+
+    ds = hdf5.getDataSet("instrument_pointing/angular_velocities");
+    std::vector<std::vector<Vec3d>> velocities = getVec3dArray(ds);
+
+    std::vector<std::vector<int>> constFrames;
+    try{
+      ds = hdf5.getDataSet("instrument_pointing/constant_frames");
+      constFrames = getMultiSpectralArray<int>(ds);
+    }catch(const HighFive::Exception&){
+      //Intentionally left blank. If there are no constant frames, move on.
+    }
+
+    std::vector<std::vector<int>> timeDepFrames;
+    try{
+      ds = hdf5.getDataSet("instrument_pointing/time_dependent_frames");
+      timeDepFrames = getMultiSpectralArray<int>(ds);
+    }catch(const HighFive::Exception&){
+      //Intentionally left blank. If there are no time dependent frames, move on.
+    }
+
+    // Give every band an empty frame list when the dataset is absent so the
+    // per-band indexing below stays in range.
+    if (constFrames.empty()) {
+      constFrames.resize(rotations.size());
+    }
+    if (timeDepFrames.empty()) {
+      timeDepFrames.resize(rotations.size());
+    }
+
+    std::vector<std::vector<double>> rotArray(rotations.size(), {1,0,0,0,1,0,0,0,1});
+    try{
+      ds = hdf5.getDataSet("instrument_pointing/constant_rotation");
+      rotArray = getMultiSpectralArray<double>(ds);
+    }catch(const HighFive::Exception&){
+      //Intentionally left blank. If there are no constant rotations, move on.
+    }
+
+    std::vector<Rotation> constRots;
+    for (auto &rots : rotArray){
+      Rotation constRot(rots);
+      constRots.push_back(constRot);
+    }
+
+
+    std::vector<Orientations> orientationsVec;
+    for (size_t i = 0 ; i < rotations.size(); i++){
+      orientationsVec.push_back(Orientations(rotations[i], times[i], velocities[i], constRots[i], constFrames[i], timeDepFrames[i]));
+    }
+
+    return orientationsVec;
+
+  } catch (...) {
+    throw std::runtime_error("Could not parse the instrument pointing");
+  }
+}
+
+std::vector<Orientations> getBodyRotation(HighFive::File hdf5) {
+  try {
+    HighFive::DataSet ds = hdf5.getDataSet("body_rotation/quaternions");
+    std::vector<std::vector<Rotation>> rotations = getQuatArray(ds);
+
+    ds = hdf5.getDataSet("body_rotation/ephemeris_times");
+    std::vector<std::vector<double>> times = getMultiSpectralArray<double>(ds);
+    ds = hdf5.getDataSet("body_rotation/angular_velocities");
+    std::vector<std::vector<Vec3d>> velocities = getVec3dArray(ds);
+
+    std::vector<std::vector<int>> constFrames;
+    try{
+      ds = hdf5.getDataSet("body_rotation/constant_frames");
+      constFrames = getMultiSpectralArray<int>(ds);
+    }catch(const HighFive::Exception&){
+      //Intentionally left blank. If no const frames, move on.
+    }
+
+    std::vector<std::vector<int>> timeDepFrames;
+    try{
+      ds = hdf5.getDataSet("body_rotation/time_dependent_frames");
+      timeDepFrames = getMultiSpectralArray<int>(ds);
+    }catch(const HighFive::Exception&){
+      //Intentionally left blank. If no time dependent frames, move on.
+    }
+
+    // Same guard as getInstrumentPointing: keep per-band indexing in range.
+    if (constFrames.empty()) {
+      constFrames.resize(rotations.size());
+    }
+    if (timeDepFrames.empty()) {
+      timeDepFrames.resize(rotations.size());
+    }
+
+    std::vector<std::vector<double>> rotArrays(rotations.size(), {1,0,0,0,1,0,0,0,1});
+    try{
+      ds = hdf5.getDataSet("body_rotation/constant_rotation");
+      rotArrays = getMultiSpectralArray<double>(ds);
+    }catch(const HighFive::Exception&){
+      //Intentionally left blank. If no constant rotations, move on.
+    }
+
+    std::vector<Rotation> constRots;
+    for (auto &rot : rotArrays){
+      Rotation constRot(rot);
+      constRots.push_back(constRot);
+    }
+
+    std::vector<Orientations> orientationsVec;
+    for (size_t i = 0 ; i < rotations.size(); i++){
+      Orientations orientation(rotations[i], times[i], velocities[i], constRots[i], constFrames[i], timeDepFrames[i]);
+      orientationsVec.push_back(orientation);
+    }
+    return orientationsVec;
+
+  } catch (...) {
+    throw std::runtime_error("Could not parse the body rotation");
+  }
+}
+
 }
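Reviewer note: putting the pieces together, the new interface can be smoke-tested end to end with a few lines. A minimal sketch, assuming the patch is built against HighFive and an HDF5 ISD in the notebook's layout already exists; the filename and the printed summary are illustrative only:

```cpp
#include <iostream>

#include <highfive/H5File.hpp>

#include "ale/Isd.h"

int main() {
  // Name is illustrative; any multispectral ISD written in the
  // notebook's transposed layout should work here.
  HighFive::File hdf5("lroc_transpose.h5", HighFive::File::ReadOnly);

  // One constructor call loads every band's parameters at once.
  ale::MultiSpectralIsd isd(hdf5);

  std::cout << "loaded " << isd.inst_pointing.size()
            << " bands of pointing data\n";
  return 0;
}
```

Because every getter throws std::runtime_error with a message naming the dataset it could not parse, a failed run of a sketch like this points directly at the offending key in the HDF5 file.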