Rugged.java

  1. /* Copyright 2013-2025 CS GROUP
  2.  * Licensed to CS GROUP (CS) under one or more
  3.  * contributor license agreements.  See the NOTICE file distributed with
  4.  * this work for additional information regarding copyright ownership.
  5.  * CS licenses this file to You under the Apache License, Version 2.0
  6.  * (the "License"); you may not use this file except in compliance with
  7.  * the License.  You may obtain a copy of the License at
  8.  *
  9.  *   http://www.apache.org/licenses/LICENSE-2.0
  10.  *
  11.  * Unless required by applicable law or agreed to in writing, software
  12.  * distributed under the License is distributed on an "AS IS" BASIS,
  13.  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14.  * See the License for the specific language governing permissions and
  15.  * limitations under the License.
  16.  */
  17. package org.orekit.rugged.api;

  18. import java.util.Collection;
  19. import java.util.HashMap;
  20. import java.util.Map;

  21. import org.hipparchus.analysis.differentiation.Derivative;
  22. import org.hipparchus.analysis.differentiation.DerivativeStructure;
  23. import org.hipparchus.geometry.euclidean.threed.FieldVector3D;
  24. import org.hipparchus.geometry.euclidean.threed.Vector3D;
  25. import org.hipparchus.util.FastMath;
  26. import org.hipparchus.util.MathArrays;
  27. import org.orekit.bodies.GeodeticPoint;
  28. import org.orekit.frames.Transform;
  29. import org.orekit.rugged.errors.DumpManager;
  30. import org.orekit.rugged.errors.RuggedException;
  31. import org.orekit.rugged.errors.RuggedInternalError;
  32. import org.orekit.rugged.errors.RuggedMessages;
  33. import org.orekit.rugged.intersection.IntersectionAlgorithm;
  34. import org.orekit.rugged.linesensor.LineSensor;
  35. import org.orekit.rugged.linesensor.SensorMeanPlaneCrossing;
  36. import org.orekit.rugged.linesensor.SensorPixel;
  37. import org.orekit.rugged.linesensor.SensorPixelCrossing;
  38. import org.orekit.rugged.refraction.AtmosphericRefraction;
  39. import org.orekit.rugged.utils.DerivativeGenerator;
  40. import org.orekit.rugged.utils.ExtendedEllipsoid;
  41. import org.orekit.rugged.utils.NormalizedGeodeticPoint;
  42. import org.orekit.rugged.utils.SpacecraftToObservedBody;
  43. import org.orekit.time.AbsoluteDate;
  44. import org.orekit.utils.Constants;
  45. import org.orekit.utils.PVCoordinates;

  46. /** Main class of Rugged library API.
  47.  * @see RuggedBuilder
  48.  * @author Luc Maisonobe
  49.  * @author Guylaine Prat
  50.  * @author Jonathan Guinet
  51.  * @author Lucie LabatAllee
  52.  */
  53. public class Rugged {

    /** Accuracy to use in the first stage of inverse location.
     * <p>
     * This accuracy is only used to locate the point within one
     * pixel, hence there is no point in choosing a too small value here.
     * </p>
     */
    private static final double COARSE_INVERSE_LOCATION_ACCURACY = 0.01;

    /** Maximum number of evaluations for crossing algorithms. */
    private static final int MAX_EVAL = 50;

    /** Threshold for pixel convergence in fixed point method
     * (for inverse location with atmospheric refraction correction). */
    private static final double PIXEL_CV_THRESHOLD = 1.e-4;

    /** Threshold for line convergence in fixed point method
     * (for inverse location with atmospheric refraction correction). */
    private static final double LINE_CV_THRESHOLD = 1.e-4;

    /** Reference ellipsoid. */
    private final ExtendedEllipsoid ellipsoid;

    /** Converter between spacecraft and body. */
    private final SpacecraftToObservedBody scToBody;

    /** Line sensors, keyed by sensor name. */
    private final Map<String, LineSensor> sensors;

    /** Mean plane crossing finders, keyed by sensor name
     * (cached because building one induces costly frames computation). */
    private final Map<String, SensorMeanPlaneCrossing> finders;

    /** DEM intersection algorithm. */
    private final IntersectionAlgorithm algorithm;

    /** Flag for light time correction. */
    private boolean lightTimeCorrection;

    /** Flag for aberration of light correction. */
    private boolean aberrationOfLightCorrection;

    /** Rugged name. */
    private final String name;

    /** Atmospheric refraction for line of sight correction (may be null if not used). */
    private AtmosphericRefraction atmosphericRefraction;

    /** Build a configured instance.
     * <p>
     * By default, the instance performs both light time correction (which refers
     * to ground point motion with respect to inertial frame) and aberration of
     * light correction (which refers to spacecraft proper velocity). Explicit calls
     * to {@link #setLightTimeCorrection(boolean) setLightTimeCorrection}
     * and {@link #setAberrationOfLightCorrection(boolean) setAberrationOfLightCorrection}
     * can be made after construction if these phenomena should not be corrected.
     * </p>
     * @param algorithm algorithm to use for Digital Elevation Model intersection
     * @param ellipsoid reference ellipsoid
     * @param lightTimeCorrection if true, the light travel time between ground
     * and spacecraft is compensated for more accurate location
     * @param aberrationOfLightCorrection if true, the aberration of light
     * is corrected for more accurate location
     * @param atmosphericRefraction the atmospheric refraction model to be used for more accurate location
     * @param scToBody transforms interpolator
     * @param sensors sensors
     * @param name Rugged name
     */
    Rugged(final IntersectionAlgorithm algorithm, final ExtendedEllipsoid ellipsoid, final boolean lightTimeCorrection,
           final boolean aberrationOfLightCorrection, final AtmosphericRefraction atmosphericRefraction,
           final SpacecraftToObservedBody scToBody, final Collection<LineSensor> sensors, final String name) {

        // space reference
        this.ellipsoid = ellipsoid;

        // orbit/attitude to body converter
        this.scToBody = scToBody;

        // intersection algorithm
        this.algorithm = algorithm;

        // Rugged name
        // @since 2.0
        this.name = name;

        // index the sensors by name for fast lookup
        this.sensors = new HashMap<String, LineSensor>();
        for (final LineSensor s : sensors) {
            this.sensors.put(s.getName(), s);
        }
        // mean plane crossing finders are created lazily and cached per sensor
        this.finders = new HashMap<String, SensorMeanPlaneCrossing>();

        this.lightTimeCorrection         = lightTimeCorrection;
        this.aberrationOfLightCorrection = aberrationOfLightCorrection;
        this.atmosphericRefraction       = atmosphericRefraction;
    }

  128.     /** Get the Rugged name.
  129.      * @return Rugged name
  130.      * @since 2.0
  131.      */
  132.     public String getName() {
  133.         return name;
  134.     }

  135.     /** Get the DEM intersection algorithm.
  136.      * @return DEM intersection algorithm
  137.      */
  138.     public IntersectionAlgorithm getAlgorithm() {
  139.         return algorithm;
  140.     }

  141.     /** Get the DEM intersection algorithm identifier.
  142.      * @return DEM intersection algorithm Id
  143.      * @since 2.2
  144.      */
  145.     public AlgorithmId getAlgorithmId() {
  146.         return algorithm.getAlgorithmId();
  147.     }

  148.     /** Get flag for light time correction.
  149.      * @return true if the light time between ground and spacecraft is
  150.      * compensated for more accurate location
  151.      */
  152.     public boolean isLightTimeCorrected() {
  153.         return lightTimeCorrection;
  154.     }

  155.     /** Get flag for aberration of light correction.
  156.      * @return true if the aberration of light time is corrected
  157.      * for more accurate location
  158.      */
  159.     public boolean isAberrationOfLightCorrected() {
  160.         return aberrationOfLightCorrection;
  161.     }

  162.     /** Get the atmospheric refraction model.
  163.      * @return atmospheric refraction model
  164.      * @since 2.0
  165.      */
  166.     public AtmosphericRefraction getRefractionCorrection() {
  167.         return atmosphericRefraction;
  168.     }

  169.     /** Get the line sensors.
  170.      * @return line sensors
  171.      */
  172.     public Collection<LineSensor> getLineSensors() {
  173.         return sensors.values();
  174.     }

  175.     /** Get the start of search time span.
  176.      * @return start of search time span
  177.      */
  178.     public AbsoluteDate getMinDate() {
  179.         return scToBody.getMinDate();
  180.     }

  181.     /** Get the end of search time span.
  182.      * @return end of search time span
  183.      */
  184.     public AbsoluteDate getMaxDate() {
  185.         return scToBody.getMaxDate();
  186.     }

  187.     /** Check if a date is in the supported range.
  188.      * <p>
  189.      * The support range is given by the {@code minDate} and
  190.      * {@code maxDate} construction parameters, with an {@code
  191.      * overshootTolerance} margin accepted (i.e. a date slightly
  192.      * before {@code minDate} or slightly after {@code maxDate}
  193.      * will be considered in range if the overshoot does not exceed
  194.      * the tolerance set at construction).
  195.      * </p>
  196.      * @param date date to check
  197.      * @return true if date is in the supported range
  198.      */
  199.     public boolean isInRange(final AbsoluteDate date) {
  200.         return scToBody.isInRange(date);
  201.     }

  202.     /** Get the observed body ellipsoid.
  203.      * @return observed body ellipsoid
  204.      */
  205.     public ExtendedEllipsoid getEllipsoid() {
  206.         return ellipsoid;
  207.     }

    /** Direct location of a sensor line.
     * <p>
     * Computes the ground position of every pixel of the given sensor line,
     * applying in order: aberration of light correction on the line of sight
     * (if enabled), light time correction on the DEM intersection (if enabled),
     * then atmospheric refraction correction (if a model was configured and
     * must be computed).
     * </p>
     * @param sensorName name of the line sensor
     * @param lineNumber number of the line to localize on ground
     * @return ground position of all pixels of the specified sensor line
     */
    public GeodeticPoint[] directLocation(final String sensorName, final double lineNumber) {

        final LineSensor   sensor = getLineSensor(sensorName);
        final Vector3D sensorPosition   = sensor.getPosition();
        final AbsoluteDate date   = sensor.getDate(lineNumber);

        // Compute the transforms for the date
        // from spacecraft to inertial
        final Transform    scToInert   = scToBody.getScToInertial(date);
        // from inertial to body
        final Transform    inertToBody = scToBody.getInertialToBody(date);

        // Compute spacecraft velocity in inertial frame
        // (velocity of the spacecraft frame origin, obtained by transforming a zero PV)
        final Vector3D spacecraftVelocity = scToInert.transformPVCoordinates(PVCoordinates.ZERO).getVelocity();
        // Compute sensor position in inertial frame
        // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
        final Vector3D pInert = scToInert.transformPosition(sensorPosition);

        // Compute location of each pixel
        final GeodeticPoint[] gp = new GeodeticPoint[sensor.getNbPixels()];
        for (int i = 0; i < sensor.getNbPixels(); ++i) {

            final Vector3D los = sensor.getLOS(date, i);
            // record this location request for dump/replay debugging
            DumpManager.dumpDirectLocation(date, sensorPosition, los, lightTimeCorrection,
                    aberrationOfLightCorrection, atmosphericRefraction != null);

            // compute the line of sight in inertial frame (without correction)
            final Vector3D obsLInert = scToInert.transformVector(los);
            final Vector3D lInert;

            if (aberrationOfLightCorrection) {
                // apply aberration of light correction on LOS
                lInert = applyAberrationOfLightCorrection(obsLInert, spacecraftVelocity);
            } else {
                // don't apply aberration of light correction on LOS
                lInert = obsLInert;
            }

            if (lightTimeCorrection) {
                // compute DEM intersection with light time correction
                // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
                gp[i] = computeWithLightTimeCorrection(date, sensorPosition, los, scToInert, inertToBody, pInert, lInert);

            } else {
                // compute DEM intersection without light time correction
                final Vector3D pBody = inertToBody.transformPosition(pInert);
                final Vector3D lBody = inertToBody.transformVector(lInert);
                gp[i] = algorithm.refineIntersection(ellipsoid, pBody, lBody,
                                                     algorithm.intersection(ellipsoid, pBody, lBody));
            }

            // compute with atmospheric refraction correction if necessary
            if (atmosphericRefraction != null && atmosphericRefraction.mustBeComputed()) {

                final Vector3D pBody;
                final Vector3D lBody;

                // Take into account the light time correction
                // @since 3.1
                if (lightTimeCorrection) {
                    // Transform sensor position in inertial frame to observed body
                    final Vector3D sP = inertToBody.transformPosition(pInert);
                    // Convert ground location of the pixel in cartesian coordinates
                    final Vector3D eP = ellipsoid.transform(gp[i]);
                    // Compute the light time correction (s)
                    final double deltaT = eP.distance(sP) / Constants.SPEED_OF_LIGHT;

                    // Apply shift due to light time correction: the body frame is taken
                    // at emission time, deltaT before the observation date
                    final Transform shiftedInertToBody = inertToBody.shiftedBy(-deltaT);

                    pBody = shiftedInertToBody.transformPosition(pInert);
                    lBody = shiftedInertToBody.transformVector(lInert);

                } else { // Light time correction NOT to be taken into account

                    pBody = inertToBody.transformPosition(pInert);
                    lBody = inertToBody.transformVector(lInert);

                } // end test on lightTimeCorrection

                // apply atmospheric refraction correction, starting from the
                // uncorrected intersection as initial guess
                gp[i] = atmosphericRefraction.applyCorrection(pBody, lBody, (NormalizedGeodeticPoint) gp[i], algorithm);

            }
            // record the result for dump/replay debugging
            DumpManager.dumpDirectLocationResult(gp[i]);
        }
        return gp;
    }

    /** Direct location of a single line-of-sight.
     * <p>
     * Same correction pipeline as {@link #directLocation(String, double)}, but
     * for one explicit line of sight instead of a whole sensor line: aberration
     * of light correction (if enabled), then light time correction (if enabled),
     * then atmospheric refraction correction (if a model was configured and
     * must be computed).
     * </p>
     * @param date date of the location
     * @param sensorPosition sensor position in spacecraft frame. For simplicity, due to the size of sensor,
     * we consider each pixel to be at sensor position
     * @param los normalized line-of-sight in spacecraft frame
     * @return ground position of intersection point between specified los and ground
     */
    public GeodeticPoint directLocation(final AbsoluteDate date, final Vector3D sensorPosition, final Vector3D los) {

        // record this location request for dump/replay debugging
        DumpManager.dumpDirectLocation(date, sensorPosition, los, lightTimeCorrection, aberrationOfLightCorrection,
                                       atmosphericRefraction != null);

        // Compute the transforms for the date
        // from spacecraft to inertial
        final Transform    scToInert   = scToBody.getScToInertial(date);
        // from inertial to body
        final Transform    inertToBody = scToBody.getInertialToBody(date);

        // Compute spacecraft velocity in inertial frame
        // (velocity of the spacecraft frame origin, obtained by transforming a zero PV)
        final Vector3D spacecraftVelocity = scToInert.transformPVCoordinates(PVCoordinates.ZERO).getVelocity();
        // Compute sensor position in inertial frame
        // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
        final Vector3D pInert    = scToInert.transformPosition(sensorPosition);

        // Compute the line of sight in inertial frame (without correction)
        final Vector3D obsLInert = scToInert.transformVector(los);

        final Vector3D lInert;
        if (aberrationOfLightCorrection) {
            // apply aberration of light correction on LOS
            lInert = applyAberrationOfLightCorrection(obsLInert, spacecraftVelocity);
        } else {
            // don't apply aberration of light correction on LOS
            lInert = obsLInert;
        }

        // Compute ground location of specified pixel according to light time correction flag
        final NormalizedGeodeticPoint gp;

        if (lightTimeCorrection) {
            // compute DEM intersection with light time correction
            // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
            gp = computeWithLightTimeCorrection(date, sensorPosition, los, scToInert, inertToBody, pInert, lInert);

        } else {
            // compute DEM intersection without light time correction
            final Vector3D pBody = inertToBody.transformPosition(pInert);
            final Vector3D lBody = inertToBody.transformVector(lInert);
            gp = algorithm.refineIntersection(ellipsoid, pBody, lBody,
                                              algorithm.intersection(ellipsoid, pBody, lBody));
        }

        // Compute ground location of specified pixel according to atmospheric refraction correction flag
        NormalizedGeodeticPoint result = gp;

        // compute the ground location with atmospheric correction if asked for
        if (atmosphericRefraction != null && atmosphericRefraction.mustBeComputed()) {

            final Vector3D pBody;
            final Vector3D lBody;

            // Take into account the light time correction
            // @since 3.1
            if (lightTimeCorrection) {
                // Transform sensor position in inertial frame to observed body
                final Vector3D sP = inertToBody.transformPosition(pInert);
                // Convert ground location of the pixel in cartesian coordinates
                final Vector3D eP = ellipsoid.transform(gp);
                // Compute the light time correction (s)
                final double deltaT = eP.distance(sP) / Constants.SPEED_OF_LIGHT;

                // Apply shift due to light time correction: the body frame is taken
                // at emission time, deltaT before the observation date
                final Transform shiftedInertToBody = inertToBody.shiftedBy(-deltaT);

                pBody = shiftedInertToBody.transformPosition(pInert);
                lBody = shiftedInertToBody.transformVector(lInert);

            } else { // Light time correction NOT to be taken into account

                pBody = inertToBody.transformPosition(pInert);
                lBody = inertToBody.transformVector(lInert);

            } // end test on lightTimeCorrection

            // apply atmospheric refraction correction, starting from the
            // uncorrected intersection as initial guess
            result = atmosphericRefraction.applyCorrection(pBody, lBody, gp, algorithm);

        } // end test on atmosphericRefraction != null

        // record the result for dump/replay debugging
        DumpManager.dumpDirectLocationResult(result);
        return result;
    }

  354.     /** Find the date at which sensor sees a ground point.
  355.      * <p>
  356.      * This method is a partial {@link #inverseLocation(String, GeodeticPoint, int, int) inverse location} focusing only on date.
  357.      * </p>
  358.      * <p>
  359.      * The point is given only by its latitude and longitude, the elevation is
  360.      * computed from the Digital Elevation Model.
  361.      * </p>
  362.      * <p>
  363.      * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
  364.      * are cached, because they induce costly frames computation. So these settings
  365.      * should not be tuned very finely and changed at each call, but should rather be
  366.      * a few thousand lines wide and refreshed only when needed. If for example an
  367.      * inverse location is roughly estimated to occur near line 53764 (for example
  368.      * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
  369.      * and {@code maxLine} could be set for example to 50000 and 60000, which would
  370.      * be OK also if next line inverse location is expected to occur near line 53780,
  371.      * and next one ... The setting could be changed for example to 55000 and 65000 when
  372.      * an inverse location is expected to occur after 55750. Of course, these values
  373.      * are only an example and should be adjusted depending on mission needs.
  374.      * </p>
  375.      * @param sensorName name of the line  sensor
  376.      * @param latitude ground point latitude (rad)
  377.      * @param longitude ground point longitude (rad)
  378.      * @param minLine minimum line number
  379.      * @param maxLine maximum line number
  380.      * @return date at which ground point is seen by line sensor
  381.      * @see #inverseLocation(String, double, double, int, int)
  382.      * @see org.orekit.rugged.utils.RoughVisibilityEstimator
  383.      */
  384.     public AbsoluteDate dateLocation(final String sensorName,
  385.                                      final double latitude, final double longitude,
  386.                                      final int minLine, final int maxLine) {

  387.         final GeodeticPoint groundPoint =
  388.                 new GeodeticPoint(latitude, longitude, algorithm.getElevation(latitude, longitude));
  389.         return dateLocation(sensorName, groundPoint, minLine, maxLine);
  390.     }

  391.     /** Find the date at which sensor sees a ground point.
  392.      * <p>
  393.      * This method is a partial {@link #inverseLocation(String,
  394.      * GeodeticPoint, int, int) inverse location} focusing only on date.
  395.      * </p>
  396.      * <p>
  397.      * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
  398.      * are cached, because they induce costly frames computation. So these settings
  399.      * should not be tuned very finely and changed at each call, but should rather be
  400.      * a few thousand lines wide and refreshed only when needed. If for example an
  401.      * inverse location is roughly estimated to occur near line 53764 (for example
  402.      * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
  403.      * and {@code maxLine} could be set for example to 50000 and 60000, which would
  404.      * be OK also if next line inverse location is expected to occur near line 53780,
  405.      * and next one ... The setting could be changed for example to 55000 and 65000 when
  406.      * an inverse location is expected to occur after 55750. Of course, these values
  407.      * are only an example and should be adjusted depending on mission needs.
  408.      * </p>
  409.      * @param sensorName name of the line sensor
  410.      * @param point point to localize
  411.      * @param minLine minimum line number
  412.      * @param maxLine maximum line number
  413.      * @return date at which ground point is seen by line sensor
  414.      * @see #inverseLocation(String, GeodeticPoint, int, int)
  415.      * @see org.orekit.rugged.utils.RoughVisibilityEstimator
  416.      */
  417.     public AbsoluteDate dateLocation(final String sensorName, final GeodeticPoint point,
  418.                                      final int minLine, final int maxLine) {

  419.         final LineSensor sensor = getLineSensor(sensorName);
  420.         final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);

  421.         // find approximately the sensor line at which ground point crosses sensor mean plane
  422.         final Vector3D   target = ellipsoid.transform(point);
  423.         final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
  424.         if (crossingResult == null) {
  425.             // target is out of search interval
  426.             return null;
  427.         } else {
  428.             return sensor.getDate(crossingResult.getLine());
  429.         }
  430.     }

  431.     /** Inverse location of a ground point.
  432.      * <p>
  433.      * The point is given only by its latitude and longitude, the elevation is
  434.      * computed from the Digital Elevation Model.
  435.      * </p>
  436.      * <p>
  437.      * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
  438.      * are cached, because they induce costly frames computation. So these settings
  439.      * should not be tuned very finely and changed at each call, but should rather be
  440.      * a few thousand lines wide and refreshed only when needed. If for example an
  441.      * inverse location is roughly estimated to occur near line 53764 (for example
  442.      * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
  443.      * and {@code maxLine} could be set for example to 50000 and 60000, which would
  444.      * be OK also if next line inverse location is expected to occur near line 53780,
  445.      * and next one ... The setting could be changed for example to 55000 and 65000 when
  446.      * an inverse location is expected to occur after 55750. Of course, these values
  447.      * are only an example and should be adjusted depending on mission needs.
  448.      * </p>
  449.      * @param sensorName name of the line  sensor
  450.      * @param latitude ground point latitude (rad)
  451.      * @param longitude ground point longitude (rad)
  452.      * @param minLine minimum line number
  453.      * @param maxLine maximum line number
  454.      * @return sensor pixel seeing ground point, or null if ground point cannot
  455.      * be seen between the prescribed line numbers
  456.      * @see org.orekit.rugged.utils.RoughVisibilityEstimator
  457.      */
  458.     public SensorPixel inverseLocation(final String sensorName,
  459.                                        final double latitude, final double longitude,
  460.                                        final int minLine,  final int maxLine) {

  461.         final GeodeticPoint groundPoint = new GeodeticPoint(latitude, longitude, algorithm.getElevation(latitude, longitude));
  462.         return inverseLocation(sensorName, groundPoint, minLine, maxLine);
  463.     }

  464.     /** Inverse location of a point.
  465.      * <p>
  466.      * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
  467.      * are cached, because they induce costly frames computation. So these settings
  468.      * should not be tuned very finely and changed at each call, but should rather be
  469.      * a few thousand lines wide and refreshed only when needed. If for example an
  470.      * inverse location is roughly estimated to occur near line 53764 (for example
  471.      * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
  472.      * and {@code maxLine} could be set for example to 50000 and 60000, which would
  473.      * be OK also if next line inverse location is expected to occur near line 53780,
  474.      * and next one ... The setting could be changed for example to 55000 and 65000 when
  475.      * an inverse location is expected to occur after 55750. Of course, these values
  476.      * are only an example and should be adjusted depending on mission needs.
  477.      * </p>
  478.      * @param sensorName name of the line sensor
  479.      * @param point geodetic point to localize
  480.      * @param minLine minimum line number where the search will be performed
  481.      * @param maxLine maximum line number where the search will be performed
  482.      * @return sensor pixel seeing point, or null if point cannot be seen between the
  483.      * prescribed line numbers
  484.      * @see #dateLocation(String, GeodeticPoint, int, int)
  485.      * @see org.orekit.rugged.utils.RoughVisibilityEstimator
  486.      */
  487.     public SensorPixel inverseLocation(final String sensorName, final GeodeticPoint point,
  488.                                        final int minLine, final int maxLine) {

  489.         final LineSensor sensor = getLineSensor(sensorName);
  490.         DumpManager.dumpInverseLocation(sensor, point, ellipsoid, minLine, maxLine, lightTimeCorrection,
  491.                                         aberrationOfLightCorrection, atmosphericRefraction != null);

  492.         final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);
  493.         DumpManager.dumpSensorMeanPlane(planeCrossing);

  494.         if (atmosphericRefraction == null || !atmosphericRefraction.mustBeComputed()) {
  495.             // Compute inverse location WITHOUT atmospheric refraction
  496.             return findSensorPixelWithoutAtmosphere(point, sensor, planeCrossing);
  497.         } else {
  498.             // Compute inverse location WITH atmospheric refraction
  499.             return findSensorPixelWithAtmosphere(point, sensor, minLine, maxLine);
  500.         }
  501.     }

    /** Apply aberration of light correction (for direct location).
     * @param obsLInert line of sight in inertial frame
     * @param spacecraftVelocity spacecraft velocity in inertial frame
     * @return line of sight with aberration of light correction
     */
    private Vector3D applyAberrationOfLightCorrection(final Vector3D obsLInert, final Vector3D spacecraftVelocity) {

        // As the spacecraft velocity is small with respect to speed of light,
        // we use classical velocity addition and not relativistic velocity addition
        // we look for a positive k such that: c * lInert + vsat = k * obsLInert
        // with lInert normalized
        // This leads to the quadratic equation a*k^2 + 2*b*k + c = 0 with:
        final double a = obsLInert.getNormSq();
        final double b = -Vector3D.dotProduct(obsLInert, spacecraftVelocity);
        final double c = spacecraftVelocity.getNormSq() - Constants.SPEED_OF_LIGHT * Constants.SPEED_OF_LIGHT;

        // a > 0 and c < 0, so the discriminant is positive and there is one positive root
        final double s = FastMath.sqrt(b * b - a * c);

        // Only the k > 0 are kept as solutions (the solutions: -(s+b)/a and c/(s-b) are useless)
        // the two algebraically equivalent forms are chosen to avoid cancellation errors
        final double k = (b > 0) ? -c / (s + b) : (s - b) / a;

        // corrected LOS: lInert = (k * obsLInert - vsat) / c
        final Vector3D lInert = new Vector3D( k / Constants.SPEED_OF_LIGHT, obsLInert, -1.0 / Constants.SPEED_OF_LIGHT, spacecraftVelocity);
        return lInert;
    }

  522.     /** Compute the DEM intersection with light time correction.
  523.      * @param date date of the los
  524.      * @param sensorPosition sensor position in spacecraft frame
  525.      * @param los los in spacecraft frame
  526.      * @param scToInert transform for the date from spacecraft to inertial
  527.      * @param inertToBody transform for the date from inertial to body
  528.      * @param pInert sensor position in inertial frame
  529.      * @param lInert line of sight in inertial frame (with light time correction if asked for)
  530.      * @return geodetic point with light time correction
  531.      */
  532.     private NormalizedGeodeticPoint computeWithLightTimeCorrection(final AbsoluteDate date,
  533.                                                                    final Vector3D sensorPosition, final Vector3D los,
  534.                                                                    final Transform scToInert, final Transform inertToBody,
  535.                                                                    final Vector3D pInert, final Vector3D lInert) {

  536.         // Compute the transform between spacecraft and observed body
  537.         final Transform approximate = new Transform(date, scToInert, inertToBody);

  538.         // Transform LOS in spacecraft frame to observed body
  539.         final Vector3D  sL       = approximate.transformVector(los);
  540.         // Transform sensor position in spacecraft frame to observed body
  541.         final Vector3D  sP       = approximate.transformPosition(sensorPosition);

  542.         // Compute point intersecting ground (= the ellipsoid) along the pixel LOS
  543.         final Vector3D  eP1      = ellipsoid.transform(ellipsoid.pointOnGround(sP, sL, 0.0));

  544.         // Compute light time time correction (vs the ellipsoid) (s)
  545.         final double    deltaT1  = eP1.distance(sP) / Constants.SPEED_OF_LIGHT;

  546.         // Apply shift due to light time correction (vs the ellipsoid)
  547.         final Transform shifted1 = inertToBody.shiftedBy(-deltaT1);

  548.         // Search the intersection of LOS (taking into account the light time correction if asked for) with DEM
  549.         final NormalizedGeodeticPoint gp1  = algorithm.intersection(ellipsoid,
  550.                                                                     shifted1.transformPosition(pInert),
  551.                                                                     shifted1.transformVector(lInert));

  552.         // Convert the geodetic point (intersection of LOS with DEM) in cartesian coordinates
  553.         final Vector3D  eP2      = ellipsoid.transform(gp1);

  554.         // Compute the light time correction (vs DEM) (s)
  555.         final double    deltaT2  = eP2.distance(sP) / Constants.SPEED_OF_LIGHT;

  556.         // Apply shift due to light time correction (vs DEM)
  557.         final Transform shifted2 = inertToBody.shiftedBy(-deltaT2);

  558.         return algorithm.refineIntersection(ellipsoid,
  559.                                             shifted2.transformPosition(pInert),
  560.                                             shifted2.transformVector(lInert),
  561.                                             gp1);
  562.     }

  563.     /**
  564.      * Find the sensor pixel WITHOUT atmospheric refraction correction.
  565.      * @param point geodetic point to localize
  566.      * @param sensor the line sensor
  567.      * @param planeCrossing the sensor mean plane crossing
  568.      * @return the sensor pixel crossing or null if cannot be found
  569.      * @since 2.1
  570.      */
  571.     private SensorPixel findSensorPixelWithoutAtmosphere(final GeodeticPoint point,
  572.                                                          final LineSensor sensor, final SensorMeanPlaneCrossing planeCrossing) {

  573.         // find approximately the sensor line at which ground point crosses sensor mean plane
  574.         final Vector3D target = ellipsoid.transform(point);
  575.         final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
  576.         if (crossingResult == null) {
  577.             // target is out of search interval
  578.             return null;
  579.         }

  580.         // find approximately the pixel along this sensor line
  581.         final SensorPixelCrossing pixelCrossing =
  582.                 new SensorPixelCrossing(sensor, planeCrossing.getMeanPlaneNormal(),
  583.                                         crossingResult.getTargetDirection(),
  584.                                         MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
  585.         final double coarsePixel = pixelCrossing.locatePixel(crossingResult.getDate());
  586.         if (Double.isNaN(coarsePixel)) {
  587.             // target is out of search interval
  588.             return null;
  589.         }

  590.         // fix line by considering the closest pixel exact position and line-of-sight
  591.         // (this pixel might point towards a direction slightly above or below the mean sensor plane)
  592.         final int      lowIndex       = FastMath.max(0, FastMath.min(sensor.getNbPixels() - 2, (int) FastMath.floor(coarsePixel)));
  593.         final Vector3D lowLOS         = sensor.getLOS(crossingResult.getDate(), lowIndex);
  594.         final Vector3D highLOS        = sensor.getLOS(crossingResult.getDate(), lowIndex + 1);
  595.         final Vector3D localZ         = Vector3D.crossProduct(lowLOS, highLOS).normalize();
  596.         final double   beta           = FastMath.acos(Vector3D.dotProduct(crossingResult.getTargetDirection(), localZ));
  597.         final double   s              = Vector3D.dotProduct(crossingResult.getTargetDirectionDerivative(), localZ);
  598.         final double   betaDer        = -s / FastMath.sqrt(1 - s * s);
  599.         final double   deltaL         = (0.5 * FastMath.PI - beta) / betaDer;
  600.         final double   fixedLine      = crossingResult.getLine() + deltaL;
  601.         final Vector3D fixedDirection = new Vector3D(1, crossingResult.getTargetDirection(),
  602.                                                      deltaL, crossingResult.getTargetDirectionDerivative()).normalize();

  603.         // fix neighbouring pixels
  604.         final AbsoluteDate fixedDate  = sensor.getDate(fixedLine);
  605.         final Vector3D fixedX         = sensor.getLOS(fixedDate, lowIndex);
  606.         final Vector3D fixedZ         = Vector3D.crossProduct(fixedX, sensor.getLOS(fixedDate, lowIndex + 1));
  607.         final Vector3D fixedY         = Vector3D.crossProduct(fixedZ, fixedX);

  608.         // fix pixel
  609.         final double pixelWidth = FastMath.atan2(Vector3D.dotProduct(highLOS,        fixedY),
  610.                                                  Vector3D.dotProduct(highLOS,        fixedX));
  611.         final double alpha      = FastMath.atan2(Vector3D.dotProduct(fixedDirection, fixedY),
  612.                                                  Vector3D.dotProduct(fixedDirection, fixedX));
  613.         final double fixedPixel = lowIndex + alpha / pixelWidth;

  614.         final SensorPixel result = new SensorPixel(fixedLine, fixedPixel);
  615.         DumpManager.dumpInverseLocationResult(result);

  616.         return result;
  617.     }

  618.     /**
  619.      * Find the sensor pixel WITH atmospheric refraction correction.
  620.      * @param point geodetic point to localize
  621.      * @param sensor the line sensor
  622.      * @param minLine minimum line number where the search will be performed
  623.      * @param maxLine maximum line number where the search will be performed
  624.      * @return the sensor pixel crossing or null if cannot be found
  625.      * @since 2.1
  626.      */
  627.     private SensorPixel findSensorPixelWithAtmosphere(final GeodeticPoint point,
  628.                                                       final LineSensor sensor, final int minLine, final int maxLine) {

  629.         // TBN: there is no direct way to compute the inverse location.
  630.         // The method is based on an interpolation grid associated with the fixed point method

  631.         final String sensorName = sensor.getName();

  632.         // Compute a correction grid (at sensor level)
  633.         // ===========================================
  634.         // Need to be computed only once for a given sensor (with the same minLine and maxLine)
  635.         if (atmosphericRefraction.getBifPixel() == null || atmosphericRefraction.getBifLine() == null || // lazy evaluation
  636.             !atmosphericRefraction.isSameContext(sensorName, minLine, maxLine)) { // Must be recomputed if the context changed

  637.             // Definition of a regular grid (at sensor level)
  638.             atmosphericRefraction.configureCorrectionGrid(sensor, minLine, maxLine);

  639.             // Get the grid nodes
  640.             final int nbPixelGrid = atmosphericRefraction.getComputationParameters().getNbPixelGrid();
  641.             final int nbLineGrid = atmosphericRefraction.getComputationParameters().getNbLineGrid();
  642.             final double[] pixelGrid = atmosphericRefraction.getComputationParameters().getUgrid();
  643.             final double[] lineGrid = atmosphericRefraction.getComputationParameters().getVgrid();

  644.             // Computation, for the sensor grid, of the direct location WITH atmospheric refraction
  645.             // (full computation)
  646.             atmosphericRefraction.reactivateComputation();
  647.             final GeodeticPoint[][] geodeticGridWithAtmosphere = computeDirectLocOnGridWithAtmosphere(pixelGrid, lineGrid, sensor);
  648.             // pixelGrid and lineGrid are the nodes where the direct loc is computed WITH atmosphere

  649.             // Computation of the inverse location WITHOUT atmospheric refraction for the grid nodes
  650.             atmosphericRefraction.deactivateComputation();
  651.             final SensorPixel[][] sensorPixelGridInverseWithout = computeInverseLocOnGridWithoutAtmosphere(geodeticGridWithAtmosphere,
  652.                                                             nbPixelGrid, nbLineGrid, sensor, minLine, maxLine);
  653.             atmosphericRefraction.reactivateComputation();

  654.             // Compute the grid correction functions (for pixel and line)
  655.             atmosphericRefraction.computeGridCorrectionFunctions(sensorPixelGridInverseWithout);
  656.         }

  657.         // Fixed point method
  658.         // ==================
  659.         // Initialization
  660.         // --------------
  661.         // Deactivate the dump because no need to keep intermediate computations of inverse loc (can be regenerate)
  662.         final Boolean wasSuspended = DumpManager.suspend();

  663.         // compute the sensor pixel on the desired ground point WITHOUT atmosphere
  664.         atmosphericRefraction.deactivateComputation();
  665.         final SensorPixel sp0 = inverseLocation(sensorName, point, minLine, maxLine);
  666.         atmosphericRefraction.reactivateComputation();
  667.         // Reactivate the dump
  668.         DumpManager.resume(wasSuspended);

  669.         if (sp0 == null) {
  670.             // In order for the dump to end nicely
  671.             DumpManager.endNicely();

  672.             // Impossible to find the sensor pixel in the given range lines (without atmosphere)
  673.             throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_RANGE_LINES, minLine, maxLine);
  674.         }

  675.         // set up the starting point of the fixed point method
  676.         final double pixel0 = sp0.getPixelNumber();
  677.         final double line0 = sp0.getLineNumber();
  678.         // Needed data for the dump
  679.         sensor.dumpRate(line0);

  680.         // Apply fixed point method until convergence in pixel and line
  681.         // ------------------------------------------------------------
  682.         // compute the first (pixel, line) value:
  683.         // initial sensor pixel value + correction due to atmosphere at this same sensor pixel
  684.         double corrPixelPrevious =  pixel0 + atmosphericRefraction.getBifPixel().value(pixel0, line0);
  685.         double corrLinePrevious = line0 + atmosphericRefraction.getBifLine().value(pixel0, line0);

  686.         double deltaCorrPixel = Double.POSITIVE_INFINITY;
  687.         double deltaCorrLine = Double.POSITIVE_INFINITY;

  688.         while (deltaCorrPixel > PIXEL_CV_THRESHOLD && deltaCorrLine > LINE_CV_THRESHOLD) {
  689.             // Compute the current (pixel, line) value =
  690.             // initial sensor pixel value + correction due to atmosphere on the previous sensor pixel
  691.             final double corrPixelCurrent = pixel0 + atmosphericRefraction.getBifPixel().value(corrPixelPrevious, corrLinePrevious);
  692.             final double corrLineCurrent = line0 + atmosphericRefraction.getBifLine().value(corrPixelPrevious, corrLinePrevious);

  693.             // Compute the delta in pixel and line to check the convergence
  694.             deltaCorrPixel = FastMath.abs(corrPixelCurrent - corrPixelPrevious);
  695.             deltaCorrLine = FastMath.abs(corrLineCurrent - corrLinePrevious);

  696.             // Store the (pixel, line) for next loop
  697.             corrPixelPrevious = corrPixelCurrent;
  698.             corrLinePrevious = corrLineCurrent;
  699.         }
  700.         // The sensor pixel is found !
  701.         final SensorPixel sensorPixelWithAtmosphere = new SensorPixel(corrLinePrevious, corrPixelPrevious);

  702.         // Dump the found sensorPixel
  703.         DumpManager.dumpInverseLocationResult(sensorPixelWithAtmosphere);

  704.         return sensorPixelWithAtmosphere;
  705.     }

    /** Compute the inverse location WITHOUT atmospheric refraction for the geodetic points
     * associated to the sensor grid nodes.
     * <p>
     * The dump is suspended for the whole loop: these intermediate inverse locations can
     * be regenerated and need not be recorded.
     * @param groundGridWithAtmosphere ground grid found for sensor grid nodes with atmosphere
     *        (an entry may be null when the direct location could not be computed at that node)
     * @param nbPixelGrid size of the pixel grid
     * @param nbLineGrid size of the line grid
     * @param sensor the line sensor
     * @param minLine minimum line number where the search will be performed
     * @param maxLine maximum line number where the search will be performed
     * @return the sensor pixel grid computed without atmosphere (entries are null where the
     *         ground grid had no point)
     * @throws RuggedException if an inverse location result is null or falls outside the
     *         sensor pixel range (with margin)
     * @since 2.1
     */
    private SensorPixel[][] computeInverseLocOnGridWithoutAtmosphere(final GeodeticPoint[][] groundGridWithAtmosphere,
                                                                     final int nbPixelGrid, final int nbLineGrid,
                                                                     final LineSensor sensor, final int minLine, final int maxLine) {

        // Deactivate the dump because no need to keep intermediate computations of inverse loc (can be regenerate)
        final Boolean wasSuspended = DumpManager.suspend();

        final SensorPixel[][] sensorPixelGrid = new SensorPixel[nbPixelGrid][nbLineGrid];
        final String sensorName = sensor.getName();

        for (int uIndex = 0; uIndex < nbPixelGrid; uIndex++) {
            for (int vIndex = 0; vIndex < nbLineGrid; vIndex++) {

                // Check if the geodetic point exists
                if (groundGridWithAtmosphere[uIndex][vIndex] != null) {
                    final GeodeticPoint groundPoint = groundGridWithAtmosphere[uIndex][vIndex];
                    final double currentLat = groundPoint.getLatitude();
                    final double currentLon = groundPoint.getLongitude();

                    try {
                        // Compute the inverse location for the current node
                        sensorPixelGrid[uIndex][vIndex] = inverseLocation(sensorName, currentLat, currentLon, minLine, maxLine);

                    } catch (RuggedException re) { // This should never happen
                        // In order for the dump to end nicely
                        DumpManager.endNicely();
                        // wrap and rethrow as an internal error, preserving the cause
                        throw new RuggedInternalError(re);
                    }

                    // Check if the pixel is inside the sensor (with a margin) OR if the inverse location was impossible (null result)
                    if (!pixelIsInside(sensorPixelGrid[uIndex][vIndex], sensor)) {

                        // In order for the dump to end nicely
                        DumpManager.endNicely();

                        if (sensorPixelGrid[uIndex][vIndex] == null) {
                            // Impossible to find the sensor pixel in the given range lines
                            throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_RANGE_LINES, minLine, maxLine);
                        } else {
                            // Impossible to find the sensor pixel: it is outside [-margin, margin + nbPixels - 1]
                            final double invLocationMargin = atmosphericRefraction.getComputationParameters().getInverseLocMargin();
                            throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_PIXELS_LINE, sensorPixelGrid[uIndex][vIndex].getPixelNumber(),
                                                      -invLocationMargin, invLocationMargin + sensor.getNbPixels() - 1, invLocationMargin);
                        }
                    }

                } else { // groundGrid[uIndex][vIndex] == null: impossible to compute inverse loc because ground point not defined

                    sensorPixelGrid[uIndex][vIndex] = null;

                } // groundGrid[uIndex][vIndex] != null
            } // end loop vIndex
        } // end loop uIndex

        // Reactivate the dump
        DumpManager.resume(wasSuspended);

        // The sensor grid computed WITHOUT atmospheric refraction correction
        return sensorPixelGrid;
    }

  763.     /** Check if pixel is inside the sensor with a margin.
  764.      * @param pixel pixel to check (may be null if not found)
  765.      * @param sensor the line sensor
  766.      * @return true if the pixel is inside the sensor
  767.      */
  768.     private boolean pixelIsInside(final SensorPixel pixel, final LineSensor sensor) {
  769.         // Get the inverse location margin
  770.         final double invLocationMargin = atmosphericRefraction.getComputationParameters().getInverseLocMargin();

  771.         return pixel != null && pixel.getPixelNumber() >= -invLocationMargin && pixel.getPixelNumber() < invLocationMargin + sensor.getNbPixels() - 1;
  772.     }

  773.     /** Computation, for the sensor pixels grid, of the direct location WITH atmospheric refraction.
  774.      * (full computation)
  775.      * @param pixelGrid the pixel grid
  776.      * @param lineGrid the line grid
  777.      * @param sensor the line sensor
  778.      * @return the ground grid computed with atmosphere
  779.      * @since 2.1
  780.      */
  781.     private GeodeticPoint[][] computeDirectLocOnGridWithAtmosphere(final double[] pixelGrid, final double[] lineGrid,
  782.                                                                    final LineSensor sensor) {

  783.         // Deactivate the dump because no need to keep intermediate computations of direct loc (can be regenerate)
  784.         final Boolean wasSuspended = DumpManager.suspend();

  785.         final int nbPixelGrid = pixelGrid.length;
  786.         final int nbLineGrid = lineGrid.length;
  787.         final GeodeticPoint[][] groundGridWithAtmosphere = new GeodeticPoint[nbPixelGrid][nbLineGrid];
  788.         final Vector3D sensorPosition = sensor.getPosition();

  789.         for (int uIndex = 0; uIndex < nbPixelGrid; uIndex++) {
  790.             final double pixelNumber = pixelGrid[uIndex];
  791.             for (int vIndex = 0; vIndex < nbLineGrid; vIndex++) {
  792.                 final double lineNumber = lineGrid[vIndex];
  793.                 final AbsoluteDate date = sensor.getDate(lineNumber);
  794.                 final Vector3D los = sensor.getLOS(date, pixelNumber);
  795.                 try {
  796.                     // Compute the direct location for the current node
  797.                     groundGridWithAtmosphere[uIndex][vIndex] = directLocation(date, sensorPosition, los);

  798.                 } catch (RuggedException re) { // This should never happen
  799.                     // In order for the dump to end nicely
  800.                     DumpManager.endNicely();
  801.                     throw new RuggedInternalError(re);
  802.                 }
  803.             } // end loop vIndex
  804.         } // end loop uIndex

  805.         // Reactivate the dump
  806.         DumpManager.resume(wasSuspended);

  807.         // The ground grid computed WITH atmospheric refraction correction
  808.         return groundGridWithAtmosphere;
  809.     }

  810.     /** Compute distances between two line sensors.
  811.      * @param sensorA line sensor A
  812.      * @param dateA current date for sensor A
  813.      * @param pixelA pixel index for sensor A
  814.      * @param scToBodyA spacecraft to body transform for sensor A
  815.      * @param sensorB line sensor B
  816.      * @param dateB current date for sensor B
  817.      * @param pixelB pixel index for sensor B
  818.      * @return distances computed between LOS and to the ground
  819.      * @since 2.0
  820.      */
  821.     public double[] distanceBetweenLOS(final LineSensor sensorA, final AbsoluteDate dateA, final double pixelA,
  822.                                        final SpacecraftToObservedBody scToBodyA,
  823.                                        final LineSensor sensorB, final AbsoluteDate dateB, final double pixelB) {

  824.         // Compute the approximate transform between spacecraft and observed body
  825.         // from Rugged instance A
  826.         final Transform scToInertA = scToBodyA.getScToInertial(dateA);
  827.         final Transform inertToBodyA = scToBodyA.getInertialToBody(dateA);
  828.         final Transform transformScToBodyA = new Transform(dateA, scToInertA, inertToBodyA);

  829.         // from (current) Rugged instance B
  830.         final Transform scToInertB = scToBody.getScToInertial(dateB);
  831.         final Transform inertToBodyB = scToBody.getInertialToBody(dateB);
  832.         final Transform transformScToBodyB = new Transform(dateB, scToInertB, inertToBodyB);

  833.         // Get sensors LOS into local frame
  834.         final Vector3D vALocal = sensorA.getLOS(dateA, pixelA);
  835.         final Vector3D vBLocal = sensorB.getLOS(dateB, pixelB);

  836.         // Position of sensors into local frame
  837.         final Vector3D sALocal = sensorA.getPosition(); // S_a : sensorA 's position
  838.         final Vector3D sBLocal = sensorB.getPosition(); // S_b : sensorB 's position

  839.         // Get sensors position and LOS into body frame
  840.         final Vector3D sA = transformScToBodyA.transformPosition(sALocal); // S_a : sensorA's position
  841.         final Vector3D vA = transformScToBodyA.transformVector(vALocal);   // V_a : line of sight's vectorA
  842.         final Vector3D sB = transformScToBodyB.transformPosition(sBLocal); // S_b : sensorB's position
  843.         final Vector3D vB = transformScToBodyB.transformVector(vBLocal);   // V_b : line of sight's vectorB

  844.         // Compute distance
  845.         final Vector3D vBase = sB.subtract(sA);            // S_b - S_a
  846.         final double svA = Vector3D.dotProduct(vBase, vA); // SV_a = (S_b - S_a).V_a
  847.         final double svB = Vector3D.dotProduct(vBase, vB); // SV_b = (S_b - S_a).V_b

  848.         final double vAvB = Vector3D.dotProduct(vA, vB); // V_a.V_b

  849.         // Compute lambda_b = (SV_a * V_a.V_b - SV_b) / (1 - (V_a.V_b)²)
  850.         final double lambdaB = (svA * vAvB - svB) / (1 - vAvB * vAvB);

  851.         // Compute lambda_a = SV_a + lambdaB * V_a.V_b
  852.         final double lambdaA = svA + lambdaB * vAvB;

  853.         // Compute vector M_a = S_a + lambda_a * V_a
  854.         final Vector3D mA = sA.add(vA.scalarMultiply(lambdaA));
  855.         // Compute vector M_b = S_b + lambda_b * V_b
  856.         final Vector3D mB = sB.add(vB.scalarMultiply(lambdaB));

  857.         // Compute vector M_a -> M_B for which distance between LOS is minimum
  858.         final Vector3D vDistanceMin = mB.subtract(mA); // M_b - M_a

  859.         // Compute vector from mid point of vector M_a -> M_B to the ground (corresponds to minimum elevation)
  860.         final Vector3D midPoint = (mB.add(mA)).scalarMultiply(0.5);

  861.         // Get the euclidean norms to compute the minimum distances: between LOS and to the ground
  862.         final double[] distances = {vDistanceMin.getNorm(), midPoint.getNorm()};

  863.         return distances;
  864.     }

  865.     /** Compute distances between two line sensors with derivatives.
  866.      * @param <T> derivative type
  867.      * @param sensorA line sensor A
  868.      * @param dateA current date for sensor A
  869.      * @param pixelA pixel index for sensor A
  870.      * @param scToBodyA spacecraftToBody transform for sensor A
  871.      * @param sensorB line sensor B
  872.      * @param dateB current date for sensor B
  873.      * @param pixelB pixel index for sensor B
  874.      * @param generator generator to use for building {@link DerivativeStructure} instances
  875.      * @return distances computed, with derivatives, between LOS and to the ground
  876.      * @see #distanceBetweenLOS(LineSensor, AbsoluteDate, double, SpacecraftToObservedBody, LineSensor, AbsoluteDate, double)
  877.      */
  878.     public <T extends Derivative<T>> T[] distanceBetweenLOSderivatives(
  879.                                  final LineSensor sensorA, final AbsoluteDate dateA, final double pixelA,
  880.                                  final SpacecraftToObservedBody scToBodyA,
  881.                                  final LineSensor sensorB, final AbsoluteDate dateB, final double pixelB,
  882.                                  final DerivativeGenerator<T> generator) {

  883.         // Compute the approximate transforms between spacecraft and observed body
  884.         // from Rugged instance A
  885.         final Transform scToInertA = scToBodyA.getScToInertial(dateA);
  886.         final Transform inertToBodyA = scToBodyA.getInertialToBody(dateA);
  887.         final Transform transformScToBodyA = new Transform(dateA, scToInertA, inertToBodyA);

  888.         // from (current) Rugged instance B
  889.         final Transform scToInertB = scToBody.getScToInertial(dateB);
  890.         final Transform inertToBodyB = scToBody.getInertialToBody(dateB);
  891.         final Transform transformScToBodyB = new Transform(dateB, scToInertB, inertToBodyB);

  892.         // Get sensors LOS into local frame
  893.         final FieldVector3D<T> vALocal = sensorA.getLOSDerivatives(dateA, pixelA, generator);
  894.         final FieldVector3D<T> vBLocal = sensorB.getLOSDerivatives(dateB, pixelB, generator);

  895.         // Get sensors LOS into body frame
  896.         final FieldVector3D<T> vA = transformScToBodyA.transformVector(vALocal); // V_a : line of sight's vectorA
  897.         final FieldVector3D<T> vB = transformScToBodyB.transformVector(vBLocal); // V_b : line of sight's vectorB

  898.         // Position of sensors into local frame
  899.         final Vector3D sAtmp = sensorA.getPosition();
  900.         final Vector3D sBtmp = sensorB.getPosition();

  901.         final T scaleFactor = FieldVector3D.dotProduct(vA.normalize(), vA.normalize()); // V_a.V_a=1

  902.         // Build a vector from the position and a scale factor (equals to 1).
  903.         // The vector built will be scaleFactor * sAtmp for example.
  904.         final FieldVector3D<T> sALocal = new FieldVector3D<>(scaleFactor, sAtmp);
  905.         final FieldVector3D<T> sBLocal = new FieldVector3D<>(scaleFactor, sBtmp);

  906.         // Get sensors position into body frame
  907.         final FieldVector3D<T> sA = transformScToBodyA.transformPosition(sALocal); // S_a : sensorA 's position
  908.         final FieldVector3D<T> sB = transformScToBodyB.transformPosition(sBLocal); // S_b : sensorB 's position

  909.         // Compute distance
  910.         final FieldVector3D<T> vBase = sB.subtract(sA);    // S_b - S_a
  911.         final T svA = FieldVector3D.dotProduct(vBase, vA); // SV_a = (S_b - S_a).V_a
  912.         final T svB = FieldVector3D.dotProduct(vBase, vB); // SV_b = (S_b - S_a).V_b

  913.         final T vAvB = FieldVector3D.dotProduct(vA, vB); // V_a.V_b

  914.         // Compute lambda_b = (SV_a * V_a.V_b - SV_b) / (1 - (V_a.V_b)²)
  915.         final T lambdaB = (svA.multiply(vAvB).subtract(svB)).divide(vAvB.multiply(vAvB).subtract(1).negate());

  916.         // Compute lambda_a = SV_a + lambdaB * V_a.V_b
  917.         final T lambdaA = vAvB.multiply(lambdaB).add(svA);

  918.         // Compute vector M_a:
  919.         final FieldVector3D<T> mA = sA.add(vA.scalarMultiply(lambdaA)); // M_a = S_a + lambda_a * V_a
  920.         // Compute vector M_b
  921.         final FieldVector3D<T> mB = sB.add(vB.scalarMultiply(lambdaB)); // M_b = S_b + lambda_b * V_b

  922.         // Compute vector M_a -> M_B for which distance between LOS is minimum
  923.         final FieldVector3D<T> vDistanceMin = mB.subtract(mA); // M_b - M_a

  924.         // Compute vector from mid point of vector M_a -> M_B to the ground (corresponds to minimum elevation)
  925.         final FieldVector3D<T> midPoint = (mB.add(mA)).scalarMultiply(0.5);

  926.         // Get the euclidean norms to compute the minimum distances:
  927.         // between LOS
  928.         final T dMin = vDistanceMin.getNorm();
  929.         // to the ground
  930.         final T dCentralBody = midPoint.getNorm();

  931.         final T[] ret = MathArrays.buildArray(dMin.getField(), 2);
  932.         ret[0] = dMin;
  933.         ret[1] = dCentralBody;
  934.         return ret;

  935.     }


  936.     /** Get the mean plane crossing finder for a sensor.
  937.      * @param sensorName name of the line sensor
  938.      * @param minLine minimum line number
  939.      * @param maxLine maximum line number
  940.      * @return mean plane crossing finder
  941.      */
  942.     private SensorMeanPlaneCrossing getPlaneCrossing(final String sensorName,
  943.                                                      final int minLine, final int maxLine) {

  944.         final LineSensor sensor = getLineSensor(sensorName);
  945.         SensorMeanPlaneCrossing planeCrossing = finders.get(sensorName);
  946.         if (planeCrossing == null ||
  947.             planeCrossing.getMinLine() != minLine ||
  948.             planeCrossing.getMaxLine() != maxLine) {

  949.             // create a new finder for the specified sensor and range
  950.             planeCrossing = new SensorMeanPlaneCrossing(sensor, scToBody, minLine, maxLine,
  951.                                                         lightTimeCorrection, aberrationOfLightCorrection,
  952.                                                         MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);

  953.             // store the finder, in order to reuse it
  954.             // (and save some computation done in its constructor)
  955.             setPlaneCrossing(planeCrossing);

  956.         }

  957.         return planeCrossing;
  958.     }

  959.     /** Set the mean plane crossing finder for a sensor.
  960.      * @param planeCrossing plane crossing finder
  961.      */
  962.     private void setPlaneCrossing(final SensorMeanPlaneCrossing planeCrossing) {
  963.         finders.put(planeCrossing.getSensor().getName(), planeCrossing);
  964.     }

    /** Inverse location of a point with derivatives.
     * <p>
     * The returned array holds the sensor line at index 0 and the sensor pixel
     * at index 1, both carrying the partial derivatives built by {@code generator}.
     * </p>
     * @param <T> derivative type
     * @param sensorName name of the line sensor
     * @param point point to localize
     * @param minLine minimum line number
     * @param maxLine maximum line number
     * @param generator generator to use for building {@link Derivative} instances
     * @return sensor pixel seeing point with derivatives, or null if point cannot be seen between the
     * prescribed line numbers
     * @see #inverseLocation(String, GeodeticPoint, int, int)
     * @since 2.0
     */
    public <T extends Derivative<T>> T[] inverseLocationDerivatives(final String sensorName,
                                                                    final GeodeticPoint point,
                                                                    final int minLine,
                                                                    final int maxLine,
                                                                    final DerivativeGenerator<T> generator) {

        final LineSensor sensor = getLineSensor(sensorName);

        // reuse (or build) the mean plane crossing finder for the [minLine, maxLine] range
        final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);

        // find approximately the sensor line at which ground point crosses sensor mean plane
        final Vector3D   target = ellipsoid.transform(point);
        final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
        if (crossingResult == null) {
            // target is out of search interval
            return null;
        }

        // find approximately the pixel along this sensor line
        final SensorPixelCrossing pixelCrossing =
                new SensorPixelCrossing(sensor, planeCrossing.getMeanPlaneNormal(),
                                        crossingResult.getTargetDirection(),
                                        MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
        final double coarsePixel = pixelCrossing.locatePixel(crossingResult.getDate());
        if (Double.isNaN(coarsePixel)) {
            // target is out of search interval
            return null;
        }

        // fix line by considering the closest pixel exact position and line-of-sight
        // (this pixel might point towards a direction slightly above or below the mean sensor plane)
        // lowIndex is clamped to [0, nbPixels - 2] so that lowIndex + 1 is always a valid pixel
        final int lowIndex = FastMath.max(0, FastMath.min(sensor.getNbPixels() - 2, (int) FastMath.floor(coarsePixel)));
        final FieldVector3D<T> lowLOS =
                        sensor.getLOSDerivatives(crossingResult.getDate(), lowIndex, generator);
        final FieldVector3D<T> highLOS = sensor.getLOSDerivatives(crossingResult.getDate(), lowIndex + 1, generator);
        // localZ: unit normal to the plane spanned by the two neighbouring pixels' lines of sight
        final FieldVector3D<T> localZ = FieldVector3D.crossProduct(lowLOS, highLOS).normalize();
        // beta: angle between the target direction and localZ (π/2 when exactly in the local pixel plane)
        final T beta         = FieldVector3D.dotProduct(crossingResult.getTargetDirection(), localZ).acos();
        // s: rate of change along the line coordinate of the dot product underlying beta
        final T s            = FieldVector3D.dotProduct(crossingResult.getTargetDirectionDerivative(), localZ);
        // minusBetaDer = s / sqrt(1 - s²), used as -dβ/dl in the correction step below
        final T minusBetaDer = s.divide(s.multiply(s).subtract(1).negate().sqrt());
        // Newton-like step bringing beta back to π/2, i.e. target direction into the local pixel plane
        final T deltaL       = beta.subtract(0.5 * FastMath.PI) .divide(minusBetaDer);
        final T fixedLine    = deltaL.add(crossingResult.getLine());
        // first-order update of the target direction, consistent with the corrected line
        final FieldVector3D<T> fixedDirection =
                        new FieldVector3D<>(deltaL.getField().getOne(), crossingResult.getTargetDirection(),
                                            deltaL, crossingResult.getTargetDirectionDerivative()).normalize();

        // fix neighbouring pixels: rebuild a local (X, Y, Z) frame at the corrected line date
        final AbsoluteDate fixedDate  = sensor.getDate(fixedLine.getValue());
        final FieldVector3D<T> fixedX = sensor.getLOSDerivatives(fixedDate, lowIndex, generator);
        final FieldVector3D<T> fixedZ = FieldVector3D.crossProduct(fixedX, sensor.getLOSDerivatives(fixedDate, lowIndex + 1, generator));
        final FieldVector3D<T> fixedY = FieldVector3D.crossProduct(fixedZ, fixedX);

        // fix pixel: interpolate the fractional pixel from angles measured in the local frame
        final T hY         = FieldVector3D.dotProduct(highLOS, fixedY);
        final T hX         = FieldVector3D.dotProduct(highLOS, fixedX);
        // pixelWidth: angle from fixedX to the neighbouring pixel's LOS, i.e. one pixel's angular width
        final T pixelWidth = hY.atan2(hX);
        final T fY         = FieldVector3D.dotProduct(fixedDirection, fixedY);
        final T fX         = FieldVector3D.dotProduct(fixedDirection, fixedX);
        // alpha: angle from fixedX to the corrected target direction
        final T alpha      = fY.atan2(fX);
        final T fixedPixel = alpha.divide(pixelWidth).add(lowIndex);

        // pack line (index 0) and pixel (index 1), both carrying derivatives
        final T[] ret = MathArrays.buildArray(fixedPixel.getField(), 2);
        ret[0] = fixedLine;
        ret[1] = fixedPixel;
        return ret;

    }

  1034.     /** Get transform from spacecraft to inertial frame.
  1035.      * @param date date of the transform
  1036.      * @return transform from spacecraft to inertial frame
  1037.      */
  1038.     public Transform getScToInertial(final AbsoluteDate date) {
  1039.         return scToBody.getScToInertial(date);
  1040.     }

  1041.     /** Get transform from inertial frame to observed body frame.
  1042.      * @param date date of the transform
  1043.      * @return transform from inertial frame to observed body frame
  1044.      */
  1045.     public Transform getInertialToBody(final AbsoluteDate date) {
  1046.         return scToBody.getInertialToBody(date);
  1047.     }

  1048.     /** Get transform from observed body frame to inertial frame.
  1049.      * @param date date of the transform
  1050.      * @return transform from observed body frame to inertial frame
  1051.      */
  1052.     public Transform getBodyToInertial(final AbsoluteDate date) {
  1053.         return scToBody.getBodyToInertial(date);
  1054.     }

  1055.     /** Get a sensor.
  1056.      * @param sensorName sensor name
  1057.      * @return selected sensor
  1058.      */
  1059.     public LineSensor getLineSensor(final String sensorName) {

  1060.         final LineSensor sensor = sensors.get(sensorName);
  1061.         if (sensor == null) {
  1062.             throw new RuggedException(RuggedMessages.UNKNOWN_SENSOR, sensorName);
  1063.         }
  1064.         return sensor;
  1065.     }

  1066.     /** Get converter between spacecraft and body.
  1067.      * @return the scToBody
  1068.      * @since 2.0
  1069.      */
  1070.     public SpacecraftToObservedBody getScToBody() {
  1071.         return scToBody;
  1072.     }
  1073. }