1 /* Copyright 2013-2025 CS GROUP
2 * Licensed to CS GROUP (CS) under one or more
3 * contributor license agreements. See the NOTICE file distributed with
4 * this work for additional information regarding copyright ownership.
5 * CS licenses this file to You under the Apache License, Version 2.0
6 * (the "License"); you may not use this file except in compliance with
7 * the License. You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17 package org.orekit.rugged.api;
18
19 import java.util.Collection;
20 import java.util.HashMap;
21 import java.util.Map;
22
23 import org.hipparchus.analysis.differentiation.Derivative;
24 import org.hipparchus.analysis.differentiation.DerivativeStructure;
25 import org.hipparchus.geometry.euclidean.threed.FieldVector3D;
26 import org.hipparchus.geometry.euclidean.threed.Vector3D;
27 import org.hipparchus.util.FastMath;
28 import org.hipparchus.util.MathArrays;
29 import org.orekit.bodies.GeodeticPoint;
30 import org.orekit.frames.Transform;
31 import org.orekit.rugged.errors.DumpManager;
32 import org.orekit.rugged.errors.RuggedException;
33 import org.orekit.rugged.errors.RuggedInternalError;
34 import org.orekit.rugged.errors.RuggedMessages;
35 import org.orekit.rugged.intersection.IntersectionAlgorithm;
36 import org.orekit.rugged.linesensor.LineSensor;
37 import org.orekit.rugged.linesensor.SensorMeanPlaneCrossing;
38 import org.orekit.rugged.linesensor.SensorPixel;
39 import org.orekit.rugged.linesensor.SensorPixelCrossing;
40 import org.orekit.rugged.refraction.AtmosphericRefraction;
41 import org.orekit.rugged.utils.DerivativeGenerator;
42 import org.orekit.rugged.utils.ExtendedEllipsoid;
43 import org.orekit.rugged.utils.NormalizedGeodeticPoint;
44 import org.orekit.rugged.utils.SpacecraftToObservedBody;
45 import org.orekit.time.AbsoluteDate;
46 import org.orekit.utils.Constants;
47 import org.orekit.utils.PVCoordinates;
48
49 /** Main class of Rugged library API.
50 * @see RuggedBuilder
51 * @author Luc Maisonobe
52 * @author Guylaine Prat
53 * @author Jonathan Guinet
54 * @author Lucie LabatAllee
55 */
56 public class Rugged {
57
    /** Accuracy to use in the first stage of inverse location.
     * <p>
     * This accuracy is only used to locate the point within one
     * pixel, hence there is no point in choosing a too small value here.
     * </p>
     */
    private static final double COARSE_INVERSE_LOCATION_ACCURACY = 0.01;

    /** Maximum number of evaluations for crossing algorithms. */
    private static final int MAX_EVAL = 50;

    /** Threshold for pixel convergence in fixed point method
     * (for inverse location with atmospheric refraction correction). */
    private static final double PIXEL_CV_THRESHOLD = 1.e-4;

    /** Threshold for line convergence in fixed point method
     * (for inverse location with atmospheric refraction correction). */
    private static final double LINE_CV_THRESHOLD = 1.e-4;

    /** Reference ellipsoid. */
    private final ExtendedEllipsoid ellipsoid;

    /** Converter between spacecraft and body. */
    private final SpacecraftToObservedBody scToBody;

    /** Sensors, indexed by their name. */
    private final Map<String, LineSensor> sensors;

    /** Mean plane crossing finders, indexed by sensor name
     * (cached because building them induces costly frames computations). */
    private final Map<String, SensorMeanPlaneCrossing> finders;

    /** DEM intersection algorithm. */
    private final IntersectionAlgorithm algorithm;

    /** Flag for light time correction. */
    private boolean lightTimeCorrection;

    /** Flag for aberration of light correction. */
    private boolean aberrationOfLightCorrection;

    /** Rugged name.
     * @since 2.0
     */
    private final String name;

    /** Atmospheric refraction for line of sight correction (null if not configured).
     * @since 2.0
     */
    private AtmosphericRefraction atmosphericRefraction;
103
    /** Build a configured instance.
     * <p>
     * By default, the instance performs both light time correction (which refers
     * to ground point motion with respect to inertial frame) and aberration of
     * light correction (which refers to spacecraft proper velocity). Explicit calls
     * to {@link #setLightTimeCorrection(boolean) setLightTimeCorrection}
     * and {@link #setAberrationOfLightCorrection(boolean) setAberrationOfLightCorrection}
     * can be made after construction if these phenomena should not be corrected.
     * </p>
     * @param algorithm algorithm to use for Digital Elevation Model intersection
     * @param ellipsoid reference ellipsoid
     * @param lightTimeCorrection if true, the light travel time between ground
     * and spacecraft is compensated for more accurate location
     * @param aberrationOfLightCorrection if true, the aberration of light
     * is corrected for more accurate location
     * @param atmosphericRefraction the atmospheric refraction model to be used for more accurate location
     * @param scToBody transforms interpolator
     * @param sensors sensors
     * @param name Rugged name
     */
    Rugged(final IntersectionAlgorithm algorithm, final ExtendedEllipsoid ellipsoid, final boolean lightTimeCorrection,
           final boolean aberrationOfLightCorrection, final AtmosphericRefraction atmosphericRefraction,
           final SpacecraftToObservedBody scToBody, final Collection<LineSensor> sensors, final String name) {


        // space reference
        this.ellipsoid = ellipsoid;

        // orbit/attitude to body converter
        this.scToBody = scToBody;

        // intersection algorithm
        this.algorithm = algorithm;

        // Rugged name
        // @since 2.0
        this.name = name;

        // index the sensors by name for fast retrieval
        this.sensors = new HashMap<String, LineSensor>();
        for (final LineSensor s : sensors) {
            this.sensors.put(s.getName(), s);
        }
        // mean plane crossing finders are built lazily, on first use per sensor
        this.finders = new HashMap<String, SensorMeanPlaneCrossing>();

        this.lightTimeCorrection = lightTimeCorrection;
        this.aberrationOfLightCorrection = aberrationOfLightCorrection;
        this.atmosphericRefraction = atmosphericRefraction;
    }
152
    /** Get the Rugged name.
     * @return Rugged name
     * @since 2.0
     */
    public String getName() {
        return name;
    }

    /** Get the DEM intersection algorithm.
     * @return DEM intersection algorithm
     */
    public IntersectionAlgorithm getAlgorithm() {
        return algorithm;
    }

    /** Get the DEM intersection algorithm identifier.
     * @return DEM intersection algorithm Id
     * @since 2.2
     */
    public AlgorithmId getAlgorithmId() {
        return algorithm.getAlgorithmId();
    }

    /** Get flag for light time correction.
     * @return true if the light time between ground and spacecraft is
     * compensated for more accurate location
     */
    public boolean isLightTimeCorrected() {
        return lightTimeCorrection;
    }

    /** Get flag for aberration of light correction.
     * @return true if the aberration of light is corrected
     * for more accurate location
     */
    public boolean isAberrationOfLightCorrected() {
        return aberrationOfLightCorrection;
    }

    /** Get the atmospheric refraction model.
     * @return atmospheric refraction model (null if no model was configured)
     * @since 2.0
     */
    public AtmosphericRefraction getRefractionCorrection() {
        return atmosphericRefraction;
    }

    /** Get the line sensors.
     * @return line sensors
     */
    public Collection<LineSensor> getLineSensors() {
        return sensors.values();
    }

    /** Get the start of search time span.
     * @return start of search time span
     */
    public AbsoluteDate getMinDate() {
        return scToBody.getMinDate();
    }

    /** Get the end of search time span.
     * @return end of search time span
     */
    public AbsoluteDate getMaxDate() {
        return scToBody.getMaxDate();
    }

    /** Check if a date is in the supported range.
     * <p>
     * The support range is given by the {@code minDate} and
     * {@code maxDate} construction parameters, with an {@code
     * overshootTolerance} margin accepted (i.e. a date slightly
     * before {@code minDate} or slightly after {@code maxDate}
     * will be considered in range if the overshoot does not exceed
     * the tolerance set at construction).
     * </p>
     * @param date date to check
     * @return true if date is in the supported range
     */
    public boolean isInRange(final AbsoluteDate date) {
        return scToBody.isInRange(date);
    }

    /** Get the observed body ellipsoid.
     * @return observed body ellipsoid
     */
    public ExtendedEllipsoid getEllipsoid() {
        return ellipsoid;
    }
243
    /** Direct location of a sensor line.
     * <p>
     * All pixels of the line are localized on ground, applying the corrections
     * selected at construction: light time, aberration of light and, if a model
     * was configured and activated, atmospheric refraction.
     * </p>
     * @param sensorName name of the line sensor
     * @param lineNumber number of the line to localize on ground
     * @return ground position of all pixels of the specified sensor line
     */
    public GeodeticPoint[] directLocation(final String sensorName, final double lineNumber) {

        final LineSensor sensor = getLineSensor(sensorName);
        final Vector3D sensorPosition = sensor.getPosition();
        final AbsoluteDate date = sensor.getDate(lineNumber);

        // Compute the transform for the date
        // from spacecraft to inertial
        final Transform scToInert = scToBody.getScToInertial(date);
        // from inertial to body
        final Transform inertToBody = scToBody.getInertialToBody(date);

        // Compute spacecraft velocity in inertial frame
        // (transforming the zero position/velocity yields the frame motion, i.e. the spacecraft velocity)
        final Vector3D spacecraftVelocity = scToInert.transformPVCoordinates(PVCoordinates.ZERO).getVelocity();
        // Compute sensor position in inertial frame
        // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
        final Vector3D pInert = scToInert.transformPosition(sensorPosition);

        // Compute location of each pixel
        final GeodeticPoint[] gp = new GeodeticPoint[sensor.getNbPixels()];
        for (int i = 0; i < sensor.getNbPixels(); ++i) {

            final Vector3D los = sensor.getLOS(date, i);
            DumpManager.dumpDirectLocation(date, sensorPosition, los, lightTimeCorrection,
                                           aberrationOfLightCorrection, atmosphericRefraction != null);

            // compute the line of sight in inertial frame (without correction)
            final Vector3D obsLInert = scToInert.transformVector(los);
            final Vector3D lInert;

            if (aberrationOfLightCorrection) {
                // apply aberration of light correction on LOS
                lInert = applyAberrationOfLightCorrection(obsLInert, spacecraftVelocity);
            } else {
                // don't apply aberration of light correction on LOS
                lInert = obsLInert;
            }

            if (lightTimeCorrection) {
                // compute DEM intersection with light time correction
                // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
                gp[i] = computeWithLightTimeCorrection(date, sensorPosition, los, scToInert, inertToBody, pInert, lInert);

            } else {
                // compute DEM intersection without light time correction
                final Vector3D pBody = inertToBody.transformPosition(pInert);
                final Vector3D lBody = inertToBody.transformVector(lInert);
                gp[i] = algorithm.refineIntersection(ellipsoid, pBody, lBody,
                                                     algorithm.intersection(ellipsoid, pBody, lBody));
            }

            // compute with atmospheric refraction correction if necessary
            if (atmosphericRefraction != null && atmosphericRefraction.mustBeComputed()) {

                final Vector3D pBody;
                final Vector3D lBody;

                // Take into account the light time correction
                // @since 3.1
                if (lightTimeCorrection) {
                    // Transform sensor position in inertial frame to observed body
                    final Vector3D sP = inertToBody.transformPosition(pInert);
                    // Convert ground location of the pixel in cartesian coordinates
                    final Vector3D eP = ellipsoid.transform(gp[i]);
                    // Compute the light time correction (s)
                    final double deltaT = eP.distance(sP) / Constants.SPEED_OF_LIGHT;

                    // Apply shift due to light time correction
                    final Transform shiftedInertToBody = inertToBody.shiftedBy(-deltaT);

                    pBody = shiftedInertToBody.transformPosition(pInert);
                    lBody = shiftedInertToBody.transformVector(lInert);

                } else { // Light time correction NOT to be taken into account

                    pBody = inertToBody.transformPosition(pInert);
                    lBody = inertToBody.transformVector(lInert);

                } // end test on lightTimeCorrection

                // apply atmospheric refraction correction
                // (cast is safe: gp[i] was produced by the intersection algorithm or by
                // computeWithLightTimeCorrection, both of which return NormalizedGeodeticPoint)
                gp[i] = atmosphericRefraction.applyCorrection(pBody, lBody, (NormalizedGeodeticPoint) gp[i], algorithm);

            }
            DumpManager.dumpDirectLocationResult(gp[i]);
        }
        return gp;
    }
337
    /** Direct location of a single line-of-sight.
     * <p>
     * The corrections selected at construction (light time, aberration of light,
     * atmospheric refraction) are applied the same way as in the sensor-line variant.
     * </p>
     * @param date date of the location
     * @param sensorPosition sensor position in spacecraft frame. For simplicity, due to the size of sensor,
     * we consider each pixel to be at sensor position
     * @param los normalized line-of-sight in spacecraft frame
     * @return ground position of intersection point between specified los and ground
     */
    public GeodeticPoint directLocation(final AbsoluteDate date, final Vector3D sensorPosition, final Vector3D los) {

        DumpManager.dumpDirectLocation(date, sensorPosition, los, lightTimeCorrection, aberrationOfLightCorrection,
                                       atmosphericRefraction != null);

        // Compute the transforms for the date
        // from spacecraft to inertial
        final Transform scToInert = scToBody.getScToInertial(date);
        // from inertial to body
        final Transform inertToBody = scToBody.getInertialToBody(date);

        // Compute spacecraft velocity in inertial frame
        // (transforming the zero position/velocity yields the frame motion, i.e. the spacecraft velocity)
        final Vector3D spacecraftVelocity = scToInert.transformPVCoordinates(PVCoordinates.ZERO).getVelocity();
        // Compute sensor position in inertial frame
        // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
        final Vector3D pInert = scToInert.transformPosition(sensorPosition);

        // Compute the line of sight in inertial frame (without correction)
        final Vector3D obsLInert = scToInert.transformVector(los);

        final Vector3D lInert;
        if (aberrationOfLightCorrection) {
            // apply aberration of light correction on LOS
            lInert = applyAberrationOfLightCorrection(obsLInert, spacecraftVelocity);
        } else {
            // don't apply aberration of light correction on LOS
            lInert = obsLInert;
        }

        // Compute ground location of specified pixel according to light time correction flag
        final NormalizedGeodeticPoint gp;

        if (lightTimeCorrection) {
            // compute DEM intersection with light time correction
            // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
            gp = computeWithLightTimeCorrection(date, sensorPosition, los, scToInert, inertToBody, pInert, lInert);

        } else {
            // compute DEM intersection without light time correction
            final Vector3D pBody = inertToBody.transformPosition(pInert);
            final Vector3D lBody = inertToBody.transformVector(lInert);
            gp = algorithm.refineIntersection(ellipsoid, pBody, lBody,
                                              algorithm.intersection(ellipsoid, pBody, lBody));
        }

        // Compute ground location of specified pixel according to atmospheric refraction correction flag
        NormalizedGeodeticPoint result = gp;

        // compute the ground location with atmospheric correction if asked for
        if (atmosphericRefraction != null && atmosphericRefraction.mustBeComputed()) {

            final Vector3D pBody;
            final Vector3D lBody;

            // Take into account the light time correction
            // @since 3.1
            if (lightTimeCorrection) {
                // Transform sensor position in inertial frame to observed body
                final Vector3D sP = inertToBody.transformPosition(pInert);
                // Convert ground location of the pixel in cartesian coordinates
                final Vector3D eP = ellipsoid.transform(gp);
                // Compute the light time correction (s)
                final double deltaT = eP.distance(sP) / Constants.SPEED_OF_LIGHT;

                // Apply shift due to light time correction
                final Transform shiftedInertToBody = inertToBody.shiftedBy(-deltaT);

                pBody = shiftedInertToBody.transformPosition(pInert);
                lBody = shiftedInertToBody.transformVector(lInert);

            } else { // Light time correction NOT to be taken into account

                pBody = inertToBody.transformPosition(pInert);
                lBody = inertToBody.transformVector(lInert);

            } // end test on lightTimeCorrection

            // apply atmospheric refraction correction
            result = atmosphericRefraction.applyCorrection(pBody, lBody, gp, algorithm);

        } // end test on atmosphericRefraction != null

        DumpManager.dumpDirectLocationResult(result);
        return result;
    }
430
431 /** Find the date at which sensor sees a ground point.
432 * <p>
433 * This method is a partial {@link #inverseLocation(String, GeodeticPoint, int, int) inverse location} focusing only on date.
434 * </p>
435 * <p>
436 * The point is given only by its latitude and longitude, the elevation is
437 * computed from the Digital Elevation Model.
438 * </p>
439 * <p>
440 * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
441 * are cached, because they induce costly frames computation. So these settings
442 * should not be tuned very finely and changed at each call, but should rather be
443 * a few thousand lines wide and refreshed only when needed. If for example an
444 * inverse location is roughly estimated to occur near line 53764 (for example
445 * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
446 * and {@code maxLine} could be set for example to 50000 and 60000, which would
447 * be OK also if next line inverse location is expected to occur near line 53780,
448 * and next one ... The setting could be changed for example to 55000 and 65000 when
449 * an inverse location is expected to occur after 55750. Of course, these values
450 * are only an example and should be adjusted depending on mission needs.
451 * </p>
452 * @param sensorName name of the line sensor
453 * @param latitude ground point latitude (rad)
454 * @param longitude ground point longitude (rad)
455 * @param minLine minimum line number
456 * @param maxLine maximum line number
457 * @return date at which ground point is seen by line sensor
458 * @see #inverseLocation(String, double, double, int, int)
459 * @see org.orekit.rugged.utils.RoughVisibilityEstimator
460 */
461 public AbsoluteDate dateLocation(final String sensorName,
462 final double latitude, final double longitude,
463 final int minLine, final int maxLine) {
464
465 final GeodeticPoint groundPoint =
466 new GeodeticPoint(latitude, longitude, algorithm.getElevation(latitude, longitude));
467 return dateLocation(sensorName, groundPoint, minLine, maxLine);
468 }
469
470 /** Find the date at which sensor sees a ground point.
471 * <p>
472 * This method is a partial {@link #inverseLocation(String,
473 * GeodeticPoint, int, int) inverse location} focusing only on date.
474 * </p>
475 * <p>
476 * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
477 * are cached, because they induce costly frames computation. So these settings
478 * should not be tuned very finely and changed at each call, but should rather be
479 * a few thousand lines wide and refreshed only when needed. If for example an
480 * inverse location is roughly estimated to occur near line 53764 (for example
481 * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
482 * and {@code maxLine} could be set for example to 50000 and 60000, which would
483 * be OK also if next line inverse location is expected to occur near line 53780,
484 * and next one ... The setting could be changed for example to 55000 and 65000 when
485 * an inverse location is expected to occur after 55750. Of course, these values
486 * are only an example and should be adjusted depending on mission needs.
487 * </p>
488 * @param sensorName name of the line sensor
489 * @param point point to localize
490 * @param minLine minimum line number
491 * @param maxLine maximum line number
492 * @return date at which ground point is seen by line sensor
493 * @see #inverseLocation(String, GeodeticPoint, int, int)
494 * @see org.orekit.rugged.utils.RoughVisibilityEstimator
495 */
496 public AbsoluteDate dateLocation(final String sensorName, final GeodeticPoint point,
497 final int minLine, final int maxLine) {
498
499 final LineSensor sensor = getLineSensor(sensorName);
500 final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);
501
502 // find approximately the sensor line at which ground point crosses sensor mean plane
503 final Vector3D target = ellipsoid.transform(point);
504 final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
505 if (crossingResult == null) {
506 // target is out of search interval
507 return null;
508 } else {
509 return sensor.getDate(crossingResult.getLine());
510 }
511 }
512
513 /** Inverse location of a ground point.
514 * <p>
515 * The point is given only by its latitude and longitude, the elevation is
516 * computed from the Digital Elevation Model.
517 * </p>
518 * <p>
519 * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
520 * are cached, because they induce costly frames computation. So these settings
521 * should not be tuned very finely and changed at each call, but should rather be
522 * a few thousand lines wide and refreshed only when needed. If for example an
523 * inverse location is roughly estimated to occur near line 53764 (for example
524 * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
525 * and {@code maxLine} could be set for example to 50000 and 60000, which would
526 * be OK also if next line inverse location is expected to occur near line 53780,
527 * and next one ... The setting could be changed for example to 55000 and 65000 when
528 * an inverse location is expected to occur after 55750. Of course, these values
529 * are only an example and should be adjusted depending on mission needs.
530 * </p>
531 * @param sensorName name of the line sensor
532 * @param latitude ground point latitude (rad)
533 * @param longitude ground point longitude (rad)
534 * @param minLine minimum line number
535 * @param maxLine maximum line number
536 * @return sensor pixel seeing ground point, or null if ground point cannot
537 * be seen between the prescribed line numbers
538 * @see org.orekit.rugged.utils.RoughVisibilityEstimator
539 */
540 public SensorPixel inverseLocation(final String sensorName,
541 final double latitude, final double longitude,
542 final int minLine, final int maxLine) {
543
544 final GeodeticPoint groundPoint = new GeodeticPoint(latitude, longitude, algorithm.getElevation(latitude, longitude));
545 return inverseLocation(sensorName, groundPoint, minLine, maxLine);
546 }
547
548 /** Inverse location of a point.
549 * <p>
550 * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
551 * are cached, because they induce costly frames computation. So these settings
552 * should not be tuned very finely and changed at each call, but should rather be
553 * a few thousand lines wide and refreshed only when needed. If for example an
554 * inverse location is roughly estimated to occur near line 53764 (for example
555 * using {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine}
556 * and {@code maxLine} could be set for example to 50000 and 60000, which would
557 * be OK also if next line inverse location is expected to occur near line 53780,
558 * and next one ... The setting could be changed for example to 55000 and 65000 when
559 * an inverse location is expected to occur after 55750. Of course, these values
560 * are only an example and should be adjusted depending on mission needs.
561 * </p>
562 * @param sensorName name of the line sensor
563 * @param point geodetic point to localize
564 * @param minLine minimum line number where the search will be performed
565 * @param maxLine maximum line number where the search will be performed
566 * @return sensor pixel seeing point, or null if point cannot be seen between the
567 * prescribed line numbers
568 * @see #dateLocation(String, GeodeticPoint, int, int)
569 * @see org.orekit.rugged.utils.RoughVisibilityEstimator
570 */
571 public SensorPixel inverseLocation(final String sensorName, final GeodeticPoint point,
572 final int minLine, final int maxLine) {
573
574 final LineSensor sensor = getLineSensor(sensorName);
575 DumpManager.dumpInverseLocation(sensor, point, ellipsoid, minLine, maxLine, lightTimeCorrection,
576 aberrationOfLightCorrection, atmosphericRefraction != null);
577
578 final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);
579 DumpManager.dumpSensorMeanPlane(planeCrossing);
580
581 if (atmosphericRefraction == null || !atmosphericRefraction.mustBeComputed()) {
582 // Compute inverse location WITHOUT atmospheric refraction
583 return findSensorPixelWithoutAtmosphere(point, sensor, planeCrossing);
584 } else {
585 // Compute inverse location WITH atmospheric refraction
586 return findSensorPixelWithAtmosphere(point, sensor, minLine, maxLine);
587 }
588 }
589
    /** Apply aberration of light correction (for direct location).
     * @param obsLInert line of sight in inertial frame
     * @param spacecraftVelocity spacecraft velocity in inertial frame
     * @return line of sight with aberration of light correction
     */
    private Vector3D applyAberrationOfLightCorrection(final Vector3D obsLInert, final Vector3D spacecraftVelocity) {

        // As the spacecraft velocity is small with respect to speed of light,
        // we use classical velocity addition and not relativistic velocity addition
        // we look for a positive k such that: c * lInert + vsat = k * obsLInert
        // with lInert normalized
        // taking the squared norm of the equation above yields the quadratic a k^2 + 2 b k + c = 0
        final double a = obsLInert.getNormSq();
        final double b = -Vector3D.dotProduct(obsLInert, spacecraftVelocity);
        final double c = spacecraftVelocity.getNormSq() - Constants.SPEED_OF_LIGHT * Constants.SPEED_OF_LIGHT;

        // a > 0 and c < 0, so the reduced discriminant is positive and the root below is real
        final double s = FastMath.sqrt(b * b - a * c);

        // Only the k > 0 are kept as solutions (the solutions: -(s+b)/a and c/(s-b) are useless)
        // the branch is chosen to avoid cancellation between s and b
        final double k = (b > 0) ? -c / (s + b) : (s - b) / a;

        // recover lInert = (k * obsLInert - vsat) / c from the defining equation
        final Vector3D lInert = new Vector3D( k / Constants.SPEED_OF_LIGHT, obsLInert, -1.0 / Constants.SPEED_OF_LIGHT, spacecraftVelocity);
        return lInert;
    }
614
    /** Compute the DEM intersection with light time correction.
     * <p>
     * The light time correction is applied in two successive stages: a first shift
     * computed against the ellipsoid intersection, then a refined shift computed
     * against the DEM intersection obtained with the first shift.
     * </p>
     * @param date date of the los
     * @param sensorPosition sensor position in spacecraft frame
     * @param los los in spacecraft frame
     * @param scToInert transform for the date from spacecraft to inertial
     * @param inertToBody transform for the date from inertial to body
     * @param pInert sensor position in inertial frame
     * @param lInert line of sight in inertial frame (with aberration of light correction if asked for)
     * @return geodetic point with light time correction
     */
    private NormalizedGeodeticPoint computeWithLightTimeCorrection(final AbsoluteDate date,
                                                                   final Vector3D sensorPosition, final Vector3D los,
                                                                   final Transform scToInert, final Transform inertToBody,
                                                                   final Vector3D pInert, final Vector3D lInert) {

        // Compute the transform between spacecraft and observed body
        final Transform approximate = new Transform(date, scToInert, inertToBody);

        // Transform LOS in spacecraft frame to observed body
        final Vector3D sL = approximate.transformVector(los);
        // Transform sensor position in spacecraft frame to observed body
        final Vector3D sP = approximate.transformPosition(sensorPosition);

        // Compute point intersecting ground (= the ellipsoid) along the pixel LOS
        final Vector3D eP1 = ellipsoid.transform(ellipsoid.pointOnGround(sP, sL, 0.0));

        // Compute light time correction (vs the ellipsoid) (s)
        final double deltaT1 = eP1.distance(sP) / Constants.SPEED_OF_LIGHT;

        // Apply shift due to light time correction (vs the ellipsoid)
        final Transform shifted1 = inertToBody.shiftedBy(-deltaT1);

        // Search the intersection of LOS (taking into account the light time correction if asked for) with DEM
        final NormalizedGeodeticPoint gp1 = algorithm.intersection(ellipsoid,
                                                                   shifted1.transformPosition(pInert),
                                                                   shifted1.transformVector(lInert));

        // Convert the geodetic point (intersection of LOS with DEM) in cartesian coordinates
        final Vector3D eP2 = ellipsoid.transform(gp1);

        // Compute the light time correction (vs DEM) (s)
        final double deltaT2 = eP2.distance(sP) / Constants.SPEED_OF_LIGHT;

        // Apply shift due to light time correction (vs DEM)
        final Transform shifted2 = inertToBody.shiftedBy(-deltaT2);

        // Refine the intersection using the DEM-based light time shift
        return algorithm.refineIntersection(ellipsoid,
                                            shifted2.transformPosition(pInert),
                                            shifted2.transformVector(lInert),
                                            gp1);
    }
666
667 /**
668 * Find the sensor pixel WITHOUT atmospheric refraction correction.
669 * @param point geodetic point to localize
670 * @param sensor the line sensor
671 * @param planeCrossing the sensor mean plane crossing
672 * @return the sensor pixel crossing or null if cannot be found
673 * @since 2.1
674 */
675 private SensorPixel findSensorPixelWithoutAtmosphere(final GeodeticPoint point,
676 final LineSensor sensor, final SensorMeanPlaneCrossing planeCrossing) {
677
678 // find approximately the sensor line at which ground point crosses sensor mean plane
679 final Vector3D target = ellipsoid.transform(point);
680 final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
681 if (crossingResult == null) {
682 // target is out of search interval
683 return null;
684 }
685
686 // find approximately the pixel along this sensor line
687 final SensorPixelCrossing pixelCrossing =
688 new SensorPixelCrossing(sensor, planeCrossing.getMeanPlaneNormal(),
689 crossingResult.getTargetDirection(),
690 MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
691 final double coarsePixel = pixelCrossing.locatePixel(crossingResult.getDate());
692 if (Double.isNaN(coarsePixel)) {
693 // target is out of search interval
694 return null;
695 }
696
697 // fix line by considering the closest pixel exact position and line-of-sight
698 // (this pixel might point towards a direction slightly above or below the mean sensor plane)
699 final int lowIndex = FastMath.max(0, FastMath.min(sensor.getNbPixels() - 2, (int) FastMath.floor(coarsePixel)));
700 final Vector3D lowLOS = sensor.getLOS(crossingResult.getDate(), lowIndex);
701 final Vector3D highLOS = sensor.getLOS(crossingResult.getDate(), lowIndex + 1);
702 final Vector3D localZ = Vector3D.crossProduct(lowLOS, highLOS).normalize();
703 final double beta = FastMath.acos(Vector3D.dotProduct(crossingResult.getTargetDirection(), localZ));
704 final double s = Vector3D.dotProduct(crossingResult.getTargetDirectionDerivative(), localZ);
705 final double betaDer = -s / FastMath.sqrt(1 - s * s);
706 final double deltaL = (0.5 * FastMath.PI - beta) / betaDer;
707 final double fixedLine = crossingResult.getLine() + deltaL;
708 final Vector3D fixedDirection = new Vector3D(1, crossingResult.getTargetDirection(),
709 deltaL, crossingResult.getTargetDirectionDerivative()).normalize();
710
711 // fix neighbouring pixels
712 final AbsoluteDate fixedDate = sensor.getDate(fixedLine);
713 final Vector3D fixedX = sensor.getLOS(fixedDate, lowIndex);
714 final Vector3D fixedZ = Vector3D.crossProduct(fixedX, sensor.getLOS(fixedDate, lowIndex + 1));
715 final Vector3D fixedY = Vector3D.crossProduct(fixedZ, fixedX);
716
717 // fix pixel
718 final double pixelWidth = FastMath.atan2(Vector3D.dotProduct(highLOS, fixedY),
719 Vector3D.dotProduct(highLOS, fixedX));
720 final double alpha = FastMath.atan2(Vector3D.dotProduct(fixedDirection, fixedY),
721 Vector3D.dotProduct(fixedDirection, fixedX));
722 final double fixedPixel = lowIndex + alpha / pixelWidth;
723
724 final SensorPixel result = new SensorPixel(fixedLine, fixedPixel);
725 DumpManager.dumpInverseLocationResult(result);
726
727 return result;
728 }
729
730 /**
731 * Find the sensor pixel WITH atmospheric refraction correction.
732 * @param point geodetic point to localize
733 * @param sensor the line sensor
734 * @param minLine minimum line number where the search will be performed
735 * @param maxLine maximum line number where the search will be performed
736 * @return the sensor pixel crossing or null if cannot be found
737 * @since 2.1
738 */
739 private SensorPixel findSensorPixelWithAtmosphere(final GeodeticPoint point,
740 final LineSensor sensor, final int minLine, final int maxLine) {
741
742 // TBN: there is no direct way to compute the inverse location.
743 // The method is based on an interpolation grid associated with the fixed point method
744
745 final String sensorName = sensor.getName();
746
747 // Compute a correction grid (at sensor level)
748 // ===========================================
749 // Need to be computed only once for a given sensor (with the same minLine and maxLine)
750 if (atmosphericRefraction.getBifPixel() == null || atmosphericRefraction.getBifLine() == null || // lazy evaluation
751 !atmosphericRefraction.isSameContext(sensorName, minLine, maxLine)) { // Must be recomputed if the context changed
752
753 // Definition of a regular grid (at sensor level)
754 atmosphericRefraction.configureCorrectionGrid(sensor, minLine, maxLine);
755
756 // Get the grid nodes
757 final int nbPixelGrid = atmosphericRefraction.getComputationParameters().getNbPixelGrid();
758 final int nbLineGrid = atmosphericRefraction.getComputationParameters().getNbLineGrid();
759 final double[] pixelGrid = atmosphericRefraction.getComputationParameters().getUgrid();
760 final double[] lineGrid = atmosphericRefraction.getComputationParameters().getVgrid();
761
762 // Computation, for the sensor grid, of the direct location WITH atmospheric refraction
763 // (full computation)
764 atmosphericRefraction.reactivateComputation();
765 final GeodeticPoint[][] geodeticGridWithAtmosphere = computeDirectLocOnGridWithAtmosphere(pixelGrid, lineGrid, sensor);
766 // pixelGrid and lineGrid are the nodes where the direct loc is computed WITH atmosphere
767
768 // Computation of the inverse location WITHOUT atmospheric refraction for the grid nodes
769 atmosphericRefraction.deactivateComputation();
770 final SensorPixel[][] sensorPixelGridInverseWithout = computeInverseLocOnGridWithoutAtmosphere(geodeticGridWithAtmosphere,
771 nbPixelGrid, nbLineGrid, sensor, minLine, maxLine);
772 atmosphericRefraction.reactivateComputation();
773
774 // Compute the grid correction functions (for pixel and line)
775 atmosphericRefraction.computeGridCorrectionFunctions(sensorPixelGridInverseWithout);
776 }
777
778 // Fixed point method
779 // ==================
780 // Initialization
781 // --------------
782 // Deactivate the dump because no need to keep intermediate computations of inverse loc (can be regenerate)
783 final Boolean wasSuspended = DumpManager.suspend();
784
785 // compute the sensor pixel on the desired ground point WITHOUT atmosphere
786 atmosphericRefraction.deactivateComputation();
787 final SensorPixel sp0 = inverseLocation(sensorName, point, minLine, maxLine);
788 atmosphericRefraction.reactivateComputation();
789 // Reactivate the dump
790 DumpManager.resume(wasSuspended);
791
792 if (sp0 == null) {
793 // In order for the dump to end nicely
794 DumpManager.endNicely();
795
796 // Impossible to find the sensor pixel in the given range lines (without atmosphere)
797 throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_RANGE_LINES, minLine, maxLine);
798 }
799
800 // set up the starting point of the fixed point method
801 final double pixel0 = sp0.getPixelNumber();
802 final double line0 = sp0.getLineNumber();
803 // Needed data for the dump
804 sensor.dumpRate(line0);
805
806 // Apply fixed point method until convergence in pixel and line
807 // ------------------------------------------------------------
808 // compute the first (pixel, line) value:
809 // initial sensor pixel value + correction due to atmosphere at this same sensor pixel
810 double corrPixelPrevious = pixel0 + atmosphericRefraction.getBifPixel().value(pixel0, line0);
811 double corrLinePrevious = line0 + atmosphericRefraction.getBifLine().value(pixel0, line0);
812
813 double deltaCorrPixel = Double.POSITIVE_INFINITY;
814 double deltaCorrLine = Double.POSITIVE_INFINITY;
815
816 while (deltaCorrPixel > PIXEL_CV_THRESHOLD && deltaCorrLine > LINE_CV_THRESHOLD) {
817 // Compute the current (pixel, line) value =
818 // initial sensor pixel value + correction due to atmosphere on the previous sensor pixel
819 final double corrPixelCurrent = pixel0 + atmosphericRefraction.getBifPixel().value(corrPixelPrevious, corrLinePrevious);
820 final double corrLineCurrent = line0 + atmosphericRefraction.getBifLine().value(corrPixelPrevious, corrLinePrevious);
821
822 // Compute the delta in pixel and line to check the convergence
823 deltaCorrPixel = FastMath.abs(corrPixelCurrent - corrPixelPrevious);
824 deltaCorrLine = FastMath.abs(corrLineCurrent - corrLinePrevious);
825
826 // Store the (pixel, line) for next loop
827 corrPixelPrevious = corrPixelCurrent;
828 corrLinePrevious = corrLineCurrent;
829 }
830 // The sensor pixel is found !
831 final SensorPixel sensorPixelWithAtmosphere = new SensorPixel(corrLinePrevious, corrPixelPrevious);
832
833 // Dump the found sensorPixel
834 DumpManager.dumpInverseLocationResult(sensorPixelWithAtmosphere);
835
836 return sensorPixelWithAtmosphere;
837 }
838
    /** Compute the inverse location WITHOUT atmospheric refraction for the geodetic points
     * associated to the sensor grid nodes.
     * <p>
     * The dump is suspended for the whole computation (intermediate inverse locations can
     * be regenerated and need not be recorded). Any node whose ground point is null, or
     * whose inverse location falls outside the sensor (margin included), aborts the whole
     * grid computation with a {@link RuggedException}.
     * </p>
     * @param groundGridWithAtmosphere ground grid found for sensor grid nodes with atmosphere
     * @param nbPixelGrid size of the pixel grid
     * @param nbLineGrid size of the line grid
     * @param sensor the line sensor
     * @param minLine minimum line number where the search will be performed
     * @param maxLine maximum line number where the search will be performed
     * @return the sensor pixel grid computed without atmosphere (entries may be null where
     *         the corresponding ground point was undefined)
     * @since 2.1
     */
    private SensorPixel[][] computeInverseLocOnGridWithoutAtmosphere(final GeodeticPoint[][] groundGridWithAtmosphere,
                                                                     final int nbPixelGrid, final int nbLineGrid,
                                                                     final LineSensor sensor, final int minLine, final int maxLine) {

        // Deactivate the dump because no need to keep intermediate computations of inverse loc (can be regenerate)
        final Boolean wasSuspended = DumpManager.suspend();

        final SensorPixel[][] sensorPixelGrid = new SensorPixel[nbPixelGrid][nbLineGrid];
        final String sensorName = sensor.getName();

        for (int uIndex = 0; uIndex < nbPixelGrid; uIndex++) {
            for (int vIndex = 0; vIndex < nbLineGrid; vIndex++) {

                // Check if the geodetic point exists
                if (groundGridWithAtmosphere[uIndex][vIndex] != null) {
                    final GeodeticPoint groundPoint = groundGridWithAtmosphere[uIndex][vIndex];
                    final double currentLat = groundPoint.getLatitude();
                    final double currentLon = groundPoint.getLongitude();

                    try {
                        // Compute the inverse location for the current node
                        sensorPixelGrid[uIndex][vIndex] = inverseLocation(sensorName, currentLat, currentLon, minLine, maxLine);

                    } catch (RuggedException re) { // This should never happen
                        // In order for the dump to end nicely
                        DumpManager.endNicely();
                        throw new RuggedInternalError(re);
                    }

                    // Check if the pixel is inside the sensor (with a margin) OR if the inverse location was impossible (null result)
                    if (!pixelIsInside(sensorPixelGrid[uIndex][vIndex], sensor)) {

                        // In order for the dump to end nicely
                        DumpManager.endNicely();

                        if (sensorPixelGrid[uIndex][vIndex] == null) {
                            // Impossible to find the sensor pixel in the given range lines
                            throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_RANGE_LINES, minLine, maxLine);
                        } else {
                            // Impossible to find the sensor pixel: outside the allowed interval [-margin, margin + nbPixels - 1)
                            final double invLocationMargin = atmosphericRefraction.getComputationParameters().getInverseLocMargin();
                            throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_PIXELS_LINE, sensorPixelGrid[uIndex][vIndex].getPixelNumber(),
                                                      -invLocationMargin, invLocationMargin + sensor.getNbPixels() - 1, invLocationMargin);
                        }
                    }

                } else { // groundGrid[uIndex][vIndex] == null: impossible to compute inverse loc because ground point not defined

                    sensorPixelGrid[uIndex][vIndex] = null;

                } // groundGrid[uIndex][vIndex] != null
            } // end loop vIndex
        } // end loop uIndex

        // Reactivate the dump
        DumpManager.resume(wasSuspended);

        // The sensor grid computed WITHOUT atmospheric refraction correction
        return sensorPixelGrid;
    }
910
911 /** Check if pixel is inside the sensor with a margin.
912 * @param pixel pixel to check (may be null if not found)
913 * @param sensor the line sensor
914 * @return true if the pixel is inside the sensor
915 */
916 private boolean pixelIsInside(final SensorPixel pixel, final LineSensor sensor) {
917 // Get the inverse location margin
918 final double invLocationMargin = atmosphericRefraction.getComputationParameters().getInverseLocMargin();
919
920 return pixel != null && pixel.getPixelNumber() >= -invLocationMargin && pixel.getPixelNumber() < invLocationMargin + sensor.getNbPixels() - 1;
921 }
922
923 /** Computation, for the sensor pixels grid, of the direct location WITH atmospheric refraction.
924 * (full computation)
925 * @param pixelGrid the pixel grid
926 * @param lineGrid the line grid
927 * @param sensor the line sensor
928 * @return the ground grid computed with atmosphere
929 * @since 2.1
930 */
931 private GeodeticPoint[][] computeDirectLocOnGridWithAtmosphere(final double[] pixelGrid, final double[] lineGrid,
932 final LineSensor sensor) {
933
934 // Deactivate the dump because no need to keep intermediate computations of direct loc (can be regenerate)
935 final Boolean wasSuspended = DumpManager.suspend();
936
937 final int nbPixelGrid = pixelGrid.length;
938 final int nbLineGrid = lineGrid.length;
939 final GeodeticPoint[][] groundGridWithAtmosphere = new GeodeticPoint[nbPixelGrid][nbLineGrid];
940 final Vector3D sensorPosition = sensor.getPosition();
941
942 for (int uIndex = 0; uIndex < nbPixelGrid; uIndex++) {
943 final double pixelNumber = pixelGrid[uIndex];
944 for (int vIndex = 0; vIndex < nbLineGrid; vIndex++) {
945 final double lineNumber = lineGrid[vIndex];
946 final AbsoluteDate date = sensor.getDate(lineNumber);
947 final Vector3D los = sensor.getLOS(date, pixelNumber);
948 try {
949 // Compute the direct location for the current node
950 groundGridWithAtmosphere[uIndex][vIndex] = directLocation(date, sensorPosition, los);
951
952 } catch (RuggedException re) { // This should never happen
953 // In order for the dump to end nicely
954 DumpManager.endNicely();
955 throw new RuggedInternalError(re);
956 }
957 } // end loop vIndex
958 } // end loop uIndex
959
960 // Reactivate the dump
961 DumpManager.resume(wasSuspended);
962
963 // The ground grid computed WITH atmospheric refraction correction
964 return groundGridWithAtmosphere;
965 }
966
967 /** Compute distances between two line sensors.
968 * @param sensorA line sensor A
969 * @param dateA current date for sensor A
970 * @param pixelA pixel index for sensor A
971 * @param scToBodyA spacecraft to body transform for sensor A
972 * @param sensorB line sensor B
973 * @param dateB current date for sensor B
974 * @param pixelB pixel index for sensor B
975 * @return distances computed between LOS and to the ground
976 * @since 2.0
977 */
978 public double[] distanceBetweenLOS(final LineSensor sensorA, final AbsoluteDate dateA, final double pixelA,
979 final SpacecraftToObservedBody scToBodyA,
980 final LineSensor sensorB, final AbsoluteDate dateB, final double pixelB) {
981
982 // Compute the approximate transform between spacecraft and observed body
983 // from Rugged instance A
984 final Transform scToInertA = scToBodyA.getScToInertial(dateA);
985 final Transform inertToBodyA = scToBodyA.getInertialToBody(dateA);
986 final Transform transformScToBodyA = new Transform(dateA, scToInertA, inertToBodyA);
987
988 // from (current) Rugged instance B
989 final Transform scToInertB = scToBody.getScToInertial(dateB);
990 final Transform inertToBodyB = scToBody.getInertialToBody(dateB);
991 final Transform transformScToBodyB = new Transform(dateB, scToInertB, inertToBodyB);
992
993 // Get sensors LOS into local frame
994 final Vector3D vALocal = sensorA.getLOS(dateA, pixelA);
995 final Vector3D vBLocal = sensorB.getLOS(dateB, pixelB);
996
997 // Position of sensors into local frame
998 final Vector3D sALocal = sensorA.getPosition(); // S_a : sensorA 's position
999 final Vector3D sBLocal = sensorB.getPosition(); // S_b : sensorB 's position
1000
1001 // Get sensors position and LOS into body frame
1002 final Vector3D sA = transformScToBodyA.transformPosition(sALocal); // S_a : sensorA's position
1003 final Vector3D vA = transformScToBodyA.transformVector(vALocal); // V_a : line of sight's vectorA
1004 final Vector3D sB = transformScToBodyB.transformPosition(sBLocal); // S_b : sensorB's position
1005 final Vector3D vB = transformScToBodyB.transformVector(vBLocal); // V_b : line of sight's vectorB
1006
1007 // Compute distance
1008 final Vector3D vBase = sB.subtract(sA); // S_b - S_a
1009 final double svA = Vector3D.dotProduct(vBase, vA); // SV_a = (S_b - S_a).V_a
1010 final double svB = Vector3D.dotProduct(vBase, vB); // SV_b = (S_b - S_a).V_b
1011
1012 final double vAvB = Vector3D.dotProduct(vA, vB); // V_a.V_b
1013
1014 // Compute lambda_b = (SV_a * V_a.V_b - SV_b) / (1 - (V_a.V_b)²)
1015 final double lambdaB = (svA * vAvB - svB) / (1 - vAvB * vAvB);
1016
1017 // Compute lambda_a = SV_a + lambdaB * V_a.V_b
1018 final double lambdaA = svA + lambdaB * vAvB;
1019
1020 // Compute vector M_a = S_a + lambda_a * V_a
1021 final Vector3D mA = sA.add(vA.scalarMultiply(lambdaA));
1022 // Compute vector M_b = S_b + lambda_b * V_b
1023 final Vector3D mB = sB.add(vB.scalarMultiply(lambdaB));
1024
1025 // Compute vector M_a -> M_B for which distance between LOS is minimum
1026 final Vector3D vDistanceMin = mB.subtract(mA); // M_b - M_a
1027
1028 // Compute vector from mid point of vector M_a -> M_B to the ground (corresponds to minimum elevation)
1029 final Vector3D midPoint = (mB.add(mA)).scalarMultiply(0.5);
1030
1031 // Get the euclidean norms to compute the minimum distances: between LOS and to the ground
1032 final double[] distances = {vDistanceMin.getNorm(), midPoint.getNorm()};
1033
1034 return distances;
1035 }
1036
    /** Compute distances between two line sensors with derivatives.
     * <p>
     * Mirrors {@link #distanceBetweenLOS(LineSensor, AbsoluteDate, double, SpacecraftToObservedBody,
     * LineSensor, AbsoluteDate, double)} but propagates derivatives of type {@code T}
     * through the whole computation.
     * </p>
     * @param <T> derivative type
     * @param sensorA line sensor A
     * @param dateA current date for sensor A
     * @param pixelA pixel index for sensor A
     * @param scToBodyA spacecraftToBody transform for sensor A
     * @param sensorB line sensor B
     * @param dateB current date for sensor B
     * @param pixelB pixel index for sensor B
     * @param generator generator to use for building {@link DerivativeStructure} instances
     * @return distances computed, with derivatives, between LOS and to the ground
     * @see #distanceBetweenLOS(LineSensor, AbsoluteDate, double, SpacecraftToObservedBody, LineSensor, AbsoluteDate, double)
     */
    public <T extends Derivative<T>> T[] distanceBetweenLOSderivatives(
                                 final LineSensor sensorA, final AbsoluteDate dateA, final double pixelA,
                                 final SpacecraftToObservedBody scToBodyA,
                                 final LineSensor sensorB, final AbsoluteDate dateB, final double pixelB,
                                 final DerivativeGenerator<T> generator) {

        // Compute the approximate transforms between spacecraft and observed body
        // from Rugged instance A
        final Transform scToInertA = scToBodyA.getScToInertial(dateA);
        final Transform inertToBodyA = scToBodyA.getInertialToBody(dateA);
        final Transform transformScToBodyA = new Transform(dateA, scToInertA, inertToBodyA);

        // from (current) Rugged instance B
        final Transform scToInertB = scToBody.getScToInertial(dateB);
        final Transform inertToBodyB = scToBody.getInertialToBody(dateB);
        final Transform transformScToBodyB = new Transform(dateB, scToInertB, inertToBodyB);

        // Get sensors LOS into local frame (with derivatives)
        final FieldVector3D<T> vALocal = sensorA.getLOSDerivatives(dateA, pixelA, generator);
        final FieldVector3D<T> vBLocal = sensorB.getLOSDerivatives(dateB, pixelB, generator);

        // Get sensors LOS into body frame
        final FieldVector3D<T> vA = transformScToBodyA.transformVector(vALocal); // V_a : line of sight's vectorA
        final FieldVector3D<T> vB = transformScToBodyB.transformVector(vBLocal); // V_b : line of sight's vectorB

        // Position of sensors into local frame (plain vectors, no derivatives yet)
        final Vector3D sAtmp = sensorA.getPosition();
        final Vector3D sBtmp = sensorB.getPosition();

        // V_a.V_a = 1 after normalization: used below as a field-valued "one" that
        // carries the proper derivative structure
        final T scaleFactor = FieldVector3D.dotProduct(vA.normalize(), vA.normalize()); // V_a.V_a=1

        // Build a vector from the position and a scale factor (equals to 1).
        // The vector built will be scaleFactor * sAtmp for example.
        // (this lifts the constant positions into the derivative field)
        final FieldVector3D<T> sALocal = new FieldVector3D<>(scaleFactor, sAtmp);
        final FieldVector3D<T> sBLocal = new FieldVector3D<>(scaleFactor, sBtmp);

        // Get sensors position into body frame
        final FieldVector3D<T> sA = transformScToBodyA.transformPosition(sALocal); // S_a : sensorA 's position
        final FieldVector3D<T> sB = transformScToBodyB.transformPosition(sBLocal); // S_b : sensorB 's position

        // Compute distance
        final FieldVector3D<T> vBase = sB.subtract(sA);    // S_b - S_a
        final T svA = FieldVector3D.dotProduct(vBase, vA); // SV_a = (S_b - S_a).V_a
        final T svB = FieldVector3D.dotProduct(vBase, vB); // SV_b = (S_b - S_a).V_b

        final T vAvB = FieldVector3D.dotProduct(vA, vB);   // V_a.V_b

        // Compute lambda_b = (SV_a * V_a.V_b - SV_b) / (1 - (V_a.V_b)²)
        // (denominator written as -((V_a.V_b)² - 1) in field arithmetic)
        final T lambdaB = (svA.multiply(vAvB).subtract(svB)).divide(vAvB.multiply(vAvB).subtract(1).negate());

        // Compute lambda_a = SV_a + lambdaB * V_a.V_b
        final T lambdaA = vAvB.multiply(lambdaB).add(svA);

        // Compute vector M_a:
        final FieldVector3D<T> mA = sA.add(vA.scalarMultiply(lambdaA)); // M_a = S_a + lambda_a * V_a
        // Compute vector M_b
        final FieldVector3D<T> mB = sB.add(vB.scalarMultiply(lambdaB)); // M_b = S_b + lambda_b * V_b

        // Compute vector M_a -> M_B for which distance between LOS is minimum
        final FieldVector3D<T> vDistanceMin = mB.subtract(mA); // M_b - M_a

        // Compute vector from mid point of vector M_a -> M_B to the ground (corresponds to minimum elevation)
        final FieldVector3D<T> midPoint = (mB.add(mA)).scalarMultiply(0.5);

        // Get the euclidean norms to compute the minimum distances:
        // between LOS
        final T dMin = vDistanceMin.getNorm();
        // to the ground
        final T dCentralBody = midPoint.getNorm();

        final T[] ret = MathArrays.buildArray(dMin.getField(), 2);
        ret[0] = dMin;
        ret[1] = dCentralBody;
        return ret;

    }
1126
1127
1128 /** Get the mean plane crossing finder for a sensor.
1129 * @param sensorName name of the line sensor
1130 * @param minLine minimum line number
1131 * @param maxLine maximum line number
1132 * @return mean plane crossing finder
1133 */
1134 private SensorMeanPlaneCrossing getPlaneCrossing(final String sensorName,
1135 final int minLine, final int maxLine) {
1136
1137 final LineSensor sensor = getLineSensor(sensorName);
1138 SensorMeanPlaneCrossing planeCrossing = finders.get(sensorName);
1139 if (planeCrossing == null ||
1140 planeCrossing.getMinLine() != minLine ||
1141 planeCrossing.getMaxLine() != maxLine) {
1142
1143 // create a new finder for the specified sensor and range
1144 planeCrossing = new SensorMeanPlaneCrossing(sensor, scToBody, minLine, maxLine,
1145 lightTimeCorrection, aberrationOfLightCorrection,
1146 MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
1147
1148 // store the finder, in order to reuse it
1149 // (and save some computation done in its constructor)
1150 setPlaneCrossing(planeCrossing);
1151
1152 }
1153
1154 return planeCrossing;
1155 }
1156
    /** Set the mean plane crossing finder for a sensor.
     * <p>
     * The finder is stored in the {@code finders} cache, keyed by its sensor name,
     * replacing any previously cached finder for that sensor.
     * </p>
     * @param planeCrossing plane crossing finder
     */
    private void setPlaneCrossing(final SensorMeanPlaneCrossing planeCrossing) {
        finders.put(planeCrossing.getSensor().getName(), planeCrossing);
    }
1163
    /** Inverse location of a point with derivatives.
     * <p>
     * Mirrors the non-derivative inverse location: find the mean plane crossing,
     * locate the coarse pixel along the crossing line, then refine line and pixel
     * using the closest pixels' exact lines of sight, all in field arithmetic so
     * derivatives of type {@code T} are propagated.
     * </p>
     * @param <T> derivative type
     * @param sensorName name of the line sensor
     * @param point point to localize
     * @param minLine minimum line number
     * @param maxLine maximum line number
     * @param generator generator to use for building {@link Derivative} instances
     * @return sensor pixel seeing point with derivatives, or null if point cannot be seen between the
     * prescribed line numbers
     * @see #inverseLocation(String, GeodeticPoint, int, int)
     * @since 2.0
     */
    public <T extends Derivative<T>> T[] inverseLocationDerivatives(final String sensorName,
                                                                    final GeodeticPoint point,
                                                                    final int minLine,
                                                                    final int maxLine,
                                                                    final DerivativeGenerator<T> generator) {

        final LineSensor sensor = getLineSensor(sensorName);

        final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);

        // find approximately the sensor line at which ground point crosses sensor mean plane
        final Vector3D target = ellipsoid.transform(point);
        final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
        if (crossingResult == null) {
            // target is out of search interval
            return null;
        }

        // find approximately the pixel along this sensor line
        final SensorPixelCrossing pixelCrossing =
            new SensorPixelCrossing(sensor, planeCrossing.getMeanPlaneNormal(),
                                    crossingResult.getTargetDirection(),
                                    MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
        final double coarsePixel = pixelCrossing.locatePixel(crossingResult.getDate());
        if (Double.isNaN(coarsePixel)) {
            // target is out of search interval
            return null;
        }

        // fix line by considering the closest pixel exact position and line-of-sight
        // (this pixel might point towards a direction slightly above or below the mean sensor plane)
        // lowIndex is clamped so that both lowIndex and lowIndex + 1 are valid pixel indices
        final int lowIndex = FastMath.max(0, FastMath.min(sensor.getNbPixels() - 2, (int) FastMath.floor(coarsePixel)));
        final FieldVector3D<T> lowLOS =
                        sensor.getLOSDerivatives(crossingResult.getDate(), lowIndex, generator);
        final FieldVector3D<T> highLOS = sensor.getLOSDerivatives(crossingResult.getDate(), lowIndex + 1, generator);
        // localZ: normal to the local plane spanned by the two neighbouring lines of sight
        final FieldVector3D<T> localZ = FieldVector3D.crossProduct(lowLOS, highLOS).normalize();
        // beta: angle between target direction and localZ; deltaL is the first-order line
        // correction bringing beta back to pi/2 (target in the local plane)
        final T beta = FieldVector3D.dotProduct(crossingResult.getTargetDirection(), localZ).acos();
        final T s = FieldVector3D.dotProduct(crossingResult.getTargetDirectionDerivative(), localZ);
        final T minusBetaDer = s.divide(s.multiply(s).subtract(1).negate().sqrt());
        final T deltaL = beta.subtract(0.5 * FastMath.PI) .divide(minusBetaDer);
        final T fixedLine = deltaL.add(crossingResult.getLine());
        final FieldVector3D<T> fixedDirection =
                        new FieldVector3D<>(deltaL.getField().getOne(), crossingResult.getTargetDirection(),
                                            deltaL, crossingResult.getTargetDirectionDerivative()).normalize();

        // fix neighbouring pixels: rebuild the local frame (X, Y, Z) at the fixed line date
        final AbsoluteDate fixedDate  = sensor.getDate(fixedLine.getValue());
        final FieldVector3D<T> fixedX = sensor.getLOSDerivatives(fixedDate, lowIndex, generator);
        final FieldVector3D<T> fixedZ = FieldVector3D.crossProduct(fixedX, sensor.getLOSDerivatives(fixedDate, lowIndex + 1, generator));
        final FieldVector3D<T> fixedY = FieldVector3D.crossProduct(fixedZ, fixedX);

        // fix pixel: interpolate the fractional pixel from the angle ratio in the local frame
        final T hY         = FieldVector3D.dotProduct(highLOS, fixedY);
        final T hX         = FieldVector3D.dotProduct(highLOS, fixedX);
        final T pixelWidth = hY.atan2(hX);
        final T fY         = FieldVector3D.dotProduct(fixedDirection, fixedY);
        final T fX         = FieldVector3D.dotProduct(fixedDirection, fixedX);
        final T alpha      = fY.atan2(fX);
        final T fixedPixel = alpha.divide(pixelWidth).add(lowIndex);

        // result as {line, pixel}, both with derivatives
        final T[] ret = MathArrays.buildArray(fixedPixel.getField(), 2);
        ret[0] = fixedLine;
        ret[1] = fixedPixel;
        return ret;

    }
1242
    /** Get transform from spacecraft to inertial frame.
     * <p>
     * Delegates to the underlying spacecraft-to-observed-body converter.
     * </p>
     * @param date date of the transform
     * @return transform from spacecraft to inertial frame
     */
    public Transform getScToInertial(final AbsoluteDate date) {
        return scToBody.getScToInertial(date);
    }
1250
    /** Get transform from inertial frame to observed body frame.
     * <p>
     * Delegates to the underlying spacecraft-to-observed-body converter.
     * </p>
     * @param date date of the transform
     * @return transform from inertial frame to observed body frame
     */
    public Transform getInertialToBody(final AbsoluteDate date) {
        return scToBody.getInertialToBody(date);
    }
1258
    /** Get transform from observed body frame to inertial frame.
     * <p>
     * Delegates to the underlying spacecraft-to-observed-body converter.
     * </p>
     * @param date date of the transform
     * @return transform from observed body frame to inertial frame
     */
    public Transform getBodyToInertial(final AbsoluteDate date) {
        return scToBody.getBodyToInertial(date);
    }
1266
1267 /** Get a sensor.
1268 * @param sensorName sensor name
1269 * @return selected sensor
1270 */
1271 public LineSensor getLineSensor(final String sensorName) {
1272
1273 final LineSensor sensor = sensors.get(sensorName);
1274 if (sensor == null) {
1275 throw new RuggedException(RuggedMessages.UNKNOWN_SENSOR, sensorName);
1276 }
1277 return sensor;
1278 }
1279
    /** Get converter between spacecraft and body.
     * <p>
     * Exposes the internal converter directly (no defensive copy).
     * </p>
     * @return the scToBody
     * @since 2.0
     */
    public SpacecraftToObservedBody getScToBody() {
        return scToBody;
    }
1287 }