/* Copyright 2002-2025 CS GROUP
 * Licensed to CS GROUP (CS) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * CS licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.orekit.data;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.ParseException;
import java.util.Arrays;
import java.util.regex.Pattern;

import org.hipparchus.exception.DummyLocalizable;
import org.orekit.errors.OrekitException;
import org.orekit.errors.OrekitMessages;


/** Provider for data files stored in a directory tree on the filesystem.
 * <p>
 * This class handles data files recursively, starting from a root directory.
 * The organization of files in the directories is free. There may be
 * sub-directories to any level. All sub-directories are browsed and all terminal
 * files are checked for loading.
 * </p>
 * <p>
 * All {@link FiltersManager#addFilter(DataFilter) registered}
 * {@link DataFilter filters} are applied.
 * </p>
 * <p>
 * Zip archive entries are supported recursively.
 * </p>
 * <p>
 * This is a simple application of the <code>visitor</code> design pattern for
 * directory hierarchy crawling.
 * </p>
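 * <p>
 * A minimal usage sketch; the {@code /path/to/orekit-data} directory below is
 * only an illustrative placeholder for a local data tree:
 * </p>
 * <pre>{@code
 * DataProvidersManager manager = DataContext.getDefault().getDataProvidersManager();
 * manager.addProvider(new DirectoryCrawler(new File("/path/to/orekit-data")));
 * }</pre>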
 * @see DataProvidersManager
 * @author Luc Maisonobe
 */
public class DirectoryCrawler implements DataProvider {

    /** Root directory. */
    private final File root;

    /** Build a data files crawler.
     * @param root root of the directories tree (must be a directory)
     * @exception OrekitException if root is not a directory
     */
    public DirectoryCrawler(final File root) {
        if (!root.isDirectory()) {
            throw new OrekitException(OrekitMessages.NOT_A_DIRECTORY, root.getAbsolutePath());
        }
        this.root = root;
    }

    /** {@inheritDoc} */
    public boolean feed(final Pattern supported,
                        final DataLoader visitor,
                        final DataProvidersManager manager) {
        try {
            return feed(supported, visitor, manager, root);
        } catch (IOException | ParseException e) {
            throw new OrekitException(e, new DummyLocalizable(e.getMessage()));
        }
    }

    /** Feed a data file loader by browsing a directory hierarchy.
     * @param supported pattern for file names supported by the visitor
     * @param visitor data file visitor to feed
     * @param manager data providers manager holding the filters to apply
     * @param directory current directory
     * @return true if something has been loaded
     * @exception IOException if data cannot be read
     * @exception ParseException if data cannot be parsed
     */
    private boolean feed(final Pattern supported,
                         final DataLoader visitor,
                         final DataProvidersManager manager,
                         final File directory)
        throws IOException, ParseException {

        // search in current directory
        final File[] list = directory.listFiles();
        if (list == null) {
            // notify about race condition if directory is removed by another program
            throw new OrekitException(OrekitMessages.NOT_A_DIRECTORY, directory.getAbsolutePath());
        }
        Arrays.sort(list, File::compareTo);

        OrekitException delayedException = null;
        boolean loaded = false;
        for (final File file : list) {
            try {
                if (visitor.stillAcceptsData()) {
                    if (file.isDirectory()) {

                        // recurse in the sub-directory
                        loaded = feed(supported, visitor, manager, file) || loaded;

                    } else if (ZIP_ARCHIVE_PATTERN.matcher(file.getName()).matches()) {

                        // browse inside the zip/jar file
                        final DataProvider zipProvider = new ZipJarCrawler(file);
                        loaded = zipProvider.feed(supported, visitor, manager) || loaded;

                    } else {

                        // apply all registered filters
                        DataSource data = new DataSource(file.getName(), () -> new FileInputStream(file));
                        data = manager.getFiltersManager().applyRelevantFilters(data);

                        if (supported.matcher(data.getName()).matches()) {
                            // visit the current file
                            try (InputStream input = data.getOpener().openStreamOnce()) {
                                visitor.loadData(input, file.getPath());
                                loaded = true;
                            }
                        }

                    }
                }
            } catch (OrekitException oe) {
                // remember the problem, but keep browsing the remaining files
                delayedException = oe;
            }

        }

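        // exceptions caught above are only considered fatal if nothing could be
        // loaded at all; otherwise the files that did load are deemed sufficient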
        if (!loaded && delayedException != null) {
            throw delayedException;
        }

        return loaded;

    }

}