77
88import ch .rasc .sse .eventbus .SseEvent ;
99import ch .rasc .sse .eventbus .SseEventBus ;
10- import java .io .File ;
11- import java .io .IOException ;
12- import java .lang .invoke .MethodHandles ;
13- import java .nio .charset .StandardCharsets ;
14- import java .nio .file .Files ;
15- import java .nio .file .Path ;
16- import java .time .Instant ;
17- import java .util .ArrayList ;
18- import java .util .Comparator ;
19- import java .util .List ;
20- import java .util .Map ;
21- import java .util .Objects ;
22- import java .util .Set ;
23- import java .util .concurrent .TimeUnit ;
24- import java .util .concurrent .atomic .AtomicInteger ;
25- import java .util .stream .Stream ;
26- import java .util .zip .ZipEntry ;
27- import java .util .zip .ZipOutputStream ;
10+ import jakarta .annotation .PostConstruct ;
2811import org .apache .commons .lang3 .StringUtils ;
2912import org .geotools .api .data .FeatureEvent ;
3013import org .geotools .api .data .FileDataStore ;
5639import org .springframework .scheduling .annotation .Scheduled ;
5740import org .springframework .stereotype .Service ;
5841import org .springframework .transaction .annotation .Transactional ;
59- import org .springframework .web .server .ResponseStatusException ;
6042import org .tailormap .api .controller .LayerExtractController ;
6143import org .tailormap .api .geotools .collection .ProgressReportingFeatureCollection ;
6244import org .tailormap .api .geotools .data .excel .ExcelDataStore ;
6850import tools .jackson .databind .SerializationFeature ;
6951import tools .jackson .databind .json .JsonMapper ;
7052
53+ import java .io .File ;
54+ import java .io .IOException ;
55+ import java .io .UncheckedIOException ;
56+ import java .lang .invoke .MethodHandles ;
57+ import java .nio .charset .StandardCharsets ;
58+ import java .nio .file .Files ;
59+ import java .nio .file .Path ;
60+ import java .time .Instant ;
61+ import java .util .ArrayList ;
62+ import java .util .Comparator ;
63+ import java .util .List ;
64+ import java .util .Map ;
65+ import java .util .Objects ;
66+ import java .util .Set ;
67+ import java .util .concurrent .TimeUnit ;
68+ import java .util .concurrent .atomic .AtomicInteger ;
69+ import java .util .stream .Stream ;
70+ import java .util .zip .ZipEntry ;
71+ import java .util .zip .ZipOutputStream ;
72+
7173@ Service
7274public class CreateLayerExtractService {
7375 private static final Logger logger =
@@ -77,9 +79,13 @@ public class CreateLayerExtractService {
7779 private final FeatureSourceFactoryHelper featureSourceFactoryHelper ;
7880 private final FilterFactory ff = CommonFactoryFinder .getFilterFactory (GeoTools .getDefaultHints ());
7981
82+ private static final String EXTRACT_SUBDIRECTORY = "tm-extracts" ;
8083 // we can safely use the tmp dir as a default here because we are running in a docker container without a shell, so
8184 // access is limited
85+ // Base directory from config; actual export dir is <base>/tm-extracts
8286 @ Value ("${tailormap-api.extract.location:#{systemProperties['java.io.tmpdir']}}" )
87+ private String exportFilesBaseLocation ;
88+
8389 private String exportFilesLocation ;
8490
8591 @ Value ("${tailormap-api.extract.cleanup-minutes:120}" )
@@ -91,6 +97,19 @@ public class CreateLayerExtractService {
9197 @ Value ("${tailormap-api.features.wfs_count_exact:false}" )
9298 private boolean exactWfsCounts ;
9399
100+ @ PostConstruct
101+ void initializeExtractDirectory () {
102+ try {
103+ Path exportRoot = Path .of (exportFilesBaseLocation , EXTRACT_SUBDIRECTORY );
104+ Files .createDirectories (exportRoot );
105+ this .exportFilesLocation = exportRoot .toRealPath ().toString ();
106+ logger .info ("Using extract output directory: {}" , this .exportFilesLocation );
107+ } catch (IOException e ) {
108+ throw new UncheckedIOException (
109+ "Failed to initialize extract directory under base path: " + exportFilesBaseLocation , e );
110+ }
111+ }
112+
94113 public CreateLayerExtractService (
95114 @ Qualifier ("viewerSseEventBus" ) SseEventBus eventBus ,
96115 JsonMapper jsonMapper ,
@@ -249,13 +268,12 @@ private void handleGeoPackage(
249268 @ NonNull String outputFileName ) {
250269
251270 SimpleFeatureSource inputFeatureSource = null ;
252- File outputFile = null ;
271+ File outputFile ;
253272 try {
254273 outputFile = getValidatedOutputFile (outputFileName );
255274 if (!logger .isDebugEnabled ()) {
256275 // delete in production after JVM exit because the event bus will be reset when the JVM exits, and then
257- // we
258- // are unlikely to have a reference to the file anymore.
276+ // we are unlikely to have a reference to the file anymore.
259277 // In debug/development mode we want to keep the file for inspection.
260278 outputFile .deleteOnExit ();
261279 }
@@ -347,10 +365,15 @@ private void handleSingleFileFormats(
347365 clientId ,
348366 "Extract result contains %d features, which exceeds the maximum of %d for Excel output format. Please refine your filter or choose a different output format."
349367 .formatted (featCount , ExcelDataStore .getMaxRows ()));
350- throw new ResponseStatusException (
351- org .springframework .http .HttpStatus .BAD_REQUEST ,
352- "Extract result contains %d features, which exceeds the maximum of %d for Excel output format. Please refine your filter or choose a different output format."
353- .formatted (featCount , ExcelDataStore .getMaxRows ()));
368+ logger .error (
369+ "Extract result contains {} features, which exceeds the maximum of {} for Excel output format. Please refine your filter or choose a different output format." ,
370+ featCount ,
371+ ExcelDataStore .getMaxRows ());
372+ // nothing we can do now as we are in a background/async process, so we just return without creating an
373+ // extract file.
374+ // The client will receive no extract completed event, and we have already emitted an error message with
375+ // details.
376+ return ;
354377 }
355378
356379 outputDataStore = this .getExtractDataStore (
@@ -390,7 +413,7 @@ private void handleSingleFileFormats(
390413 this .emitError (clientId , "Output datastore is not a SimpleFeatureStore, cannot write features" );
391414 logger .error ("Output datastore is not a SimpleFeatureStore, cannot write features" );
392415 }
393- } catch (IOException | SchemaException | IllegalArgumentException e ) {
416+ } catch (IOException | SchemaException | IllegalArgumentException | NullPointerException e ) {
394417 emitError (clientId , e .getMessage ());
395418 logger .error ("Creating extract failed" , e );
396419 } finally {
@@ -527,10 +550,9 @@ private void handleWithShapeDumper(
527550 .resolve (baseName )
528551 .toFile ()
529552 .getCanonicalFile ();
530- if (logger .isDebugEnabled ()) {
553+ if (! logger .isDebugEnabled ()) {
531554 // delete in production after JVM exit because the event bus will be reset when the JVM exits, and then
532- // we
533- // are unlikely to have a reference to the file anymore.
555+ // we are unlikely to have a reference to the file anymore.
534556 // In debug/development mode we want to keep the directory for inspection.
535557 outputDirectory .deleteOnExit ();
536558 }
@@ -616,7 +638,7 @@ private Query createQuery(
616638 @ Scheduled (fixedDelay = 5 , timeUnit = TimeUnit .MINUTES , initialDelay = 15 )
617639 public void cleanupExpiredExtracts () {
618640 logger .debug ("Running expired extracts cleanup..." );
619- List <FileWithAttributes > clientFilesOnDisk = new ArrayList <>();
641+ List <FileWithAttributes > oldDownloadFilesOnDisk = new ArrayList <>();
620642 Set <String > validClientIds = eventBus .getAllClientIds ();
621643
622644 // list download files in export location and delete those that are not bound to an active sse stream client
@@ -635,8 +657,12 @@ public void cleanupExpiredExtracts() {
635657 logger .error ("Failed to delete unattached extract file {}" , filename );
636658 }
637659 } else {
638- Instant timestampPart = UUIDv7 .timestampAsInstant (UUIDv7 .fromString (parts [2 ]));
639- clientFilesOnDisk .add (new FileWithAttributes (file , timestampPart , clientId ));
660+ try {
661+ Instant timestampPart = UUIDv7 .timestampAsInstant (UUIDv7 .fromString (parts [2 ]));
662+ oldDownloadFilesOnDisk .add (new FileWithAttributes (file , timestampPart , clientId ));
663+ } catch (IllegalArgumentException ignored ) {
664+ // not a valid v7 uuid
665+ }
640666 }
641667 });
642668
@@ -651,25 +677,39 @@ public void cleanupExpiredExtracts() {
651677 }
652678 String clientId = parts [1 ];
653679 if (!validClientIds .contains (clientId )) {
654- if (!file .delete ()) {
655- logger .error ("Failed to delete unattached extract file {}" , filename );
680+ try {
681+ deleteDirectoryRecursively (file .toPath ());
682+ } catch (IOException e ) {
683+ logger .error ("Failed to delete unattached extract directory {}" , filename );
656684 }
657685 } else {
658- Instant timestampPart = UUIDv7 .timestampAsInstant (UUIDv7 .fromString (parts [2 ]));
659- clientFilesOnDisk .add (new FileWithAttributes (file , timestampPart , clientId ));
686+ try {
687+ Instant timestampPart = UUIDv7 .timestampAsInstant (UUIDv7 .fromString (parts [2 ]));
688+ oldDownloadFilesOnDisk .add (new FileWithAttributes (file , timestampPart , clientId ));
689+ } catch (IllegalArgumentException ignored ) {
690+ // not a valid v7 uuid
691+ }
660692 }
661693 });
662694 }
663695
664- // delete any files are older than the cutoff
665- clientFilesOnDisk .stream ()
696+ // delete any files/directories that are older than the cutoff
697+ oldDownloadFilesOnDisk .stream ()
666698 .filter (f -> f .timestamp ()
667699 .isBefore (Instant .now ().minusSeconds (TimeUnit .MINUTES .toSeconds (cleanupIntervalMinutes ))))
668700 .forEach (f -> {
669- if (!f .file ().delete ()) {
670- logger .error (
671- "Failed to delete expired extract file {}" ,
672- f .file ().getName ());
701+ if (f .file .isDirectory ()) {
702+ try {
703+ deleteDirectoryRecursively (f .file ().toPath ());
704+ } catch (IOException ignored ) {
705+ logger .warn ("Failed to delete directory {}" , f .file ());
706+ }
707+ } else {
708+ if (!f .file ().delete ()) {
709+ logger .error (
710+ "Failed to delete expired extract file {}" ,
711+ f .file ().getName ());
712+ }
673713 }
674714 });
675715 } catch (IOException e ) {
0 commit comments