diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCExporter.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCExporter.java
index 76336ccdbe..03155d1829 100644
--- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCExporter.java
+++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCExporter.java
@@ -23,6 +23,7 @@
 import static com.here.xyz.events.ContextAwareEvent.SpaceContext.EXTENSION;
 import static com.here.xyz.events.ContextAwareEvent.SpaceContext.SUPER;
 import static com.here.xyz.httpconnector.util.jobs.Job.CSVFormat.PARTITIONID_FC_B64;
+import static com.here.xyz.httpconnector.util.jobs.Job.CSVFormat.JSON_WKB;
 
 import com.here.xyz.events.ContextAwareEvent;
 import com.here.xyz.events.GetFeaturesByGeometryEvent;
@@ -144,7 +145,7 @@ Is used for incremental exports (tiles) - here we have to export modified tiles.
                     });
                 default:
                     exportQuery = generateFilteredExportQuery(job.getId(), schema, job.getTargetSpaceId(), propertyFilter, spatialFilter,
-                        job.getTargetVersion(), job.getParams(), job.getCsvFormat());
+                        job.getTargetVersion(), job.getParams(), job.getCsvFormat(), compositeCalculation);
                     return calculateThreadCountForDownload(job, schema, exportQuery)
                         .compose(threads -> {
                             try {
@@ -153,7 +154,7 @@ Is used for incremental exports (tiles) - here we have to export modified tiles.
 
                                 for (int i = 0; i < threads; i++) {
                                     String s3Prefix = i + "_";
-                                    SQLQuery q2 = buildS3ExportQuery(job, schema, s3Bucket, s3Path, s3Prefix, s3Region,
+                                    SQLQuery q2 = buildS3ExportQuery(job, schema, s3Bucket, s3Path, s3Prefix, s3Region, compositeCalculation,
                                         (threads > 1 ? new SQLQuery("AND i%% " + threads + " = " + i) : null));
                                     exportFutures.add(exportTypeDownload(job.getTargetConnector(), q2, job, s3Path));
                                 }
@@ -313,14 +314,14 @@ public static SQLQuery buildVMLCalculateQuery(Export job, String schema, SQLQuery
 
     public static SQLQuery buildS3ExportQuery(Export j, String schema,
                                               String s3Bucket, String s3Path, String s3FilePrefix, String s3Region,
-                                              SQLQuery customWhereCondition) throws SQLException {
+                                              boolean isForCompositeContentDetection, SQLQuery customWhereCondition) throws SQLException {
         String propertyFilter = (j.getFilters() == null ? null : j.getFilters().getPropertyFilter());
         Export.SpatialFilter spatialFilter= (j.getFilters() == null ? null : j.getFilters().getSpatialFilter());
 
         s3Path = s3Path+ "/" +(s3FilePrefix == null ? "" : s3FilePrefix)+"export.csv";
 
         SQLQuery exportSelectString = generateFilteredExportQuery(j.getId(), schema, j.getTargetSpaceId(), propertyFilter, spatialFilter,
-            j.getTargetVersion(), j.getParams(), j.getCsvFormat(), customWhereCondition, false,
+            j.getTargetVersion(), j.getParams(), j.getCsvFormat(), customWhereCondition, isForCompositeContentDetection,
             j.getPartitionKey(), j.getOmitOnNull());
 
         SQLQuery q = new SQLQuery("SELECT * /* s3_export_hint m499#jobId(" + j.getId() + ") */ from aws_s3.query_export_to_s3( "+
@@ -409,6 +410,12 @@ private static SQLQuery generateFilteredExportQuery(String jobId, String schema,
         return generateFilteredExportQuery(jobId, schema, spaceId, propertyFilter, spatialFilter, targetVersion, params, csvFormat, null, false, null, false);
     }
 
+    private static SQLQuery generateFilteredExportQuery(String jobId, String schema, String spaceId, String propertyFilter,
+        Export.SpatialFilter spatialFilter, String targetVersion, Map params, CSVFormat csvFormat, boolean isForCompositeContentDetection) throws SQLException {
+        return generateFilteredExportQuery(jobId, schema, spaceId, propertyFilter, spatialFilter, targetVersion, params, csvFormat, null, isForCompositeContentDetection, null, false);
+    }
+
+
     private static SQLQuery generateFilteredExportQueryForCompositeTileCalculation(String jobId, String schema, String spaceId, String propertyFilter,
         Export.SpatialFilter spatialFilter, String targetVersion, Map params, CSVFormat csvFormat) throws SQLException {
         return generateFilteredExportQuery(jobId, schema, spaceId, propertyFilter, spatialFilter, targetVersion, params, csvFormat, null, true, null, false);
@@ -473,12 +480,13 @@ private static SQLQuery generateFilteredExportQuery(String jobId, String schema,
         dbHandler.setConfig(new PSQLConfig(event, schema));
 
         boolean partitionByPropertyValue = ( csvFormat == PARTITIONID_FC_B64 && partitionKey != null && !"id".equalsIgnoreCase(partitionKey)),
-                partitionByFeatureId = ( csvFormat == PARTITIONID_FC_B64 && !partitionByPropertyValue );
+                partitionByFeatureId = ( csvFormat == PARTITIONID_FC_B64 && !partitionByPropertyValue ),
+                downloadAsJsonWkb = ( csvFormat == JSON_WKB );
 
         SpaceContext ctxStashed = event.getContext();
 
         if (isForCompositeContentDetection)
-            event.setContext( partitionByFeatureId ? EXTENSION : COMPOSITE_EXTENSION);
+            event.setContext( (partitionByFeatureId || downloadAsJsonWkb) ? EXTENSION : COMPOSITE_EXTENSION);
 
         SQLQuery sqlQuery, sqlQueryContentByPropertyValue = null;
diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/util/jobs/Export.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/util/jobs/Export.java
index e78861d6aa..0a741c706b 100644
--- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/util/jobs/Export.java
+++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/util/jobs/Export.java
@@ -53,6 +53,7 @@
 import static com.here.xyz.httpconnector.util.jobs.Export.ExportTarget.Type.VML;
 import static com.here.xyz.httpconnector.util.jobs.Job.CSVFormat.PARTITIONID_FC_B64;
 import static com.here.xyz.httpconnector.util.jobs.Job.CSVFormat.TILEID_FC_B64;
+import static com.here.xyz.httpconnector.util.jobs.Job.CSVFormat.JSON_WKB;
 import static com.here.xyz.httpconnector.util.jobs.Job.Status.*;
 import static com.here.xyz.httpconnector.util.scheduler.JobQueue.*;
 import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
@@ -252,10 +253,10 @@ private Export validateExport() throws HttpException {
         }
 
         if(!compositeMode.equals(CompositeMode.DEACTIVATED)) {
-            if (getExportTarget().getType() == DOWNLOAD)
+            if (getExportTarget().getType() == DOWNLOAD && getCsvFormat() != JSON_WKB)
                 throw new HttpException(HttpResponseStatus.BAD_REQUEST, "CompositeMode is not available for Type Download!");
 
-            if (getCsvFormat() != TILEID_FC_B64 && getCsvFormat() != PARTITIONID_FC_B64)
+            if (getCsvFormat() != TILEID_FC_B64 && getCsvFormat() != PARTITIONID_FC_B64 && getCsvFormat() != JSON_WKB)
                 throw new HttpException(BAD_REQUEST, "CompositeMode does not support the provided CSV format!");
 
             if(ext == null) {
@@ -299,9 +300,12 @@ protected Future isValidForStart() {
         CompositeMode compositeMode = readParamCompositeMode();
 
         if (!compositeMode.equals(CompositeMode.DEACTIVATED)) {
-            if (getCsvFormat() != TILEID_FC_B64 && getCsvFormat() != PARTITIONID_FC_B64)
-                return Future.failedFuture(new HttpException(BAD_REQUEST, "CSV format is not supported for CompositeMode!"));
-            if (getExportTarget().getType() == DOWNLOAD)
+            switch( getCsvFormat() )
+            { case TILEID_FC_B64 : case PARTITIONID_FC_B64 : case JSON_WKB : break;
+              default: return Future.failedFuture(new HttpException(BAD_REQUEST, "CSV format is not supported for CompositeMode!"));
+            }
+
+            if (getExportTarget().getType() == DOWNLOAD && getCsvFormat() != JSON_WKB )
                 return Future.failedFuture(new HttpException(HttpResponseStatus.BAD_REQUEST, "CompositeMode Export is not available for Type Download!"));
         }
 
diff --git a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/jobs/JobApiCompositeExportIT.java b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/jobs/JobApiCompositeExportIT.java
index 1f529dfe2f..d1886237a5 100644
--- a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/jobs/JobApiCompositeExportIT.java
+++ b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/jobs/JobApiCompositeExportIT.java
@@ -160,17 +160,6 @@ public void invalidConfig() throws Exception{
         }
 
         deleteAllJobsOnSpace(testSpaceId1Ext);
 
-        job = buildTestJob(testExportJobId, null, new Export.ExportTarget().withType(DOWNLOAD), JSON_WKB);
-        try {
-            /** Invalid Type - creation fails */
-            performExport(job, testSpaceId1, finalized, failed, Export.CompositeMode.CHANGES);
-        }catch (HttpException e){
-            assertEquals(BAD_REQUEST, e.status);
-            exceptionCnt++;
-        }
-
-        deleteAllJobsOnSpace(testSpaceId1Ext);
-
         job = buildTestJob(testExportJobId, null, new Export.ExportTarget().withType(DOWNLOAD), GEOJSON);
         try {
             /** No extended layer - creation fails */
@@ -181,7 +170,7 @@ public void invalidConfig() throws Exception{
         }
 
         /** Check if we got the expected amount of failures */
-        assertEquals(3, exceptionCnt);
+        assertEquals(2, exceptionCnt);
     }
 
     @Test
diff --git a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/jobs/JobApiExportIT.java b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/jobs/JobApiExportIT.java
index a1e327bb0c..9ed5bc3428 100644
--- a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/jobs/JobApiExportIT.java
+++ b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/jobs/JobApiExportIT.java
@@ -676,6 +676,20 @@ public void testFullVMLCompositeL1ExportByTileChanges() throws Exception {
         downloadAndCheckFC(urls, 1306, 3, mustContain, 3);
     }
 
+    @Test
+    public void testFullVMLCompositeL1ExportJsonWkbChanges() throws Exception {
+//      export json_wkb only changes
+        Export.ExportTarget exportTarget = new Export.ExportTarget().withType(DOWNLOAD);
+
+        /** Create job */
+        Export job = buildTestJob(testExportJobId, null, exportTarget, Job.CSVFormat.JSON_WKB);
+        List urls = performExport(job, getScopedSpaceId(testSpaceId3Ext, scope), finalized, failed, Export.CompositeMode.CHANGES );
+
+        List mustContain = Arrays.asList("id000", "id002", "movedFromEmpty", "deltaonly");
+
+        downloadAndCheck(urls, 635, 2, mustContain);
+    }
+
     /** ------------------- only for local testing with big spaces -------------------- */
 
 //    @Test
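
Note (illustrative sketch, not part of the diff): taken together, these changes let a composite space be exported as a plain DOWNLOAD in JSON_WKB format containing only the delta ("changes") of the extension. A minimal usage sketch, reusing the helpers and placeholder ids from the new integration test above (buildTestJob, performExport, getScopedSpaceId and downloadAndCheck come from the existing test base class; the asserted counts are test-specific, not general values):

    // Sketch only - mirrors testFullVMLCompositeL1ExportJsonWkbChanges above.
    Export.ExportTarget exportTarget = new Export.ExportTarget().withType(DOWNLOAD);
    // JSON_WKB is now accepted together with CompositeMode.CHANGES for DOWNLOAD targets
    // (see the relaxed checks in Export.validateExport() / isValidForStart()).
    Export job = buildTestJob(testExportJobId, null, exportTarget, Job.CSVFormat.JSON_WKB);
    // JDBCExporter switches the query context to EXTENSION for JSON_WKB, so only delta features are read.
    List urls = performExport(job, getScopedSpaceId(testSpaceId3Ext, scope), finalized, failed, Export.CompositeMode.CHANGES);
    List mustContain = Arrays.asList("id000", "id002", "movedFromEmpty", "deltaonly");
    downloadAndCheck(urls, 635, 2, mustContain);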