From 3a37a708278682b91143e52f12a23533fbcb0c77 Mon Sep 17 00:00:00 2001 From: GeoNetwork opensource <59019313+geonetworkbuild@users.noreply.github.com> Date: Sat, 2 Mar 2024 22:39:20 -0800 Subject: [PATCH 001/317] Create scorecard.yml Add the official GitHub Action for OSSF Scorecards. --- .github/workflows/scorecard.yml | 72 +++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 .github/workflows/scorecard.yml diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 00000000000..e552ee3ea2c --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,72 @@ +# This workflow uses actions that are not certified by GitHub. They are provided +# by a third-party and are governed by separate terms of service, privacy +# policy, and support documentation. + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '26 10 * * 5' + push: + branches: [ "main" ] + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). + id-token: write + # Uncomment the permissions below if installing in a private repository. + # contents: read + # actions: read + + steps: + - name: "Checkout code" + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2 + with: + results_file: results.sarif + results_format: sarif + # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: + # - you want to enable the Branch-Protection check on a *public* repository, or + # - you are installing Scorecard on a *private* repository + # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. + # repo_token: ${{ secrets.SCORECARD_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. 
+ - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4 + with: + sarif_file: results.sarif From eb4f083abfb272c7513e165301d0de86c62d9ea8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois=20Prunayre?= Date: Mon, 4 Mar 2024 10:23:15 +0100 Subject: [PATCH 002/317] Thesaurus / Improve support of EU publication office SKOS format (#7673) * Thesaurus / Improve support of EU publication office SKOS format When loading a thesaurus downloadable from the EU publication office, the title and namespace of the thesaurus are not extracted properly. The SKOS format provided contains specificities, e.g. https://op.europa.eu/en/web/eu-vocabularies/dataset/-/resource?uri=http://publications.europa.eu/resource/dataset/data-theme The thesaurus title is stored in various properties, i.e. `at:prefLabel`, `rdfs:label` and `skos:prefLabel`, but none of them were used so far for title extraction:
```xml
<at:prefLabel xml:lang="en">Data theme</at:prefLabel>
<rdfs:label xml:lang="en">Data theme</rdfs:label>
<at:table.version.number>20220715-0</at:table.version.number>
<skos:prefLabel xml:lang="en">Data theme</skos:prefLabel>
```
This change adds them to the XPath. It also fixes some SonarLint items. Funded by Wallonia region (SPW) * Update core/src/main/java/org/fao/geonet/kernel/Thesaurus.java --- .../org/fao/geonet/kernel/AllThesaurus.java | 22 +++----- .../java/org/fao/geonet/kernel/Thesaurus.java | 53 ++++++++++--------- .../api/records/formatters/FormatterApi.java | 2 +- 3 files changed, 38 insertions(+), 39 deletions(-) diff --git a/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java b/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java index 971e1c072a0..361c7fc816b 100644 --- a/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java +++ b/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java @@ -27,15 +27,13 @@ import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.common.collect.Maps; - -import org.locationtech.jts.util.Assert; - import org.fao.geonet.Constants; import org.fao.geonet.constants.Geonet; import org.fao.geonet.exceptions.TermNotFoundException; import org.fao.geonet.kernel.search.keyword.KeywordRelation; import org.fao.geonet.languages.IsoLanguagesMapper; import org.fao.geonet.utils.Log; +import org.locationtech.jts.util.Assert; import org.openrdf.model.GraphException; import org.openrdf.model.URI; import org.openrdf.sesame.config.AccessDeniedException; @@ -46,6 +44,8 @@ import org.openrdf.sesame.repository.local.LocalRepository; import org.springframework.beans.factory.annotation.Autowired; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; @@ -59,9 +59,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - /** * @author Jesse on 2/27/2015.
*/ @@ -221,8 +218,7 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce } @Override - public synchronized Thesaurus removeElement(KeywordBean keyword) throws MalformedQueryException, QueryEvaluationException, - IOException, AccessDeniedException { + public synchronized Thesaurus removeElement(KeywordBean keyword) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -237,8 +233,7 @@ public synchronized Thesaurus removeElement(String uri) throws AccessDeniedExcep } @Override - public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -266,12 +261,12 @@ public Thesaurus updateCode(KeywordBean bean, String newcode) throws AccessDenie } @Override - public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException { throw new UnsupportedOperationException(); } @Override - public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -287,8 +282,7 @@ public IsoLanguagesMapper getIsoLanguageMapper() { } @Override - public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException, - IOException, MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException { throw new UnsupportedOperationException(); } diff --git a/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java b/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java index d9937ed8958..efaeaf60a89 100644 --- a/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java +++ b/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java @@ -76,6 +76,7 @@ public class Thesaurus { private static final String DEFAULT_THESAURUS_NAMESPACE = "http://custom.shared.obj.ch/concept#"; private static final String RDF_NAMESPACE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; + private static final String RDF_SCHEMA_NAMESPACE = "http://www.w3.org/2000/01/rdf-schema#"; private static final String SKOS_NAMESPACE = "http://www.w3.org/2004/02/skos/core#"; @@ -360,7 +361,8 @@ public boolean hasConceptScheme(String uri) { try { return performRequest(query).getRowCount() > 0; } catch (Exception e) { - Log.error(Geonet.THESAURUS_MAN, "Error retrieving concept scheme for " + thesaurusFile + ". Error is: " + e.getMessage()); + Log.error(Geonet.THESAURUS_MAN, + String.format("Error retrieving concept scheme for %s. Error is: %s", thesaurusFile, e.getMessage())); throw new RuntimeException(e); } } @@ -380,7 +382,8 @@ public List getConceptSchemes() { } return ret; } catch (Exception e) { - Log.error(Geonet.THESAURUS_MAN, "Error retrieving concept schemes for " + thesaurusFile + ". Error is: " + e.getMessage()); + Log.error(Geonet.THESAURUS_MAN, String.format( + "Error retrieving concept schemes for %s. 
Error is: %s", thesaurusFile, e.getMessage())); return Collections.emptyList(); } } @@ -452,8 +455,7 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce /** * Remove keyword from thesaurus. */ - public synchronized Thesaurus removeElement(KeywordBean keyword) throws MalformedQueryException, - QueryEvaluationException, IOException, AccessDeniedException { + public synchronized Thesaurus removeElement(KeywordBean keyword) throws AccessDeniedException { String namespace = keyword.getNameSpaceCode(); String code = keyword.getRelativeCode(); @@ -518,8 +520,7 @@ private String toiso639_1_Lang(String lang) { * languages) and the coordinates will only be updated if they are non-empty * strings. */ - public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException { THESAURUS_SEARCH_CACHE.invalidateAll(); // Get thesaurus graph @@ -661,7 +662,7 @@ public Thesaurus updateCode(KeywordBean bean, String newcode) throws AccessDenie * Update concept code by creating URI from namespace and code. This is recommended when * thesaurus concept identifiers contains # eg. http://vocab.nerc.ac.uk/collection/P07/current#CFV13N44 */ - public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException { Graph myGraph = repository.getGraph(); ValueFactory myFactory = myGraph.getValueFactory(); @@ -679,7 +680,7 @@ public synchronized Thesaurus updateCode(String namespace, String oldcode, Strin * * eg. 
http://vocab.nerc.ac.uk/collection/P07/current/CFV13N44/ */ - public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException { Graph myGraph = repository.getGraph(); ValueFactory myFactory = myGraph.getValueFactory(); @@ -894,7 +895,11 @@ private void retrieveDublinCore(Element thesaurusEl) { // } private void retrieveMultiLingualTitles(Element thesaurusEl) { try { - String xpathTitles = "skos:ConceptScheme/dc:title[@xml:lang]|skos:ConceptScheme/dcterms:title[@xml:lang]|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]"; + String xpathTitles = "skos:ConceptScheme/dc:title[@xml:lang]" + + "|skos:ConceptScheme/dcterms:title[@xml:lang]" + + "|skos:ConceptScheme/rdfs:label[@xml:lang]" + + "|skos:ConceptScheme/skos:prefLabel[@xml:lang]" + + "|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]"; multilingualTitles.clear(); multilingualTitles.putAll(retrieveMultilingualField(thesaurusEl, xpathTitles)); } catch (Exception e) { @@ -944,25 +949,23 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl try { Element thesaurusEl = Xml.loadFile(thesaurusFile); - List theNSs = new ArrayList<>(); - Namespace rdfNamespace = Namespace.getNamespace("rdf", RDF_NAMESPACE); - theNSs.add(rdfNamespace); - theNSs.add(Namespace.getNamespace("skos", SKOS_NAMESPACE)); - theNSs.add(Namespace.getNamespace("dc", DC_NAMESPACE)); - theNSs.add(Namespace.getNamespace("dcterms", DCTERMS_NAMESPACE)); + List theNSs = getThesaurusNamespaces(); this.defaultNamespace = null; retrieveMultiLingualTitles(thesaurusEl); retrieveDublinCore(thesaurusEl); Element titleEl = Xml.selectElement(thesaurusEl, - "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title|" + - "skos:Collection/dc:title|skos:Collection/dcterms:title|" + - "rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs); + "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title" + + "|skos:ConceptScheme/rdfs:label|skos:ConceptScheme/skos:prefLabel" + + "|skos:Collection/dc:title|skos:Collection/dcterms:title" + + "|rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs); if (titleEl != null) { this.title = titleEl.getValue(); - this.defaultNamespace = titleEl.getParentElement().getAttributeValue("about", rdfNamespace); + this.defaultNamespace = titleEl + .getParentElement() + .getAttributeValue("about", Namespace.getNamespace("rdf", RDF_NAMESPACE)); } else { this.title = defaultTitle; this.defaultNamespace = DEFAULT_THESAURUS_NAMESPACE; @@ -1027,11 +1030,13 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl } if (Log.isDebugEnabled(Geonet.THESAURUS_MAN)) { - Log.debug(Geonet.THESAURUS_MAN, "Thesaurus information: " + this.title + " (" + this.date + ")"); + Log.debug(Geonet.THESAURUS_MAN, String.format( + "Thesaurus information: %s (%s)", this.title, this.date)); } } catch (Exception ex) { if (!ignoreMissingError) - Log.error(Geonet.THESAURUS_MAN, "Error getting thesaurus info for " + thesaurusFile + ". Error is: " + ex.getMessage()); + Log.error(Geonet.THESAURUS_MAN, String.format( + "Error getting thesaurus info for %s. 
Error is: %s", thesaurusFile, ex.getMessage())); } } @@ -1102,8 +1107,7 @@ public IsoLanguagesMapper getIsoLanguageMapper() { * @param subject the keyword that is related to the other keyword * @param related the relation between the two keywords */ - public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException { THESAURUS_SEARCH_CACHE.invalidateAll(); Graph myGraph = repository.getGraph(); @@ -1126,7 +1130,7 @@ public synchronized void addRelation(String subject, KeywordRelation related, St * @return keyword */ public KeywordBean getKeyword(String uri, String... languages) { - String cacheKey = "getKeyword" + uri + Arrays.stream(languages).collect(Collectors.joining("")); + String cacheKey = "getKeyword" + uri + String.join("", languages); Object cacheValue = THESAURUS_SEARCH_CACHE.getIfPresent(cacheKey); if (cacheValue != null) { return (KeywordBean) cacheValue; @@ -1370,6 +1374,7 @@ private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) private List getThesaurusNamespaces() { List theNSs = new ArrayList<>(); theNSs.add(Namespace.getNamespace("rdf", RDF_NAMESPACE)); + theNSs.add(Namespace.getNamespace("rdfs", RDF_SCHEMA_NAMESPACE)); theNSs.add(Namespace.getNamespace("skos", SKOS_NAMESPACE)); theNSs.add(Namespace.getNamespace("dc", DC_NAMESPACE)); theNSs.add(Namespace.getNamespace("dcterms", DCTERMS_NAMESPACE)); diff --git a/services/src/main/java/org/fao/geonet/api/records/formatters/FormatterApi.java b/services/src/main/java/org/fao/geonet/api/records/formatters/FormatterApi.java index 78289ce69e8..1a9830c3ef6 100644 --- a/services/src/main/java/org/fao/geonet/api/records/formatters/FormatterApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/formatters/FormatterApi.java @@ -226,7 +226,7 @@ public void getRecordFormattedBy( // if text/html > xsl_view // if application/pdf > xsl_view and PDF output // if application/x-gn-+(xml|html|pdf|text) - // Force PDF ouutput when URL parameter is set. + // Force PDF output when URL parameter is set. // This is useful when making GET link to PDF which // can not use headers. 
if (MediaType.ALL_VALUE.equals(acceptHeader)) { From 99f6b8e99fbbba98f7336b5a5326e732fce9b348 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jose=20Garc=C3=ADa?= Date: Thu, 15 Feb 2024 16:16:40 +0100 Subject: [PATCH 003/317] Harvesters / Reset harvester history pagination when selecting a harvester --- .../resources/catalog/js/admin/HarvestSettingsController.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/web-ui/src/main/resources/catalog/js/admin/HarvestSettingsController.js b/web-ui/src/main/resources/catalog/js/admin/HarvestSettingsController.js index dbcd3d5fc9e..9da0c24bcaa 100644 --- a/web-ui/src/main/resources/catalog/js/admin/HarvestSettingsController.js +++ b/web-ui/src/main/resources/catalog/js/admin/HarvestSettingsController.js @@ -438,6 +438,12 @@ $scope.harvesterHistory = {}; $scope.searchResults = null; $scope.searchResultsTotal = null; + $scope.harvesterHistoryPaging = { + page: 1, + size: 3, + pages: 0, + total: 0 + }; loadHarvester(h["@id"]).then(function (data) { loadHistory(); From 18b0d73026b6d2540ae67299a8576ea6a1fbf239 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jose=20Garc=C3=ADa?= Date: Mon, 4 Mar 2024 10:41:12 +0100 Subject: [PATCH 004/317] Record view / ISO19139 / ISO19115-3.2008 display the unit part in @uom attribute, not the full url (#7791) * Record view / ISO19139 / ISO19115-3.2008 display the unit part in @uom attribute, not the full url * Record view / ISO19139 / ISO19115-3.2008 display the unit part in @uom attribute, not the full url: - Update indexing. - Handle units not prefixed with url --- .../main/plugin/iso19115-3.2018/formatter/xsl-view/view.xsl | 5 ++++- .../src/main/plugin/iso19115-3.2018/index-fields/index.xsl | 4 +++- .../src/main/plugin/iso19139/formatter/xsl-view/view.xsl | 5 ++++- .../iso19139/src/main/plugin/iso19139/index-fields/index.xsl | 4 +++- 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/formatter/xsl-view/view.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/formatter/xsl-view/view.xsl index abb49d93770..62a7f60fd03 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/formatter/xsl-view/view.xsl +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/formatter/xsl-view/view.xsl @@ -1180,7 +1180,10 @@ -   + +   diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/index-fields/index.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/index-fields/index.xsl index d1cefd8d1ff..18290d71948 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/index-fields/index.xsl +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/index-fields/index.xsl @@ -692,7 +692,9 @@ - + diff --git a/schemas/iso19139/src/main/plugin/iso19139/formatter/xsl-view/view.xsl b/schemas/iso19139/src/main/plugin/iso19139/formatter/xsl-view/view.xsl index 3f13daa000f..fdb57470c2f 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/formatter/xsl-view/view.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/formatter/xsl-view/view.xsl @@ -1151,7 +1151,10 @@ -   + +   diff --git a/schemas/iso19139/src/main/plugin/iso19139/index-fields/index.xsl b/schemas/iso19139/src/main/plugin/iso19139/index-fields/index.xsl index 9e25b4c4d99..3d20f428272 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/index-fields/index.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/index-fields/index.xsl @@ -641,7 +641,9 @@ - + From e349d89ceb60c3e0debade4c6382a5748ab7c547 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Fran=C3=A7ois=20Prunayre?= Date: Mon, 4 Mar 2024 11:30:30 +0100 Subject: [PATCH 005/317] Harvester / WFS / No need to manually managed commit interval (#7737) * Harvester / WFS / No need to manually manged commit interval ES8 BulkIngester takes care of it. * Update EsWFSFeatureIndexer.java * Harvester / WFS / Improve JSON data encoding in query. Save report at the end. Clarify parameter bulk size is now max operations on ES side. --- .../worker/EsWFSFeatureIndexer.java | 74 ++++++------------- 1 file changed, 23 insertions(+), 51 deletions(-) diff --git a/workers/wfsfeature-harvester/src/main/java/org/fao/geonet/harvester/wfsfeatures/worker/EsWFSFeatureIndexer.java b/workers/wfsfeature-harvester/src/main/java/org/fao/geonet/harvester/wfsfeatures/worker/EsWFSFeatureIndexer.java index d3a78f2e28a..dc46c4afdde 100644 --- a/workers/wfsfeature-harvester/src/main/java/org/fao/geonet/harvester/wfsfeatures/worker/EsWFSFeatureIndexer.java +++ b/workers/wfsfeature-harvester/src/main/java/org/fao/geonet/harvester/wfsfeatures/worker/EsWFSFeatureIndexer.java @@ -34,6 +34,8 @@ import co.elastic.clients.elasticsearch.core.bulk.IndexOperation; import co.elastic.clients.json.JsonData; import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.util.BinaryData; +import co.elastic.clients.util.ContentType; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -281,14 +283,13 @@ public CompletableFuture indexFeatures(Exchange exchange) throws Exception LOGGER.error(msg); throw new RuntimeException(msg); } - final Phaser phaser = new Phaser(); - BulkResutHandler brh = new AsyncBulkResutHandler(phaser, typeName, url, nbOfFeatures, report, state.getParameters().getMetadataUuid()); + BulkResutHandler brh = new AsyncBulkResutHandler(typeName, url, nbOfFeatures, report, state.getParameters().getMetadataUuid()); try { nbOfFeatures = 0; - long begin = System.currentTimeMillis(); + long begin = System.currentTimeMillis(); String epsg = "urn:ogc:def:crs:OGC:1.3:CRS84"; for (String featureType : resolvedTypeNames) { @@ -423,7 +424,7 @@ public CompletableFuture indexFeatures(Exchange exchange) throws Exception } } - nbOfFeatures++; + nbOfFeatures ++; brh.addAction(rootNode, feature); } catch (Exception ex) { @@ -434,28 +435,12 @@ public CompletableFuture indexFeatures(Exchange exchange) throws Exception LOGGER.warn(msg); report.put("error_ss", msg); } - - if (brh.getBulkSize() >= featureCommitInterval) { - brh.launchBulk(client); - brh = new AsyncBulkResutHandler(phaser, typeName, url, nbOfFeatures, report, state.getParameters().getMetadataUuid()); - } } } finally { features.close(); } } - if (brh.getBulkSize() > 0) { - brh.launchBulk(client); - } - - try { - if (nbOfFeatures > 0) { - phaser.awaitAdvanceInterruptibly(0, 3, TimeUnit.HOURS); - } - } catch (TimeoutException e) { - throw new Exception("Timeout when awaiting all bulks to be processed."); - } LOGGER.info("{}: {} features processed in {} ms.", new Object[]{ typeName, nbOfFeatures, System.currentTimeMillis() - begin @@ -467,11 +452,12 @@ public CompletableFuture indexFeatures(Exchange exchange) throws Exception LOGGER.error(e.getMessage()); throw e; } finally { - report.saveHarvesterReport(); brh.close(); + report.saveHarvesterReport(); future.complete(null); } + return future; } @@ -522,7 +508,7 @@ class Report { private String typeName; private boolean pointOnlyForGeoms; - public Report(String url, String typeName) throws 
UnsupportedEncodingException { + public Report(String url, String typeName) { this.typeName = typeName; this.url = url; pointOnlyForGeoms = true; @@ -556,7 +542,7 @@ public boolean saveHarvesterReport() { try { IndexResponse response = client.getClient().index(request); - if (response.result() == Result.Created) { + if (response.result() == Result.Created || response.result() == Result.Updated) { LOGGER.info("Report saved for service {} and typename {}. Report id is {}", url, typeName, report.get("id")); } else { @@ -584,7 +570,6 @@ private String getIdentifier(String url, String typeName) { abstract class BulkResutHandler { - protected Phaser phaser; protected String typeName; private String url; protected int firstFeatureIndex; @@ -596,8 +581,7 @@ abstract class BulkResutHandler { protected int failuresCount; BulkListener listener; - public BulkResutHandler(Phaser phaser, String typeName, String url, int firstFeatureIndex, Report report, String metadataUuid) { - this.phaser = phaser; + public BulkResutHandler(String typeName, String url, int firstFeatureIndex, Report report, String metadataUuid) { this.typeName = typeName; this.url = url; this.firstFeatureIndex = firstFeatureIndex; @@ -607,8 +591,8 @@ public BulkResutHandler(Phaser phaser, String typeName, String url, int firstFea this.bulkSize = 0; this.failuresCount = 0; - LOGGER.debug(" {} - Indexing bulk (size {}) starting at {} ...", - typeName, featureCommitInterval, firstFeatureIndex); + LOGGER.debug(" {} - Indexing with bulk ingester (with maxOperations {}) ...", + typeName, featureCommitInterval); listener = new BulkListener() { @Override @@ -630,30 +614,30 @@ public void afterBulk(long executionId, BulkRequest request, List contex } }); } - LOGGER.debug(" {} - Features [{}-{}] indexed in {} ms{}.", typeName, firstFeatureIndex, firstFeatureIndex + bulkSize, + LOGGER.debug(" {} - {} features indexed in {} ms{}.", typeName, firstFeatureIndex + bulkSize, System.currentTimeMillis() - begin, bulkResponse.errors() ? " but with " + bulkFailures + " errors" : ""); failuresCount = bulkFailures.get(); - phaser.arriveAndDeregister(); } @Override public void afterBulk(long executionId, BulkRequest request, List contexts, Throwable failure) { String msg = String.format( - " %s - Features [%s-%s] indexed in %s ms but with errors. Exception: %s", - typeName, firstFeatureIndex, firstFeatureIndex + bulkSize, + " %s - %s features indexed in %s ms but with errors. 
Exception: %s", + typeName, firstFeatureIndex + bulkSize, System.currentTimeMillis() - begin, failure.getMessage() ); report.put("error_ss", msg); LOGGER.error(msg); - phaser.arriveAndDeregister(); } }; this.bulk = BulkIngester.of(b -> b.client(client.getAsynchClient()) .listener(listener) + // .maxConcurrentRequests(1) + // .flushInterval(10, TimeUnit.SECONDS) .maxOperations(featureCommitInterval)); } @@ -674,41 +658,29 @@ public void addAction(ObjectNode rootNode, SimpleFeature feature) throws JsonPro } String id = String.format("%s#%s#%s", url, typeName, featureId); - StringReader reader = new StringReader(jacksonMapper.writeValueAsString(rootNode)); - // https://discuss.elastic.co/t/java-8-1-bulk-request/302423 - JsonpMapper jsonpMapper = client.getClient()._transport().jsonpMapper(); - JsonProvider jsonProvider = jsonpMapper.jsonProvider(); - JsonData jd = JsonData.from(jsonProvider.createParser(reader), jsonpMapper); + BinaryData data = BinaryData.of( + jacksonMapper.writeValueAsString(rootNode).getBytes(StandardCharsets.UTF_8), + ContentType.APPLICATION_JSON); bulk.add(b -> b.index(io -> io .index(index) .id(id) - .document(jd)), id); + .document(data)), id); bulkSize++; } - protected void prepareLaunch() { - phaser.register(); - this.begin = System.currentTimeMillis(); - } public void close() { if (this.bulk != null) { this.bulk.close(); } } - - abstract public void launchBulk(EsRestClient client) throws Exception; } // depending on situation, one can expect going up to 1.5 faster using an async result handler (e.g. huge collection of points) class AsyncBulkResutHandler extends BulkResutHandler { - public AsyncBulkResutHandler(Phaser phaser, String typeName, String url, int firstFeatureIndex, Report report, String metadataUuid) { - super(phaser, typeName, url, firstFeatureIndex, report, metadataUuid); - } - - public void launchBulk(EsRestClient client) throws Exception { - prepareLaunch(); + public AsyncBulkResutHandler(String typeName, String url, int firstFeatureIndex, Report report, String metadataUuid) { + super(typeName, url, firstFeatureIndex, report, metadataUuid); } } From 3265c37ed1361b3dc19fc3a4ccafde2deb8c4aab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois=20Prunayre?= Date: Mon, 4 Mar 2024 11:32:27 +0100 Subject: [PATCH 006/317] Harvester / Localfilesystem / Log properly to harvester log file. (#7660) * Harvester / Localfilesystem / Log properly to harvester log file. Follow up of https://github.com/geonetwork/core-geonetwork/pull/2765 and fix harvester logger which was not reporting any error anymore into the harvester logfile that user can download after harvest. 
![image](https://github.com/geonetwork/core-geonetwork/assets/1701393/d84e643f-6713-4821-9d04-bde2e9c1c911) It only reports:
```
2024-01-25T11:06:22,547 WARN [geonetwork.harvester] - Start of alignment for : SEANOE
2024-01-25T11:08:25,135 INFO [geonetwork.harvester] - Ended harvesting from node : SEANOE (LocalFilesystemHarvester)
```
If using the harvester logger as before, then the log file properly contains errors:
```
2024-01-25T11:06:22,547 WARN [geonetwork.harvester] - Start of alignment for : SEANOE
2024-01-25T11:06:55,675 ERROR [geonetwork.harvester] - Error transforming JSON into XML from file /data/75639.json, ignoring
2024-01-25T11:07:33,893 ERROR [geonetwork.harvester] - Error transforming JSON into XML from file /data/59178.json, ignoring
2024-01-25T11:08:25,135 INFO [geonetwork.harvester] - Ended harvesting from node : SEANOE (LocalFilesystemHarvester)
```
To test, try to import a file which will trigger an error, like invalid XML. * Harvester / Localfilesystem / Remove duplicated privileges deletion and add option to append privileges (in case the user wants to also manage privileges locally). * Update LocalFilesystemHarvester.java --- .../src/main/java/org/fao/geonet/Logger.java | 53 +++++++++----- .../main/java/org/fao/geonet/utils/Log.java | 73 ++++++++++++++++--- .../jeeves/server/context/BasicContext.java | 43 ++++++++++- .../geonet/kernel/harvest/BaseAligner.java | 4 +- .../harvest/harvester/AbstractHarvester.java | 4 + .../LocalFilesystemHarvester.java | 3 - .../LocalFsHarvesterFileVisitor.java | 73 +++++++++---------- .../admin/harvester/partials/privileges.html | 2 +- .../migrate/v3110/UpdateMetadataStatus.java | 6 +- 9 files changed, 180 insertions(+), 81 deletions(-) diff --git a/common/src/main/java/org/fao/geonet/Logger.java b/common/src/main/java/org/fao/geonet/Logger.java index 920d13e548f..76590b8d698 100644 --- a/common/src/main/java/org/fao/geonet/Logger.java +++ b/common/src/main/java/org/fao/geonet/Logger.java @@ -23,8 +23,6 @@ package org.fao.geonet; -//============================================================================= - import org.apache.logging.log4j.core.appender.FileAppender; /** @@ -37,35 +35,52 @@ public interface Logger { * * @return check if debug logging is enabled */ - public boolean isDebugEnabled(); + boolean isDebugEnabled(); /** * Log debug message used indicate module troubleshoot module activity. * * @param message debug message used to provide in */ - public void debug(String message); + void debug(String message); + + void debug(String message, Throwable throwable); + + void debug(String message, Object... object); /** * Log information message indicating module progress. * * @param message information message indicating progress */ - public void info(String message); + void info(String message); + + void info(String message, Throwable throwable); + + void info(String message, Object... object); /** Log warning message indicating potentially harmful situation, module * will continue to try and complete current activity. * * @param message Warning message indicating potentially harmful situation */ - public void warning(String message); + void warning(String message); + + void warning(String message, Throwable throwable); + + void warning(String message, Object... object); /** * Log error message indicating module cannot continue current activity.
* * @param message Error message */ - public void error(String message); + void error(String message); + + void error(String message, Throwable throwable); + + void error(String message, Object... object); /** * Log error message using provided throwable, indicating module cannot continue @@ -73,51 +88,49 @@ public interface Logger { * * @param ex Cause of error condition. */ - public void error(Throwable ex); + void error(Throwable ex); /** * Log severe message, indicating application cannot continue to operate. * * @param message severe message */ - public void fatal(String message); + void fatal(String message); /** * Functional module used for logging messages (for example {@code jeeves.engine}). * * @return functional module used for logging messages. */ - public String getModule(); + String getModule(); /** * Configure logger with log4j {@link FileAppender}, used for output. - * + *
* <p>
* The file appender is also responsible for log file location provided by {@link #getFileAppender()}.
* <p>
* Note both module and fallback module are provided allowing providing a better opportunity * to learn the log file location. Harvesters use the log file name parent directory as a good * location to create {@code /harvester_logs/} folder.
* <p>
* Built-in configuration uses log file location {@code logs/geonetwork.log} relative to the current directory, or relative to system property {@code log_file}. * * @return logfile location of {@code logs/geonetwork.log} file */ - public String getFileAppender(); + String getFileAppender(); /** * Access to omodule logging level, providing + * * @return */ - public org.apache.logging.log4j.Level getThreshold(); + org.apache.logging.log4j.Level getThreshold(); } - -//============================================================================= - diff --git a/common/src/main/java/org/fao/geonet/utils/Log.java b/common/src/main/java/org/fao/geonet/utils/Log.java index 094dfb4942e..df0269aaf14 100644 --- a/common/src/main/java/org/fao/geonet/utils/Log.java +++ b/common/src/main/java/org/fao/geonet/utils/Log.java @@ -24,22 +24,18 @@ package org.fao.geonet.utils; -import org.apache.log4j.Priority; import org.apache.log4j.bridge.AppenderWrapper; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.FileAppender; import org.apache.logging.log4j.core.appender.RollingFileAppender; -import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; import java.io.File; -import java.util.Enumeration; - -//============================================================================= /** * Jeeves logging integration, defining functional logger categories by module @@ -125,8 +121,12 @@ public static void debug(String module, Object message) { LogManager.getLogger(module).debug(message); } - public static void debug(String module, Object message, Exception e) { - LogManager.getLogger(module).debug(message, e); + public static void debug(String module, String message, Object... objects) { + LogManager.getLogger(module).debug(message, objects); + } + + public static void debug(String module, String message, Throwable throwable) { + LogManager.getLogger(module).debug(message, throwable); } public static boolean isDebugEnabled(String module) { @@ -157,10 +157,15 @@ public static void info(String module, Object message) { LogManager.getLogger(module).info(message); } - public static void info(String module, Object message, Throwable t) { - LogManager.getLogger(module).info(message, t); + public static void info(String module, String message, Object... objects) { + LogManager.getLogger(module).info(message, objects); + } + + public static void info(String module, String message, Throwable throwable) { + LogManager.getLogger(module).info(message, throwable); } + //--------------------------------------------------------------------------- public static void warning(String module, Object message) { @@ -182,6 +187,14 @@ public static void error(String module, Object message, Throwable t) { LogManager.getLogger(module).error(message, t); } + public static void error(String module, String message, Object... 
objects) { + LogManager.getLogger(module).error(message, objects); + } + + public static void error(String module, String message, Throwable throwable) { + LogManager.getLogger(module).error(message, throwable); + } + //--------------------------------------------------------------------------- public static void fatal(String module, Object message) { @@ -225,18 +238,58 @@ public void debug(String message) { Log.debug(module, message); } + @Override + public void debug(String message, Throwable throwable) { + Log.debug(module, message, throwable); + } + + @Override + public void debug(String message, Object... object) { + Log.debug(module, message, object); + } + public void info(String message) { Log.info(module, message); } + @Override + public void info(String message, Throwable throwable) { + Log.info(module, message, throwable); + } + + @Override + public void info(String message, Object... object) { + Log.info(module, message, object); + } + public void warning(String message) { Log.warning(module, message); } + @Override + public void warning(String message, Throwable throwable) { + Log.warning(module, message, throwable); + } + + @Override + public void warning(String message, Object... object) { + + } + public void error(String message) { Log.error(module, message); } + @Override + public void error(String message, Throwable throwable) { + Log.error(module, message, throwable); + } + + @Override + public void error(String message, Object... object) { + Log.error(module, message, object); + } + public void fatal(String message) { Log.fatal(module, message); } @@ -279,7 +332,7 @@ public String getFileAppender() { } } LoggerConfig fallbackConfig = configuration.getLoggers().get(fallbackModule); - if( fallbackConfig != null) { + if (fallbackConfig != null) { for (Appender appender : fallbackConfig.getAppenders().values()) { File file = toLogFile(appender); if (file != null && file.exists()) { diff --git a/core/src/main/java/jeeves/server/context/BasicContext.java b/core/src/main/java/jeeves/server/context/BasicContext.java index da210ed0ecf..00d1769b4c6 100644 --- a/core/src/main/java/jeeves/server/context/BasicContext.java +++ b/core/src/main/java/jeeves/server/context/BasicContext.java @@ -143,21 +143,61 @@ public void debug(final String message) { logger.debug(message); } + @Override + public void debug(String message, Throwable throwable) { + logger.debug(message, throwable); + } + + @Override + public void debug(String message, Object... object) { + logger.debug(message, object); + } + @Override public void info(final String message) { logger.info(message); } + @Override + public void info(String message, Throwable throwable) { + logger.info(message, throwable); + } + + @Override + public void info(String message, Object... object) { + logger.info(message, object); + } + @Override public void warning(final String message) { logger.warning(message); } + @Override + public void warning(String message, Throwable throwable) { + logger.warning(message, throwable); + } + + @Override + public void warning(String message, Object... object) { + logger.warning(message, object); + } + @Override public void error(final String message) { logger.error(message); } + @Override + public void error(String message, Throwable throwable) { + logger.error(message, throwable); + } + + @Override + public void error(String message, Object... 
object) { + logger.error(message, object); + } + @Override public void error(Throwable ex) { logger.error(ex); @@ -200,6 +240,3 @@ public String getNodeId() { return NodeInfo.DEFAULT_NODE; } } - -//============================================================================= - diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java index 09502913bfb..ed65e8b52a1 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java @@ -119,9 +119,9 @@ public void addPrivileges(String id, Iterable privilegesIterable, Gr String name = localGroups.getName(priv.getGroupId()); if (name == null) { - LOGGER.debug(" - Skipping removed group with id:{}", priv.getGroupId()); + LOGGER.debug(" - Skipping removed group with id: {}", priv.getGroupId()); } else { - LOGGER.debug(" - Setting privileges for group : {}", name); + LOGGER.debug(" - Setting privileges for group: {}", name); for (int opId : priv.getOperations()) { name = dataManager.getAccessManager().getPrivilegeName(opId); //--- all existing operation diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java index bec3d2cda1a..8a3f270a826 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java @@ -140,8 +140,12 @@ public abstract class AbstractHarvester { - private Logger LOGGER = LoggerFactory.getLogger(Geonet.HARVESTER); private final LocalFilesystemParams params; private final DataManager dataMan; @@ -110,9 +107,7 @@ public LocalFsHarvesterFileVisitor(AtomicBoolean cancelMonitor, ServiceContext c this.repo = context.getBean(IMetadataUtils.class); this.startTime = System.currentTimeMillis(); - String harvesterName = params.getName().replaceAll("\\W+", "_"); - LOGGER = LoggerFactory.getLogger(harvesterName); - LOGGER.debug("Start visiting files at {}.", this.startTime); + harvester.getLogger().debug(String.format("Start visiting files at %s.", this.startTime)); } @Override @@ -136,9 +131,9 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO try { result.totalMetadata++; - if (LOGGER.isDebugEnabled() && result.totalMetadata % 1000 == 0) { + if (harvester.getLogger().isDebugEnabled() && result.totalMetadata % 1000 == 0) { long elapsedTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime); - LOGGER.debug("{} records inserted in {} s ({} records/s).", new Object[] { + harvester.getLogger().debug("{} records inserted in {} s ({} records/s).", new Object[] { result.totalMetadata, elapsedTime, result.totalMetadata / elapsedTime}); @@ -152,7 +147,7 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO processXml(file); } } catch (Exception e) { - LOGGER.error("An error occurred while harvesting file {}. Error is: {}.", + harvester.getLogger().error("An error occurred while harvesting file {}. 
Error is: {}.", file.toAbsolutePath().normalize(), e.getMessage()); } return FileVisitResult.CONTINUE; @@ -168,7 +163,7 @@ private void processJson(Path file) throws Exception { ObjectMapper objectMapper = new ObjectMapper(); Element recordAsElement; try { - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug("reading file: {}", filePath); String uuid = com.google.common.io.Files.getNameWithoutExtension(file.getFileName().toString()); String recordAsJson = objectMapper.readTree(filePath.toFile()).toString(); JSONObject sanitizedJson = sanitize(new JSONObject(recordAsJson)); @@ -180,18 +175,18 @@ private void processJson(Path file) throws Exception { recordAsElement = Xml.loadString(recordAsXml, false); recordAsElement.addContent(new Element("uuid").setText(uuid)); } catch (JsonProcessingException e) { - LOGGER.error("Error processing JSON from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error processing JSON from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (JDOMException e) { - LOGGER.error("Error transforming JSON into XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error transforming JSON into XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (Exception e) { - LOGGER.error("Error retrieving JSON from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error retrieving JSON from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.unretrievable++; return; } @@ -241,16 +236,16 @@ private void processXml(Path file) throws Exception { Element xml; try { - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug(String.format("reading file: %s", filePath)); xml = Xml.loadFile(file); } catch (JDOMException e) { - LOGGER.error("Error loading XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error loading XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (Exception e) { - LOGGER.error("Error retrieving XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error retrieving XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.unretrievable++; return; } @@ -266,7 +261,7 @@ private void processXmlData(Path file, Element rawXml) throws Exception { try { xml = Xml.transform(xml, thisXslt); } catch (Exception e) { - LOGGER.error("Cannot transform XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); + harvester.getLogger().error("Cannot transform XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); result.badFormat++; return; } @@ -288,7 +283,7 @@ private void processXmlData(Path file, Element rawXml) throws Exception { params.getValidate().validate(dataMan, context, xml, groupIdVal); } catch (Exception e) { - LOGGER.error("Cannot validate XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); + harvester.getLogger().error("Cannot validate XML from file {}, ignoring. 
Error was: {}", filePath, e.getMessage()); result.doesNotValidate++; return; } @@ -315,14 +310,14 @@ private void processXmlData(Path file, Element rawXml) throws Exception { updateMetadata(file, filePath, xml, schema, id, metadata, true); break; case RANDOM: - LOGGER.debug("Generating random uuid for remote record with uuid " + metadata.getUuid()); + harvester.getLogger().debug("Generating random uuid for remote record with uuid " + metadata.getUuid()); String createDate = getCreateDate(file, xml, schema, uuid); String newUuid = UUID.randomUUID().toString(); id = addMetadata(xml, schema, newUuid, createDate); break; case SKIP: - LOGGER.debug("Skipping record with uuid " + metadata.getUuid()); + harvester.getLogger().debug("Skipping record with uuid " + metadata.getUuid()); result.uuidSkipped++; result.unchangedMetadata++; @@ -351,7 +346,7 @@ private String getCreateDate(Path file, Element xml, String schema, String uuid) try { createDate = dataMan.extractDateModified(schema, xml); } catch (Exception ex) { - LOGGER.error("LocalFilesystemHarvester - addMetadata - can't get metadata modified date for metadata uuid= {} " + + harvester.getLogger().error("LocalFilesystemHarvester - addMetadata - can't get metadata modified date for metadata uuid= {} " + "using current date for modified date", uuid); createDate = new ISODate().toString(); } @@ -376,25 +371,25 @@ private void updateMetadata(Path file, Path filePath, Element xml, String schema String changeDate = new ISODate(fileDate.getTime(), false).getDateAndTime(); - LOGGER.debug(" File date is: {} / record date is: {}", filePath, modified); + harvester.getLogger().debug(" File date is: {} / record date is: {}", filePath, modified); if (DateUtils.truncate(recordDate, Calendar.SECOND) .before(DateUtils.truncate(fileDate, Calendar.SECOND))) { - LOGGER.debug(" Db record is older than file. Updating record with id: {}", id); + harvester.getLogger().debug(String.format(" Db record is older than file. Updating record with id: %s", id)); updateMedata(xml, id, changeDate, force); } else { - LOGGER.debug(" Db record is not older than last modified date of file. No need for update."); + harvester.getLogger().debug(" Db record is not older than last modified date of file. 
No need for update."); result.unchangedMetadata++; } } else { - LOGGER.debug(" updating existing metadata, id is: " + id); + harvester.getLogger().debug(" updating existing metadata, id is: " + id); String changeDate; try { changeDate = dataMan.extractDateModified(schema, xml); } catch (Exception ex) { - LOGGER.error("LocalFilesystemHarvester - updateMetadata - can't get metadata modified date for " + + harvester.getLogger().error("LocalFilesystemHarvester - updateMetadata - can't get metadata modified date for " + "metadata id= {}, using current date for modified date", id); changeDate = new ISODate().toString(); } @@ -406,7 +401,7 @@ private void updateMetadata(Path file, Path filePath, Element xml, String schema private void processMef(Path file) { Path filePath = file.toAbsolutePath().normalize(); - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug(String.format("reading file: %s", filePath)); try { String xsl = params.getImportXslt(); MEFLib.Version version = MEFLib.getMEFVersion(file); @@ -439,7 +434,7 @@ private void processMef(Path file) { params.getValidate() != NOVALIDATION, false, context, file); for (String id : ids) { - LOGGER.debug("Metadata imported from MEF: {}", id); + harvester.getLogger().debug(String.format("Metadata imported from MEF: %s", id)); context.getBean(MetadataRepository.class).update(Integer.valueOf(id), new Updater() { @Override public void apply(@Nonnull final Metadata metadata) { @@ -454,8 +449,8 @@ public void apply(@Nonnull final Metadata metadata) { result.addedMetadata++; } } catch (Exception e) { - LOGGER.error("Error retrieving MEF from file {}, ignoring", filePath); - LOGGER.error("Error: ", e); + harvester.getLogger().error("Error retrieving MEF from file {}, ignoring", filePath); + harvester.getLogger().error("Error: ", e); result.unretrievable++; } } @@ -465,26 +460,26 @@ private String getUuidFromFile(Element xml, Path filePath, String schema) { try { uuid = dataMan.extractUUID(schema, xml); } catch (Exception e) { - LOGGER.debug("Failed to extract metadata UUID for file {}" + + harvester.getLogger().debug("Failed to extract metadata UUID for file {}" + " using XSL extract-uuid. The record is probably " + "a subtemplate. 
Will check uuid attribute on root element.", filePath); // Extract UUID from uuid attribute in subtemplates String uuidAttribute = xml.getAttributeValue("uuid"); if (uuidAttribute != null) { - LOGGER.debug("Found uuid attribute {} for file {}.", uuidAttribute, filePath); + harvester.getLogger().debug("Found uuid attribute {} for file {}.", uuidAttribute, filePath); uuid = uuidAttribute; } else { // Assigning a new UUID uuid = UUID.randomUUID().toString(); - LOGGER.debug("No UUID found, the record will be assigned a random uuid {} for file {}.", uuid, filePath); + harvester.getLogger().debug("No UUID found, the record will be assigned a random uuid {} for file {}.", uuid, filePath); } } return uuid; } private String addMetadata(Element xml, String schema, String uuid, String createDate) throws Exception { - LOGGER.debug("adding new metadata"); + harvester.getLogger().debug("adding new metadata"); String id = harvester.addMetadata(xml, uuid, schema, localGroups, localCateg, createDate, aligner, false); listOfRecordsToIndex.add(Integer.valueOf(id)); result.addedMetadata++; diff --git a/web-ui/src/main/resources/catalog/components/admin/harvester/partials/privileges.html b/web-ui/src/main/resources/catalog/components/admin/harvester/partials/privileges.html index 768b2bece86..deff6842fe6 100644 --- a/web-ui/src/main/resources/catalog/components/admin/harvester/partials/privileges.html +++ b/web-ui/src/main/resources/catalog/components/admin/harvester/partials/privileges.html @@ -6,7 +6,7 @@