diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 40ddc29fa23..36dcfa3ff48 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -43,7 +43,7 @@ jobs: show-progress: 'false' - name: Setup Java JDK - uses: actions/setup-java@v4.1.0 + uses: actions/setup-java@v4.2.1 with: java-version: 11 # Java distribution. See the list of supported distributions in README file diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index c65fbd51148..0d596bc06df 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -33,9 +33,9 @@ jobs: if: ${{ github.event_name == 'pull_request' }} working-directory: docs/manual run: | - mike deploy --title "4.4 Latest" --alias-type=copy --update-aliases 4.4 latest + mike deploy --title "4.4" --alias-type=copy --update-aliases 4.4 latest - name: deploy latest docs to gh-pages branch if: ${{ github.event_name != 'pull_request' }} working-directory: docs/manual run: | - mike deploy --push --title "4.4 Latest" --alias-type=copy --update-aliases 4.4 latest + mike deploy --push --title "4.4" --alias-type=copy --update-aliases 4.4 latest diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index cff43db21d2..4d1d6d27eb1 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -22,7 +22,7 @@ jobs: submodules: 'recursive' show-progress: 'false' - name: Set up JDK - uses: actions/setup-java@v4.1.0 + uses: actions/setup-java@v4.2.1 with: distribution: 'temurin' java-version: ${{ matrix.jdk }} @@ -37,7 +37,7 @@ jobs: - name: Set up Maven uses: stCarolas/setup-maven@v5 with: - maven-version: 3.6.3 + maven-version: 3.8.3 - name: Build with Maven run: | mvn -B -ntp -V install -DskipTests=true -Dmaven.javadoc.skip=true -Drelease -Pwith-doc @@ -55,7 +55,7 @@ jobs: submodules: 'recursive' show-progress: 'false' - name: Set up JDK - uses: actions/setup-java@v4.1.0 + uses: actions/setup-java@v4.2.1 with: distribution: 'temurin' java-version: 11 @@ -63,7 +63,7 @@ jobs: - name: Set up Maven uses: stCarolas/setup-maven@v5 with: - maven-version: 3.6.3 + maven-version: 3.8.3 - name: Test with maven run: | mvn -B resources:resources@copy-index-schema-to-source -f web diff --git a/.github/workflows/mvn-dep-tree.yml b/.github/workflows/mvn-dep-tree.yml index 0ac94031a83..75b02b2fc57 100644 --- a/.github/workflows/mvn-dep-tree.yml +++ b/.github/workflows/mvn-dep-tree.yml @@ -20,7 +20,7 @@ jobs: show-progress: 'false' - name: Setup Java JDK - uses: actions/setup-java@v4.1.0 + uses: actions/setup-java@v4.2.1 with: java-version: 11 # Java distribution. See the list of supported distributions in README file diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 00000000000..98cf13704c4 --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,72 @@ +# This workflow uses actions that are not certified by GitHub. They are provided +# by a third-party and are governed by separate terms of service, privacy +# policy, and support documentation. + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. 
See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '26 10 * * 5' + push: + branches: [ "main" ] + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). + id-token: write + # Uncomment the permissions below if installing in a private repository. + # contents: read + # actions: read + + steps: + - name: "Checkout code" + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1 + with: + results_file: results.sarif + results_format: sarif + # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: + # - you want to enable the Branch-Protection check on a *public* repository, or + # - you are installing Scorecard on a *private* repository + # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. + # repo_token: ${{ secrets.SCORECARD_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. 
+ - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4 + with: + sarif_file: results.sarif diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml index 5788e6859bd..6f1285d3e23 100644 --- a/.github/workflows/sonarcloud.yml +++ b/.github/workflows/sonarcloud.yml @@ -19,7 +19,7 @@ jobs: submodules: 'recursive' show-progress: 'false' - name: Set up JDK 11 - uses: actions/setup-java@v4.1.0 + uses: actions/setup-java@v4.2.1 with: distribution: 'temurin' java-version: '11' @@ -34,7 +34,7 @@ jobs: run: mvn -B package -DskipTests - name: Set up JDK 21 # Sonarcloud analyzer needs at least JDK 17 - uses: actions/setup-java@v4.1.0 + uses: actions/setup-java@v4.2.1 with: distribution: 'temurin' java-version: '21' diff --git a/.gitignore b/.gitignore index 5bb1036a3a4..84f282d0af7 100644 --- a/.gitignore +++ b/.gitignore @@ -40,7 +40,12 @@ release/jetty/* schemas/*/doc/*/*.rst schematrons/.build target/ + +# build and release transifex/transifex-format/ +build/ +web-ui/LICENSE +web-ui/tx # web-app, clear using: mvn -f web/pom.xml clean:clean@reset @@ -59,11 +64,7 @@ web/src/main/webapp/META-INF/MANIFEST.MF web/src/main/webapp/WEB-INF/data/0* web/src/main/webapp/WEB-INF/data/config/encryptor.properties web/src/main/webapp/WEB-INF/data/config/index/records.json -web/src/main/webapp/WEB-INF/data/config/schema_plugins/*/schematron/schematron*.xsl -web/src/main/webapp/WEB-INF/data/config/schema_plugins/csw-record -web/src/main/webapp/WEB-INF/data/config/schema_plugins/dublin-core -web/src/main/webapp/WEB-INF/data/config/schema_plugins/iso19* -web/src/main/webapp/WEB-INF/data/config/schema_plugins/schemaplugin-uri-catalog.xml +web/src/main/webapp/WEB-INF/data/config/schema_plugins/* web/src/main/webapp/WEB-INF/data/config/schemaplugin-uri-catalog.xml web/src/main/webapp/WEB-INF/data/data/backup web/src/main/webapp/WEB-INF/data/data/metadata_data diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 00000000000..1cdaa3768cf --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,88 @@ +# This CITATION.cff file was generated with cffinit. +# Visit https://bit.ly/cffinit to generate yours today! + +cff-version: 1.2.0 +title: GeoNetwork opensource +message: >- + If you use this software, please cite it using the + metadata from this file. +type: software +authors: + - given-names: François + family-names: Prunayre + affiliation: Titellus + - given-names: Jose + family-names: García + affiliation: GeoCat BV + - given-names: Jeroen + family-names: Ticheler + affiliation: GeoCat BV + orcid: 'https://orcid.org/0009-0003-3896-0437' + email: jeroen.ticheler@geocat.net + - given-names: Florent + family-names: Gravin + affiliation: CamptoCamp + - given-names: Simon + family-names: Pigot + affiliation: CSIRO Australia + - name: GeoCat BV + address: Veenderweg 13 + city: Bennekom + country: NL + post-code: 6721 WD + tel: +31 (0) 318 416 664 + website: 'https://www.geocat.net/' + email: info@geocat.net + - name: Titellus + address: 321 Route de la Mollière + city: Saint Pierre de Genebroz + country: FR + post-code: 73360 + website: 'https://titellus.net/' + email: fx.prunayre@titellus.net + - name: CamptoCamp + address: QG Center Rte de la Chaux 4 + city: Bussigny + country: CH + post-code: 1030 + tel: +41 (21) 619 10 10 + website: 'https://camptocamp.com/' + email: info@camptocamp.com + - name: Open Source Geospatial Foundation - OSGeo + address: '9450 SW Gemini Dr. 
#42523' + location: Beaverton + region: Oregon + post-code: '97008' + country: US + email: info@osgeo.org + website: 'https://www.osgeo.org/' +repository-code: 'http://github.com/geonetwork/core-geonetwork' +url: 'https://geonetwork-opensource.org' +repository-artifact: >- + https://sourceforge.net/projects/geonetwork/files/GeoNetwork_opensource/ +abstract: >- + GeoNetwork is a catalog application to manage spatial and + non-spatial resources. It is compliant with critical + international standards from ISO, OGC and INSPIRE. It + provides powerful metadata editing and search functions as + well as an interactive web map viewer. +keywords: + - catalog + - gis + - sdi + - spatial data infrastructure + - dataspace + - search + - open data + - standards + - spatial + - CSW + - OGCAPI Records + - DCAT + - GeoDCAT-AP + - Catalog Service + - OGC + - open geospatial consortium + - osgeo + - open source geospatial foundation +license: GPL-2.0 diff --git a/README.md b/README.md index ed6c54d3596..65f57590c6a 100644 --- a/README.md +++ b/README.md @@ -30,3 +30,6 @@ Developer documentation located in ``README.md`` files in the code-base: * General documentation for the project as a whole is in this [README.md](README.md) * [Software Development Documentation](/software_development/) provides instructions for setting up a development environment, building GeoNetwork, compiling user documentation, and making a releases. * Module specific documentation can be found in each module: + +## Open Source Security Foundation (OpenSSF) best practices status +[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8626/badge)](https://www.bestpractices.dev/projects/8626) diff --git a/SECURITY.md b/SECURITY.md index fda55f12dad..8ca2726ee51 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -11,11 +11,11 @@ Each GeoNetwork release is supported with bug fixes for a limited period, with p - We recommend to update to latest incremental release as soon as possible to address security vulnerabilities. - Some overlap is provided when major versions are announced with both a current version and a maintenance version being made available to provide time for organizations to upgrade. -| Version | Supported | Comment | -|---------|--------------------|---------------------| -| 4.4.x | :white_check_mark: | Latest version | -| 4.2.x | :white_check_mark: | Stable version | -| 3.12.x | :white_check_mark: | Maintenance version | +| Version | Supported | Comment | +|---------|--------------------|---------------------------------------------| +| 4.4.x | :white_check_mark: | Latest version | +| 4.2.x | :white_check_mark: | Stable version | +| 3.12.x | ❌ | End Of Life 2024-03-31 | If your organisation is making use of a GeoNetwork version that is no longer in use by the community all is not lost. You can volunteer on the developer list to make additional releases, or engage with one of our [Commercial Support](https://www.osgeo.org/service-providers/?p=geonetwork) providers. 
diff --git a/add-schema.sh b/add-schema.sh index 2a268428530..4f1ecc8c92d 100755 --- a/add-schema.sh +++ b/add-schema.sh @@ -83,7 +83,7 @@ then ${insertLine} a\\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ SED_SCRIPT @@ -103,7 +103,7 @@ SED_SCRIPT \ \\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ \\ \ \\ @@ -121,7 +121,7 @@ SED_SCRIPT \ \\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ zip\\ \ false\\ \ \$\{schema-plugins.dir\}\\ @@ -138,7 +138,7 @@ SED_SCRIPT fi # Add schema resources in service/pom.xml with test scope for unit tests -line=$(grep -n "schema-${schema}" services/pom.xml | cut -d: -f1) +line=$(grep -n "gn-schema-${schema}" services/pom.xml | cut -d: -f1) if [ ! $line ] then @@ -154,7 +154,7 @@ then ${finalLine} a\\ \ \\ \ ${projectGroupId}\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ test\\ \ diff --git a/cachingxslt/pom.xml b/cachingxslt/pom.xml index d654a3e9917..897ff970754 100644 --- a/cachingxslt/pom.xml +++ b/cachingxslt/pom.xml @@ -31,7 +31,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/common/pom.xml b/common/pom.xml index 0dbdf0ab22d..2c2e9083ba9 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -31,7 +31,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/common/src/main/java/org/fao/geonet/Logger.java b/common/src/main/java/org/fao/geonet/Logger.java index 920d13e548f..76590b8d698 100644 --- a/common/src/main/java/org/fao/geonet/Logger.java +++ b/common/src/main/java/org/fao/geonet/Logger.java @@ -23,8 +23,6 @@ package org.fao.geonet; -//============================================================================= - import org.apache.logging.log4j.core.appender.FileAppender; /** @@ -37,35 +35,52 @@ public interface Logger { * * @return check if debug logging is enabled */ - public boolean isDebugEnabled(); + boolean isDebugEnabled(); /** * Log debug message used indicate module troubleshoot module activity. * * @param message debug message used to provide in */ - public void debug(String message); + void debug(String message); + + void debug(String message, Throwable throwable); + + void debug(String message, Object... object); /** * Log information message indicating module progress. * * @param message information message indicating progress */ - public void info(String message); + void info(String message); + + void info(String message, Throwable throwable); - /** Log warning message indicating potentially harmful situation, module + void info(String message, Object... object); + + /** + * Log warning message indicating potentially harmful situation, module * will continue to try and complete current activity. * * @param message Warning message indicating potentially harmful situation */ - public void warning(String message); + void warning(String message); + + void warning(String message, Throwable throwable); + + void warning(String message, Object... object); /** * Log error message indicating module cannot continue current activity. * * @param message Error message */ - public void error(String message); + void error(String message); + + void error(String message, Throwable throwable); + + void error(String message, Object... 
object); /** * Log error message using provided throwable, indicating module cannot continue @@ -73,51 +88,49 @@ public interface Logger { * * @param ex Cause of error condition. */ - public void error(Throwable ex); + void error(Throwable ex); /** * Log severe message, indicating application cannot continue to operate. * * @param message severe message */ - public void fatal(String message); + void fatal(String message); /** * Functional module used for logging messages (for example {@code jeeves.engine}). * * @return functional module used for logging messages. */ - public String getModule(); + String getModule(); /** * Configure logger with log4j {@link FileAppender}, used for output. - * + *
+ * <p>
* The file appender is also responsible for log file location provided by {@link #getFileAppender()}. * * @param fileAppender Log4j FileAppender */ - public void setAppender(FileAppender fileAppender); + void setAppender(FileAppender fileAppender); /** * The log file name from the file appender for this module. - * + *
+ * <p>
* Note both module and fallback module are provided allowing providing a better opportunity * to learn the log file location. Harvesters use the log file name parent directory as a good * location to create {@code /harvester_logs/} folder. - * + *
+ * <p>
* Built-in configuration uses log file location {@code logs/geonetwork.log} relative to the current directory, or relative to system property {@code log_file}. * * @return logfile location of {@code logs/geonetwork.log} file */ - public String getFileAppender(); + String getFileAppender(); /** * Access to omodule logging level, providing + * * @return */ - public org.apache.logging.log4j.Level getThreshold(); + org.apache.logging.log4j.Level getThreshold(); } - -//============================================================================= - diff --git a/common/src/main/java/org/fao/geonet/utils/Log.java b/common/src/main/java/org/fao/geonet/utils/Log.java index 094dfb4942e..df0269aaf14 100644 --- a/common/src/main/java/org/fao/geonet/utils/Log.java +++ b/common/src/main/java/org/fao/geonet/utils/Log.java @@ -24,22 +24,18 @@ package org.fao.geonet.utils; -import org.apache.log4j.Priority; import org.apache.log4j.bridge.AppenderWrapper; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.FileAppender; import org.apache.logging.log4j.core.appender.RollingFileAppender; -import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; import java.io.File; -import java.util.Enumeration; - -//============================================================================= /** * Jeeves logging integration, defining functional logger categories by module @@ -125,8 +121,12 @@ public static void debug(String module, Object message) { LogManager.getLogger(module).debug(message); } - public static void debug(String module, Object message, Exception e) { - LogManager.getLogger(module).debug(message, e); + public static void debug(String module, String message, Object... objects) { + LogManager.getLogger(module).debug(message, objects); + } + + public static void debug(String module, String message, Throwable throwable) { + LogManager.getLogger(module).debug(message, throwable); } public static boolean isDebugEnabled(String module) { @@ -157,10 +157,15 @@ public static void info(String module, Object message) { LogManager.getLogger(module).info(message); } - public static void info(String module, Object message, Throwable t) { - LogManager.getLogger(module).info(message, t); + public static void info(String module, String message, Object... objects) { + LogManager.getLogger(module).info(message, objects); + } + + public static void info(String module, String message, Throwable throwable) { + LogManager.getLogger(module).info(message, throwable); } + //--------------------------------------------------------------------------- public static void warning(String module, Object message) { @@ -182,6 +187,14 @@ public static void error(String module, Object message, Throwable t) { LogManager.getLogger(module).error(message, t); } + public static void error(String module, String message, Object... 
objects) { + LogManager.getLogger(module).error(message, objects); + } + + public static void error(String module, String message, Throwable throwable) { + LogManager.getLogger(module).error(message, throwable); + } + //--------------------------------------------------------------------------- public static void fatal(String module, Object message) { @@ -225,18 +238,58 @@ public void debug(String message) { Log.debug(module, message); } + @Override + public void debug(String message, Throwable throwable) { + Log.debug(module, message, throwable); + } + + @Override + public void debug(String message, Object... object) { + Log.debug(module, message, object); + } + public void info(String message) { Log.info(module, message); } + @Override + public void info(String message, Throwable throwable) { + Log.info(module, message, throwable); + } + + @Override + public void info(String message, Object... object) { + Log.info(module, message, object); + } + public void warning(String message) { Log.warning(module, message); } + @Override + public void warning(String message, Throwable throwable) { + Log.warning(module, message, throwable); + } + + @Override + public void warning(String message, Object... object) { + + } + public void error(String message) { Log.error(module, message); } + @Override + public void error(String message, Throwable throwable) { + Log.error(module, message, throwable); + } + + @Override + public void error(String message, Object... object) { + Log.error(module, message, object); + } + public void fatal(String message) { Log.fatal(module, message); } @@ -279,7 +332,7 @@ public String getFileAppender() { } } LoggerConfig fallbackConfig = configuration.getLoggers().get(fallbackModule); - if( fallbackConfig != null) { + if (fallbackConfig != null) { for (Appender appender : fallbackConfig.getAppenders().values()) { File file = toLogFile(appender); if (file != null && file.exists()) { diff --git a/common/src/main/java/org/fao/geonet/utils/Xml.java b/common/src/main/java/org/fao/geonet/utils/Xml.java index c796fbfe4ec..9e0b20ee6fd 100644 --- a/common/src/main/java/org/fao/geonet/utils/Xml.java +++ b/common/src/main/java/org/fao/geonet/utils/Xml.java @@ -39,14 +39,7 @@ import org.fao.geonet.utils.nio.NioPathAwareEntityResolver; import org.fao.geonet.utils.nio.NioPathHolder; import org.fao.geonet.utils.nio.PathStreamSource; -import org.jdom.Attribute; -import org.jdom.Content; -import org.jdom.DocType; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.Text; +import org.jdom.*; import org.jdom.filter.ElementFilter; import org.jdom.input.SAXBuilder; import org.jdom.output.Format; @@ -64,27 +57,14 @@ import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.Unmarshaller; -import javax.xml.transform.Result; -import javax.xml.transform.Source; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.URIResolver; +import javax.xml.transform.*; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.ValidatorHandler; -import java.io.BufferedOutputStream; -import java.io.ByteArrayInputStream; -import java.io.DataInputStream; -import java.io.File; -import java.io.IOException; -import 
java.io.InputStream; -import java.io.OutputStream; -import java.io.PrintStream; -import java.io.StringReader; +import java.io.*; import java.net.HttpURLConnection; import java.net.URI; import java.net.URISyntaxException; @@ -99,14 +79,7 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; +import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -403,22 +376,16 @@ public static Element transform(Element xml, Path styleSheetPath, Map params, OutputStream out) throws Exception { StreamResult resStream = new StreamResult(out); - transform(xml, styleSheetPath, resStream, null); + transform(xml, styleSheetPath, resStream, params); out.flush(); } - - public static void transformXml(Element xml, Path styleSheetPath, OutputStream out) throws Exception { - StreamResult resStream = new StreamResult(out); - Map map = new HashMap<>(); - map.put("geonet-force-xml", "xml"); - transform(xml, styleSheetPath, resStream, map); - out.flush(); + public static void transform(Element xml, Path styleSheetPath, OutputStream out) throws Exception { + transform(xml, styleSheetPath, new HashMap<>(), out); } - //-------------------------------------------------------------------------- /** * Transforms an xml tree putting the result to a stream - no parameters. @@ -484,6 +451,9 @@ protected static Path resolvePath(Source s) throws URISyntaxException { /** * Transforms an xml tree putting the result to a stream with optional parameters. + *
+ * Add a geonet-force-xml parameter to force the formatting to be xml. + * The preferred method is to define it using xsl:output. */ public static void transform(Element xml, Path styleSheetPath, Result result, Map params) throws Exception { @@ -515,13 +485,13 @@ protected static Path resolvePath(Source s) throws URISyntaxException { t.setParameter(param.getKey(), param.getValue()); } - if (params.containsKey("geonet-force-xml")) { - ((Controller) t).setOutputProperty("indent", "yes"); - ((Controller) t).setOutputProperty("method", "xml"); - ((Controller) t).setOutputProperty("{http://saxon.sf.net/}indent-spaces", "3"); + if (params.containsKey("geonet-force-xml")) { + ((Controller) t).setOutputProperty("indent", "yes"); + ((Controller) t).setOutputProperty("method", "xml"); + ((Controller) t).setOutputProperty("{http://saxon.sf.net/}indent-spaces", "2"); + } } - } t.transform(srcXml, result); } } diff --git a/common/src/main/java/org/fao/geonet/utils/XmlRequest.java b/common/src/main/java/org/fao/geonet/utils/XmlRequest.java index 7b6a3b69c59..cba8608a556 100644 --- a/common/src/main/java/org/fao/geonet/utils/XmlRequest.java +++ b/common/src/main/java/org/fao/geonet/utils/XmlRequest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -124,13 +124,13 @@ protected final Element executeAndReadResponse(HttpRequestBase httpMethod) throw " -- Response Code: " + httpResponse.getRawStatusCode()); } - byte[] data = null; + byte[] data; try { data = IOUtils.toByteArray(httpResponse.getBody()); return Xml.loadStream(new ByteArrayInputStream(data)); } catch (JDOMException e) { - throw new BadXmlResponseEx("Response: '" + new String(data, "UTF8") + "' (from URI " + httpMethod.getURI() + ")"); + throw new BadXmlResponseEx("Invalid XML document from URI: " + httpMethod.getURI()); } finally { httpMethod.releaseConnection(); diff --git a/core/pom.xml b/core/pom.xml index f9a6f76215e..7b724db211c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 @@ -552,7 +552,7 @@ org.owasp.esapi esapi - 2.4.0.0 + 2.5.4.0 log4j diff --git a/core/src/main/java/jeeves/server/context/BasicContext.java b/core/src/main/java/jeeves/server/context/BasicContext.java index da210ed0ecf..00d1769b4c6 100644 --- a/core/src/main/java/jeeves/server/context/BasicContext.java +++ b/core/src/main/java/jeeves/server/context/BasicContext.java @@ -143,21 +143,61 @@ public void debug(final String message) { logger.debug(message); } + @Override + public void debug(String message, Throwable throwable) { + logger.debug(message, throwable); + } + + @Override + public void debug(String message, Object... object) { + logger.debug(message, object); + } + @Override public void info(final String message) { logger.info(message); } + @Override + public void info(String message, Throwable throwable) { + logger.info(message, throwable); + } + + @Override + public void info(String message, Object... object) { + logger.info(message, object); + } + @Override public void warning(final String message) { logger.warning(message); } + @Override + public void warning(String message, Throwable throwable) { + logger.warning(message, throwable); + } + + @Override + public void warning(String message, Object... 
object) { + logger.warning(message, object); + } + @Override public void error(final String message) { logger.error(message); } + @Override + public void error(String message, Throwable throwable) { + logger.error(message, throwable); + } + + @Override + public void error(String message, Object... object) { + logger.error(message, object); + } + @Override public void error(Throwable ex) { logger.error(ex); @@ -200,6 +240,3 @@ public String getNodeId() { return NodeInfo.DEFAULT_NODE; } } - -//============================================================================= - diff --git a/core/src/main/java/org/fao/geonet/analytics/WebAnalyticsConfiguration.java b/core/src/main/java/org/fao/geonet/analytics/WebAnalyticsConfiguration.java new file mode 100644 index 00000000000..d9c2799076e --- /dev/null +++ b/core/src/main/java/org/fao/geonet/analytics/WebAnalyticsConfiguration.java @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.analytics; + +public class WebAnalyticsConfiguration { + private String service; + private String javascriptCode; + + public String getService() { + return service; + } + + public void setService(String service) { + this.service = service; + } + + public String getJavascriptCode() { + return javascriptCode; + } + + public void setJavascriptCode(String javascriptCode) { + this.javascriptCode = javascriptCode; + } +} diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java index c2c20f8c898..168e4e2a63c 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java @@ -1,6 +1,6 @@ /* * ============================================================================= - * === Copyright (C) 2019 Food and Agriculture Organization of the + * === Copyright (C) 2024 Food and Agriculture Organization of the * === United Nations (FAO-UN), United Nations World Food Programme (WFP) * === and United Nations Environment Programme (UNEP) * === @@ -44,12 +44,16 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Base64; import java.util.List; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public abstract class AbstractStore implements Store { + protected static final String RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR = ":"; + protected static final String RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_ESCAPED_SEPARATOR = "\\:"; + @Override public final List getResources(final ServiceContext context, final String metadataUuid, final Sort sort, final String filter) throws Exception { @@ -202,6 +206,12 @@ public String delResources(final ServiceContext context, final String metadataUu return delResources(context, metadataUuid, true); } + @Override + public String delResources(final ServiceContext context, final String metadataUuid, Boolean approved) throws Exception { + int metadataId = canEdit(context, metadataUuid, approved); + return delResources(context, metadataId); + } + @Override public String delResource(final ServiceContext context, final String metadataUuid, final String resourceId) throws Exception { return delResource(context, metadataUuid, resourceId, true); @@ -273,4 +283,28 @@ public String toString() { } }; } + + private String escapeResourceManagementExternalProperties(String value) { + return value.replace(RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR, RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_ESCAPED_SEPARATOR); +} + + /** + * Create an encoded base 64 object id contains the following fields to uniquely identify the resource + * The fields are separated by a colon ":" + * @param type to identify type of storage - document/folder + * @param visibility of the resource public/private + * @param metadataId internal metadata id + * @param version identifier which can be used to directly get this version. 
+ * @param resourceId or filename of the resource + * @return based 64 object id + */ + protected String getResourceManagementExternalPropertiesObjectId(final String type, final MetadataResourceVisibility visibility, final Integer metadataId, final String version, + final String resourceId) { + return Base64.getEncoder().encodeToString( + ((type + RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR + + escapeResourceManagementExternalProperties(visibility == null ? "" : visibility.toString().toLowerCase()) + RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR + + metadataId + RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR + + escapeResourceManagementExternalProperties(version == null ? "" : version) + RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR + + escapeResourceManagementExternalProperties(resourceId)).getBytes())); + } } diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java index 4f2c16ec739..469bfc296ea 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java @@ -229,25 +229,31 @@ private Path getPath(ServiceContext context, int metadataId, MetadataResourceVis } @Override - public String delResources(ServiceContext context, String metadataUuid, Boolean approved) throws Exception { - int metadataId = canEdit(context, metadataUuid, approved); + public String delResources(ServiceContext context, int metadataId) throws Exception { Path metadataDir = Lib.resource.getMetadataDir(getDataDirectory(context), metadataId); try { + Log.info(Geonet.RESOURCES, String.format("Deleting all files from metadataId '%d'", metadataId)); IO.deleteFileOrDirectory(metadataDir, true); - return String.format("Metadata '%s' directory removed.", metadataId); + Log.info(Geonet.RESOURCES, + String.format("Metadata '%d' directory removed.", metadataId)); + return String.format("Metadata '%d' directory removed.", metadataId); } catch (Exception e) { - return String.format("Unable to remove metadata '%s' directory.", metadataId); + return String.format("Unable to remove metadata '%d' directory.", metadataId); } } @Override public String delResource(ServiceContext context, String metadataUuid, String resourceId, Boolean approved) throws Exception { - canEdit(context, metadataUuid, approved); + int metadataId = canEdit(context, metadataUuid, approved); try (ResourceHolder filePath = getResource(context, metadataUuid, resourceId, approved)) { Files.deleteIfExists(filePath.getPath()); - return String.format("MetadataResource '%s' removed.", resourceId); + Log.info(Geonet.RESOURCES, + String.format("Resource '%s' removed for metadata %d (%s).", resourceId, metadataId, metadataUuid)); + return String.format("Metadata resource '%s' removed.", resourceId); } catch (IOException e) { + Log.warning(Geonet.RESOURCES, + String.format("Unable to remove resource '%s' for metadata %d (%s). 
%s", resourceId, metadataId, metadataUuid, e.getMessage())); return String.format("Unable to remove resource '%s'.", resourceId); } } @@ -255,12 +261,16 @@ public String delResource(ServiceContext context, String metadataUuid, String re @Override public String delResource(final ServiceContext context, final String metadataUuid, final MetadataResourceVisibility visibility, final String resourceId, Boolean approved) throws Exception { - canEdit(context, metadataUuid, approved); + int metadataId = canEdit(context, metadataUuid, approved); try (ResourceHolder filePath = getResource(context, metadataUuid, visibility, resourceId, approved)) { Files.deleteIfExists(filePath.getPath()); - return String.format("MetadataResource '%s' removed.", resourceId); + Log.info(Geonet.RESOURCES, + String.format("Resource '%s' removed for metadata %d (%s).", resourceId, metadataId, metadataUuid)); + return String.format("Metadata resource '%s' removed.", resourceId); } catch (IOException e) { + Log.warning(Geonet.RESOURCES, + String.format("Unable to remove resource '%s' for metadata %d (%s). %s", resourceId, metadataId, metadataUuid, e.getMessage())); return String.format("Unable to remove resource '%s'.", resourceId); } } diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/ResourceLoggerStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/ResourceLoggerStore.java index 75876ab95ff..14d9e74ce56 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/ResourceLoggerStore.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/ResourceLoggerStore.java @@ -125,6 +125,13 @@ public String delResources(ServiceContext context, String metadataUuid, Boolean return null; } + public String delResources(ServiceContext context, int metadataId) throws Exception { + if (decoratedStore != null) { + return decoratedStore.delResources(context, metadataId); + } + return null; + } + @Override public String delResource(ServiceContext context, String metadataUuid, String resourceId, Boolean approved) throws Exception { if (decoratedStore != null) { diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/Store.java b/core/src/main/java/org/fao/geonet/api/records/attachments/Store.java index 3e6ad03011a..41dc645b0d1 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/Store.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/Store.java @@ -278,12 +278,24 @@ MetadataResource putResource(ServiceContext context, String metadataUuid, String /** * Delete all resources for a metadata * + * @deprecated it is possible that the metadata draft was deleted during the transaction. Use + * String delResources(ServiceContext context, int metadataId) throws Exception; instead. 
+ * * @param context * @param metadataUuid The metadata UUID * @param approved Return the approved version or not */ + @Deprecated String delResources(ServiceContext context, String metadataUuid, Boolean approved) throws Exception; + /** + * Delete all resources for a metadata + * + * @param context + * @param metadataId The metadata ID + */ + String delResources(ServiceContext context, int metadataId) throws Exception; + /** * Delete a resource from the metadata store * diff --git a/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java b/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java index 971e1c072a0..361c7fc816b 100644 --- a/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java +++ b/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java @@ -27,15 +27,13 @@ import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.common.collect.Maps; - -import org.locationtech.jts.util.Assert; - import org.fao.geonet.Constants; import org.fao.geonet.constants.Geonet; import org.fao.geonet.exceptions.TermNotFoundException; import org.fao.geonet.kernel.search.keyword.KeywordRelation; import org.fao.geonet.languages.IsoLanguagesMapper; import org.fao.geonet.utils.Log; +import org.locationtech.jts.util.Assert; import org.openrdf.model.GraphException; import org.openrdf.model.URI; import org.openrdf.sesame.config.AccessDeniedException; @@ -46,6 +44,8 @@ import org.openrdf.sesame.repository.local.LocalRepository; import org.springframework.beans.factory.annotation.Autowired; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; @@ -59,9 +59,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - /** * @author Jesse on 2/27/2015. 
*/ @@ -221,8 +218,7 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce } @Override - public synchronized Thesaurus removeElement(KeywordBean keyword) throws MalformedQueryException, QueryEvaluationException, - IOException, AccessDeniedException { + public synchronized Thesaurus removeElement(KeywordBean keyword) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -237,8 +233,7 @@ public synchronized Thesaurus removeElement(String uri) throws AccessDeniedExcep } @Override - public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -266,12 +261,12 @@ public Thesaurus updateCode(KeywordBean bean, String newcode) throws AccessDenie } @Override - public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException { throw new UnsupportedOperationException(); } @Override - public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -287,8 +282,7 @@ public IsoLanguagesMapper getIsoLanguageMapper() { } @Override - public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException, - IOException, MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException { throw new UnsupportedOperationException(); } diff --git a/core/src/main/java/org/fao/geonet/kernel/EditLib.java b/core/src/main/java/org/fao/geonet/kernel/EditLib.java index ae70dcd5e62..873b9c3bcdf 100644 --- a/core/src/main/java/org/fao/geonet/kernel/EditLib.java +++ b/core/src/main/java/org/fao/geonet/kernel/EditLib.java @@ -33,18 +33,10 @@ import java.io.IOException; import java.io.StringReader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.BitSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.Vector; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.jxpath.ri.parser.Token; @@ -272,7 +264,7 @@ private void addChildToParent(MetadataSchema mdSchema, Element targetElement, El // remove everything and then, depending on removeExisting // readd all children to the element and assure a correct position for the new one: at the end of the others // or just add the new one - List existingAllType = new ArrayList(targetElement.getChildren()); + List existingAllType = new ArrayList(targetElement.getChildren()); targetElement.removeContent(); for (String singleType: type.getAlElements()) { List existingForThisType = filterOnQname(existingAllType, singleType); @@ -283,9 
+275,22 @@ private void addChildToParent(MetadataSchema mdSchema, Element targetElement, El LOGGER_ADD_ELEMENT.debug("#### - add child {}", existingChild.toString()); } } - if (qname.equals(singleType)) + if (qname.equals(singleType)) { targetElement.addContent(childToAdd); + } + + filterOnQname(existingAllType, "geonet:child") + .stream() + .filter(gnChild -> (gnChild.getAttributeValue("prefix") + ":" + gnChild.getAttributeValue("name")).equals(singleType)) + .findFirst() + .ifPresent(targetElement::addContent); } + + Stream.concat( + filterOnQname(existingAllType, "geonet:element").stream(), + filterOnQname(existingAllType, "geonet:attribute").stream() + ).forEach(targetElement::addContent); + } public void addXMLFragments(String schema, Element md, Map xmlInputs) throws Exception { diff --git a/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java b/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java index 86a0cdca444..cc5296232bd 100644 --- a/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java +++ b/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java @@ -27,8 +27,11 @@ import jeeves.server.sources.http.JeevesServlet; import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.constants.Geonet; +import org.fao.geonet.exceptions.BadParameterEx; +import org.fao.geonet.utils.FilePathChecker; import org.fao.geonet.utils.IO; import org.fao.geonet.utils.Log; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationEvent; import org.springframework.context.ConfigurableApplicationContext; @@ -63,6 +66,9 @@ public class GeonetworkDataDirectory { */ public static final String GEONETWORK_BEAN_KEY = "GeonetworkDataDirectory"; + @Autowired + SchemaManager schemaManager; + private Path webappDir; private Path systemDataDir; private Path indexConfigDir; @@ -797,11 +803,18 @@ public Path getXsltConversion(String conversionId) { if (conversionId.startsWith(IMPORT_STYLESHEETS_SCHEMA_PREFIX)) { String[] pathToken = conversionId.split(":"); if (pathToken.length == 3) { + String schema = pathToken[1]; + if (!schemaManager.existsSchema(schema)) { + throw new BadParameterEx(String.format( + "Conversion not found. Schema '%s' is not registered in this catalog.", schema)); + } + FilePathChecker.verify(pathToken[2]); return this.getSchemaPluginsDir() .resolve(pathToken[1]) .resolve(pathToken[2] + ".xsl"); } } else { + FilePathChecker.verify(conversionId); return this.getWebappDir().resolve(Geonet.Path.IMPORT_STYLESHEETS). resolve(conversionId + ".xsl"); } diff --git a/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java b/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java index 14e09a62b12..18742c86494 100644 --- a/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -76,6 +76,7 @@ import java.util.Map; import java.util.Set; import java.util.regex.Pattern; +import java.util.stream.Collectors; /** * Class that handles all functions relating to metadata schemas. 
This includes @@ -106,6 +107,7 @@ public class SchemaManager { private static int activeWriters = 0; private Map hmSchemas = new HashMap<>(); private Map hmSchemasTypenames = new HashMap<>(); + private Map cswOutputSchemas = new HashMap<>(); private String[] fnames = {"labels.xml", "codelists.xml", "strings.xml"}; private Path schemaPluginsDir; private Path schemaPluginsCat; @@ -958,6 +960,7 @@ private void addSchema(ApplicationContext applicationContext, Path schemaDir, El if (mds.getSchemaPlugin() != null && mds.getSchemaPlugin().getCswTypeNames() != null) { hmSchemasTypenames.putAll(mds.getSchemaPlugin().getCswTypeNames()); + cswOutputSchemas.putAll(mds.getSchemaPlugin().getOutputSchemas()); } // -- add cached xml files (schema codelists and label files) @@ -1383,7 +1386,6 @@ private void checkAppSupported(Element schemaPluginCatRoot) throws Exception { " requires max Geonetwork version: " + majorAppVersionSupported + ", current is: " + version + ". Skip load schema."); removes.add(schemaInfo.getKey()); - continue; } } @@ -1901,7 +1903,7 @@ public boolean accept(Path entry) throws IOException { try (DirectoryStream schemaplugins = Files.newDirectoryStream(schemaPluginDir, xsdFilter)) { boolean missingXsdFiles = true; for (Path schemaplugin : schemaplugins) { - IO.copyDirectoryOrFile(schemaplugin, webAppDirSchemaXSD.resolve(schemaplugin), false); + IO.copyDirectoryOrFile(schemaplugin, webAppDirSchemaXSD.resolve(schemaplugin.getFileName()), false); missingXsdFiles = false; } @@ -1926,17 +1928,17 @@ public Map getHmSchemasTypenames() { } /** - * Return the list of namespace URI of all typenames declared in all schema plugins. + * Return the list of outputSchema declared in all schema plugins. + */ + public Map getOutputSchemas() { + return cswOutputSchemas; + } + + /** + * Return the list of namespace URI of all outputSchema declared in all schema plugins. */ public List getListOfOutputSchemaURI() { - Iterator iterator = hmSchemasTypenames.keySet().iterator(); - List listOfSchemaURI = new ArrayList<>(); - while (iterator.hasNext()) { - String typeLocalName = iterator.next(); - Namespace ns = hmSchemasTypenames.get(typeLocalName); - listOfSchemaURI.add(ns.getURI()); - } - return listOfSchemaURI; + return new ArrayList<>(cswOutputSchemas.values()); } /** diff --git a/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java b/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java index d9937ed8958..a9f2d57230f 100644 --- a/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java +++ b/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java @@ -76,6 +76,7 @@ public class Thesaurus { private static final String DEFAULT_THESAURUS_NAMESPACE = "http://custom.shared.obj.ch/concept#"; private static final String RDF_NAMESPACE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; + private static final String RDF_SCHEMA_NAMESPACE = "http://www.w3.org/2000/01/rdf-schema#"; private static final String SKOS_NAMESPACE = "http://www.w3.org/2004/02/skos/core#"; @@ -120,7 +121,7 @@ public class Thesaurus { // map of lang -> dictionary of values // key is a dublinCore element (i.e. 
https://guides.library.ucsc.edu/c.php?g=618773&p=4306386) // see #retrieveDublinCore() for example - private Map> dublinCoreMultilingual = new Hashtable<>(); + private Map> dublinCoreMultilingual = new Hashtable<>(); private Cache THESAURUS_SEARCH_CACHE; @@ -132,14 +133,15 @@ protected Thesaurus() { } /** - * @param fname file name - * @param dname category/domain name of thesaurus + * @param fname file name + * @param dname category/domain name of thesaurus * @param thesaurusCacheMaxSize */ public Thesaurus(IsoLanguagesMapper isoLanguageMapper, String fname, String type, String dname, Path thesaurusFile, String siteUrl, int thesaurusCacheMaxSize) { this(isoLanguageMapper, fname, null, null, type, dname, thesaurusFile, siteUrl, false, thesaurusCacheMaxSize); } + public Thesaurus(IsoLanguagesMapper isoLanguageMapper, String fname, String tname, String tnamespace, String type, String dname, Path thesaurusFile, String siteUrl, boolean ignoreMissingError, int thesaurusCacheMaxSize) { this(isoLanguageMapper, fname, null, null, null, type, dname, thesaurusFile, siteUrl, false, thesaurusCacheMaxSize); } @@ -151,9 +153,9 @@ public Thesaurus(IsoLanguagesMapper isoLanguageMapper, String fname, super(); THESAURUS_SEARCH_CACHE = CacheBuilder.newBuilder() - .maximumSize(thesaurusCacheMaxSize) - .expireAfterAccess(25, TimeUnit.HOURS) - .build(); + .maximumSize(thesaurusCacheMaxSize) + .expireAfterAccess(25, TimeUnit.HOURS) + .build(); this.isoLanguageMapper = isoLanguageMapper; this.fname = fname; @@ -192,7 +194,6 @@ public Thesaurus(IsoLanguagesMapper isoLanguageMapper, String fname, } /** - * * @param fname * @param type * @param dname @@ -210,7 +211,7 @@ public Map getMultilingualTitles() { return Collections.unmodifiableMap(this.multilingualTitles); } - public Map> getDublinCoreMultilingual() { + public Map> getDublinCoreMultilingual() { return Collections.unmodifiableMap(this.dublinCoreMultilingual); } @@ -331,7 +332,7 @@ public synchronized Thesaurus initRepository() throws ConfigurationException, IO SailConfig syncSail = new SailConfig("org.openrdf.sesame.sailimpl.sync.SyncRdfSchemaRepository"); SailConfig memSail = new org.openrdf.sesame.sailimpl.memory.RdfSchemaRepositoryConfig(getFile().toString(), - RDFFormat.RDFXML); + RDFFormat.RDFXML); repConfig.addSail(syncSail); repConfig.addSail(memSail); repConfig.setWorldReadable(true); @@ -343,7 +344,7 @@ public synchronized Thesaurus initRepository() throws ConfigurationException, IO } public synchronized QueryResultsTable performRequest(String query) throws IOException, MalformedQueryException, - QueryEvaluationException, AccessDeniedException { + QueryEvaluationException, AccessDeniedException { if (Log.isDebugEnabled(Geonet.THESAURUS)) Log.debug(Geonet.THESAURUS, "Query : " + query); @@ -353,14 +354,15 @@ public synchronized QueryResultsTable performRequest(String query) throws IOExce public boolean hasConceptScheme(String uri) { String query = "SELECT conceptScheme" - + " FROM {conceptScheme} rdf:type {skos:ConceptScheme}" - + " WHERE conceptScheme = <" + uri + ">" - + " USING NAMESPACE skos = "; + + " FROM {conceptScheme} rdf:type {skos:ConceptScheme}" + + " WHERE conceptScheme = <" + uri + ">" + + " USING NAMESPACE skos = "; try { return performRequest(query).getRowCount() > 0; } catch (Exception e) { - Log.error(Geonet.THESAURUS_MAN, "Error retrieving concept scheme for " + thesaurusFile + ". Error is: " + e.getMessage()); + Log.error(Geonet.THESAURUS_MAN, + String.format("Error retrieving concept scheme for %s. 
Error is: %s", thesaurusFile, e.getMessage())); throw new RuntimeException(e); } } @@ -368,8 +370,8 @@ public boolean hasConceptScheme(String uri) { public List getConceptSchemes() { String query = "SELECT conceptScheme" - + " FROM {conceptScheme} rdf:type {skos:ConceptScheme}" - + " USING NAMESPACE skos = "; + + " FROM {conceptScheme} rdf:type {skos:ConceptScheme}" + + " USING NAMESPACE skos = "; try { List ret = new ArrayList<>(); @@ -380,7 +382,8 @@ public List getConceptSchemes() { } return ret; } catch (Exception e) { - Log.error(Geonet.THESAURUS_MAN, "Error retrieving concept schemes for " + thesaurusFile + ". Error is: " + e.getMessage()); + Log.error(Geonet.THESAURUS_MAN, String.format( + "Error retrieving concept schemes for %s. Error is: %s", thesaurusFile, e.getMessage())); return Collections.emptyList(); } } @@ -403,34 +406,28 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce URI mySubject = myFactory.createURI(keyword.getUriCode()); URI skosClass = myFactory.createURI(SKOS_NAMESPACE, "Concept"); + URI rdfType = myFactory.createURI(org.openrdf.vocabulary.RDF.TYPE); + mySubject.addProperty(rdfType, skosClass); + URI predicatePrefLabel = myFactory - .createURI(SKOS_NAMESPACE, "prefLabel"); + .createURI(SKOS_NAMESPACE, "prefLabel"); URI predicateScopeNote = myFactory - .createURI(SKOS_NAMESPACE, "scopeNote"); - - URI predicateBoundedBy = myFactory.createURI(namespaceGml, "BoundedBy"); - URI predicateEnvelope = myFactory.createURI(namespaceGml, "Envelope"); - URI predicateSrsName = myFactory.createURI(namespaceGml, "srsName"); - URI srsNameURI = myFactory - .createURI("http://www.opengis.net/gml/srs/epsg.xml#epsg:4326"); - BNode gmlNode = myFactory.createBNode(); - URI predicateLowerCorner = myFactory.createURI(namespaceGml, - "lowerCorner"); - URI predicateUpperCorner = myFactory.createURI(namespaceGml, - "upperCorner"); - - Literal lowerCorner = myFactory.createLiteral(keyword.getCoordWest() + " " + keyword.getCoordSouth()); - Literal upperCorner = myFactory.createLiteral(keyword.getCoordEast() + " " + keyword.getCoordNorth()); + .createURI(SKOS_NAMESPACE, "scopeNote"); + + URI predicateInScheme = myFactory + .createURI(SKOS_NAMESPACE, "inScheme"); + myGraph.add(mySubject, + predicateInScheme, + myFactory.createURI(this.getDefaultNamespace())); - mySubject.addProperty(rdfType, skosClass); Set> values = keyword.getValues().entrySet(); for (Entry entry : values) { String language = toiso639_1_Lang(entry.getKey()); Value valueObj = myFactory.createLiteral(entry.getValue(), language); myGraph.add(mySubject, predicatePrefLabel, valueObj); - } + Set> definitions = keyword.getDefinitions().entrySet(); for (Entry entry : definitions) { String language = toiso639_1_Lang(entry.getKey()); @@ -438,12 +435,29 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce myGraph.add(mySubject, predicateScopeNote, definitionObj); } - myGraph.add(mySubject, predicateBoundedBy, gmlNode); - gmlNode.addProperty(rdfType, predicateEnvelope); - myGraph.add(gmlNode, predicateLowerCorner, lowerCorner); - myGraph.add(gmlNode, predicateUpperCorner, upperCorner); - myGraph.add(gmlNode, predicateSrsName, srsNameURI); + if (!(keyword.getCoordEast() + keyword.getCoordNorth() + keyword.getCoordWest() + keyword.getCoordSouth()).trim().isEmpty()) { + URI predicateBoundedBy = myFactory.createURI(namespaceGml, "BoundedBy"); + URI predicateEnvelope = myFactory.createURI(namespaceGml, "Envelope"); + URI predicateSrsName = myFactory.createURI(namespaceGml, 
"srsName"); + URI srsNameURI = myFactory + .createURI("http://www.opengis.net/gml/srs/epsg.xml#epsg:4326"); + BNode gmlNode = myFactory.createBNode(); + URI predicateLowerCorner = myFactory.createURI(namespaceGml, + "lowerCorner"); + URI predicateUpperCorner = myFactory.createURI(namespaceGml, + "upperCorner"); + + Literal lowerCorner = myFactory.createLiteral(keyword.getCoordWest() + " " + keyword.getCoordSouth()); + Literal upperCorner = myFactory.createLiteral(keyword.getCoordEast() + " " + keyword.getCoordNorth()); + + myGraph.add(mySubject, predicateBoundedBy, gmlNode); + + gmlNode.addProperty(rdfType, predicateEnvelope); + myGraph.add(gmlNode, predicateLowerCorner, lowerCorner); + myGraph.add(gmlNode, predicateUpperCorner, upperCorner); + myGraph.add(gmlNode, predicateSrsName, srsNameURI); + } repository.addGraph(myGraph); return mySubject; @@ -452,8 +466,7 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce /** * Remove keyword from thesaurus. */ - public synchronized Thesaurus removeElement(KeywordBean keyword) throws MalformedQueryException, - QueryEvaluationException, IOException, AccessDeniedException { + public synchronized Thesaurus removeElement(KeywordBean keyword) throws AccessDeniedException { String namespace = keyword.getNameSpaceCode(); String code = keyword.getRelativeCode(); @@ -483,7 +496,7 @@ public synchronized Thesaurus removeElement(String uri) throws AccessDeniedExcep } private Thesaurus removeElement(Graph myGraph, URI subject) - throws AccessDeniedException { + throws AccessDeniedException { StatementIterator iter = myGraph.getStatements(subject, null, null); while (iter.hasNext()) { AtomicReference st = new AtomicReference(iter.next()); @@ -502,8 +515,8 @@ private Thesaurus removeElement(Graph myGraph, URI subject) private String toiso639_1_Lang(String lang) { String defaultCode = getIsoLanguageMapper().iso639_2_to_iso639_1( - Geonet.DEFAULT_LANGUAGE, - Geonet.DEFAULT_LANGUAGE.substring(0, 2)); + Geonet.DEFAULT_LANGUAGE, + Geonet.DEFAULT_LANGUAGE.substring(0, 2)); return getIsoLanguageMapper().iso639_2_to_iso639_1(lang, defaultCode); } @@ -518,8 +531,7 @@ private String toiso639_1_Lang(String lang) { * languages) and the coordinates will only be updated if they are non-empty * strings. */ - public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException { THESAURUS_SEARCH_CACHE.invalidateAll(); // Get thesaurus graph @@ -547,15 +559,14 @@ public synchronized URI updateElement(KeywordBean keyword, boolean replace) thro String language = toiso639_1_Lang(entry.getKey()); Value valueObj = myFactory.createLiteral(entry.getValue(), language); myGraph.add(subject, predicatePrefLabel, valueObj); - } + // add updated Definitions/Notes Set> definitions = keyword.getDefinitions().entrySet(); for (Entry entry : definitions) { String language = toiso639_1_Lang(entry.getKey()); Value definitionObj = myFactory.createLiteral(entry.getValue(), language); myGraph.add(subject, predicateScopeNote, definitionObj); - } // update bbox @@ -661,7 +672,7 @@ public Thesaurus updateCode(KeywordBean bean, String newcode) throws AccessDenie * Update concept code by creating URI from namespace and code. This is recommended when * thesaurus concept identifiers contains # eg. 
http://vocab.nerc.ac.uk/collection/P07/current#CFV13N44 */ - public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException { Graph myGraph = repository.getGraph(); ValueFactory myFactory = myGraph.getValueFactory(); @@ -676,10 +687,10 @@ public synchronized Thesaurus updateCode(String namespace, String oldcode, Strin /** * Update concept code using its URI. This is recommended when concept identifier may not be * based on thesaurus namespace and does not contains #. - * + *

* eg. http://vocab.nerc.ac.uk/collection/P07/current/CFV13N44/ */ - public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException { Graph myGraph = repository.getGraph(); ValueFactory myFactory = myGraph.getValueFactory(); @@ -728,13 +739,13 @@ public void createConceptScheme(String thesaurusTitle, Graph myGraph = new org.openrdf.model.impl.GraphImpl(); writeConceptScheme(myGraph, - thesaurusTitle, - multilingualTitles, - thesaurusDescription, - multilingualDescriptions, - identifier, - type, - namespace); + thesaurusTitle, + multilingualTitles, + thesaurusDescription, + multilingualDescriptions, + identifier, + type, + namespace); repository.addGraph(myGraph); } @@ -754,13 +765,13 @@ public void updateConceptScheme(String thesaurusTitle, removeElement(getConceptSchemes().get(0)); writeConceptScheme(myGraph, - thesaurusTitle, - multilingualTitles, - thesaurusDescription, - multilingualDescriptions, - identifier, - type, - namespace); + thesaurusTitle, + multilingualTitles, + thesaurusDescription, + multilingualDescriptions, + identifier, + type, + namespace); } public void writeConceptScheme(Graph myGraph, String thesaurusTitle, @@ -822,9 +833,6 @@ public void writeConceptScheme(Graph myGraph, String thesaurusTitle, } - - - private void addElement(String name, String value, Graph myGraph, ValueFactory myFactory, URI mySubject) { if (StringUtils.isNotEmpty(value)) { URI uri = myFactory.createURI(DC_NAMESPACE, name); @@ -860,22 +868,22 @@ private void addElement(String name, String value, Graph myGraph, ValueFactory m private void retrieveDublinCore(Element thesaurusEl) { List theNSs = getThesaurusNamespaces(); - Namespace xmlNS = Namespace.getNamespace("xml","http://www.w3.org/XML/1998/namespace"); + Namespace xmlNS = Namespace.getNamespace("xml", "http://www.w3.org/XML/1998/namespace"); try { List multiLingualTitles = (List) Xml.selectNodes(thesaurusEl, - "skos:ConceptScheme/dc:*[@xml:lang]|skos:ConceptScheme/dcterms:*[@xml:lang]", theNSs); + "skos:ConceptScheme/dc:*[@xml:lang]|skos:ConceptScheme/dcterms:*[@xml:lang]", theNSs); dublinCoreMultilingual.clear(); - for (Element el: multiLingualTitles) { + for (Element el : multiLingualTitles) { String lang = isoLanguageMapper.iso639_2_to_iso639_1(el.getAttribute("lang", xmlNS).getValue()); String value = el.getTextTrim(); String name = el.getName(); if (!dublinCoreMultilingual.containsKey(lang)) { - dublinCoreMultilingual.put(lang,new HashMap<>()); + dublinCoreMultilingual.put(lang, new HashMap<>()); } - dublinCoreMultilingual.get(lang).put(name,value); + dublinCoreMultilingual.get(lang).put(name, value); } } catch (Exception e) { - Log.warning(Geonet.THESAURUS,"error extracting multilingual dublin core items from thesaurus",e); + Log.warning(Geonet.THESAURUS, "error extracting multilingual dublin core items from thesaurus", e); } } @@ -894,11 +902,15 @@ private void retrieveDublinCore(Element thesaurusEl) { // } private void retrieveMultiLingualTitles(Element thesaurusEl) { try { - String xpathTitles = "skos:ConceptScheme/dc:title[@xml:lang]|skos:ConceptScheme/dcterms:title[@xml:lang]|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]"; + String xpathTitles = "skos:ConceptScheme/dc:title[@xml:lang]" + + "|skos:ConceptScheme/dcterms:title[@xml:lang]" + + "|skos:ConceptScheme/rdfs:label[@xml:lang]" + + 
"|skos:ConceptScheme/skos:prefLabel[@xml:lang]" + + "|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]"; multilingualTitles.clear(); multilingualTitles.putAll(retrieveMultilingualField(thesaurusEl, xpathTitles)); } catch (Exception e) { - Log.warning(Geonet.THESAURUS,"error extracting multilingual titles from thesaurus",e); + Log.warning(Geonet.THESAURUS, "error extracting multilingual titles from thesaurus", e); } } @@ -908,19 +920,19 @@ private void retrieveMultiLingualDescriptions(Element thesaurusEl) { multilingualDescriptions.clear(); multilingualDescriptions.putAll(retrieveMultilingualField(thesaurusEl, xpathDescriptions)); } catch (Exception e) { - Log.warning(Geonet.THESAURUS,"error extracting multilingual descriptions from thesaurus",e); + Log.warning(Geonet.THESAURUS, "error extracting multilingual descriptions from thesaurus", e); } } private Map retrieveMultilingualField(Element thesaurusEl, String xpath) throws JDOMException { List theNSs = getThesaurusNamespaces(); - Namespace xmlNS = Namespace.getNamespace("xml","http://www.w3.org/XML/1998/namespace"); + Namespace xmlNS = Namespace.getNamespace("xml", "http://www.w3.org/XML/1998/namespace"); Map multilingualValues = new HashMap<>(); List multilingualValuesEl = (List) Xml.selectNodes(thesaurusEl, - xpath, theNSs); - for (Element el: multilingualValuesEl) { + xpath, theNSs); + for (Element el : multilingualValuesEl) { String lang = isoLanguageMapper.iso639_2_to_iso639_1(el.getAttribute("lang", xmlNS).getValue()); String titleValue = el.getTextTrim(); multilingualValues.put(lang, titleValue); @@ -931,7 +943,7 @@ private Map retrieveMultilingualField(Element thesaurusEl, Strin /** * Retrieves the thesaurus information from rdf file. - * + *

* Used to set the thesaurusName and thesaurusDate for keywords. */ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitle, boolean ignoreMissingError) { @@ -944,34 +956,32 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl try { Element thesaurusEl = Xml.loadFile(thesaurusFile); - List theNSs = new ArrayList<>(); - Namespace rdfNamespace = Namespace.getNamespace("rdf", RDF_NAMESPACE); - theNSs.add(rdfNamespace); - theNSs.add(Namespace.getNamespace("skos", SKOS_NAMESPACE)); - theNSs.add(Namespace.getNamespace("dc", DC_NAMESPACE)); - theNSs.add(Namespace.getNamespace("dcterms", DCTERMS_NAMESPACE)); + List theNSs = getThesaurusNamespaces(); this.defaultNamespace = null; retrieveMultiLingualTitles(thesaurusEl); retrieveDublinCore(thesaurusEl); Element titleEl = Xml.selectElement(thesaurusEl, - "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title|" + - "skos:Collection/dc:title|skos:Collection/dcterms:title|" + - "rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs); + "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title" + + "|skos:ConceptScheme/rdfs:label|skos:ConceptScheme/skos:prefLabel" + + "|skos:Collection/dc:title|skos:Collection/dcterms:title" + + "|rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs); if (titleEl != null) { this.title = titleEl.getValue(); - this.defaultNamespace = titleEl.getParentElement().getAttributeValue("about", rdfNamespace); + this.defaultNamespace = titleEl + .getParentElement() + .getAttributeValue("about", Namespace.getNamespace("rdf", RDF_NAMESPACE)); } else { this.title = defaultTitle; this.defaultNamespace = DEFAULT_THESAURUS_NAMESPACE; } Element descriptionEl = Xml.selectElement(thesaurusEl, - "skos:ConceptScheme/dc:description|skos:ConceptScheme/dcterms:description|" + - "skos:Collection/dc:description|skos:Collection/dcterms:description|" + - "rdf:Description/dc:description|rdf:Description/dcterms:description", theNSs); + "skos:ConceptScheme/dc:description|skos:ConceptScheme/dcterms:description|" + + "skos:Collection/dc:description|skos:Collection/dcterms:description|" + + "rdf:Description/dc:description|rdf:Description/dcterms:description", theNSs); this.description = descriptionEl != null ? descriptionEl.getValue() : ""; @@ -984,13 +994,13 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl } Element issuedDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:issued", theNSs); - this.issuedDate = issuedDateEl==null? "": issuedDateEl.getText(); + this.issuedDate = issuedDateEl == null ? "" : issuedDateEl.getText(); Element modifiedDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:modified", theNSs); - this.modifiedDate = modifiedDateEl==null? "": modifiedDateEl.getText(); + this.modifiedDate = modifiedDateEl == null ? "" : modifiedDateEl.getText(); Element createdDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:created", theNSs); - this.createdDate = createdDateEl==null? "": createdDateEl.getText(); + this.createdDate = createdDateEl == null ? 
"" : createdDateEl.getText(); // Default date Element dateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:issued|skos:Collection/dc:date", theNSs); @@ -1027,11 +1037,13 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl } if (Log.isDebugEnabled(Geonet.THESAURUS_MAN)) { - Log.debug(Geonet.THESAURUS_MAN, "Thesaurus information: " + this.title + " (" + this.date + ")"); + Log.debug(Geonet.THESAURUS_MAN, String.format( + "Thesaurus information: %s (%s)", this.title, this.date)); } } catch (Exception ex) { if (!ignoreMissingError) - Log.error(Geonet.THESAURUS_MAN, "Error getting thesaurus info for " + thesaurusFile + ". Error is: " + ex.getMessage()); + Log.error(Geonet.THESAURUS_MAN, String.format( + "Error getting thesaurus info for %s. Error is: %s", thesaurusFile, ex.getMessage())); } } @@ -1059,12 +1071,11 @@ private Date parseThesaurusDate(Element dateEl) { StringBuffer errorMsg = new StringBuffer("Error parsing the thesaurus date value: "); errorMsg.append(dateVal); - boolean success = false; for (SimpleDateFormat df : dfList) { try { thesaurusDate = df.parse(dateVal); - success = true; + return thesaurusDate; } catch (Exception ex) { // Ignore the exception and try next format errorMsg.append("\n * with format: "); @@ -1074,11 +1085,9 @@ private Date parseThesaurusDate(Element dateEl) { } } // Report error if no success - if (!success) { - errorMsg.append("\nCheck thesaurus date in "); - errorMsg.append(this.fname); - Log.error(Geonet.THESAURUS_MAN, errorMsg.toString()); - } + errorMsg.append("\nCheck thesaurus date in "); + errorMsg.append(this.fname); + Log.error(Geonet.THESAURUS_MAN, errorMsg.toString()); return thesaurusDate; } @@ -1102,8 +1111,7 @@ public IsoLanguagesMapper getIsoLanguageMapper() { * @param subject the keyword that is related to the other keyword * @param related the relation between the two keywords */ - public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException { THESAURUS_SEARCH_CACHE.invalidateAll(); Graph myGraph = repository.getGraph(); @@ -1126,7 +1134,7 @@ public synchronized void addRelation(String subject, KeywordRelation related, St * @return keyword */ public KeywordBean getKeyword(String uri, String... languages) { - String cacheKey = "getKeyword" + uri + Arrays.stream(languages).collect(Collectors.joining("")); + String cacheKey = "getKeyword" + uri + String.join("", languages); Object cacheValue = THESAURUS_SEARCH_CACHE.getIfPresent(cacheKey); if (cacheValue != null) { return (KeywordBean) cacheValue; @@ -1136,9 +1144,9 @@ public KeywordBean getKeyword(String uri, String... languages) { try { Query query = QueryBuilder - .keywordQueryBuilder(getIsoLanguageMapper(), languages) - .where(Wheres.ID(uri)) - .build(); + .keywordQueryBuilder(getIsoLanguageMapper(), languages) + .where(Wheres.ID(uri)) + .build(); keywords = query.execute(this); } catch (Exception e) { @@ -1164,9 +1172,9 @@ public List getTopConcepts(String... 
languages) { try { Query query = QueryBuilder - .keywordQueryBuilder(getIsoLanguageMapper(), languages) - .select(Selectors.TOPCONCEPTS, true) - .build(); + .keywordQueryBuilder(getIsoLanguageMapper(), languages) + .select(Selectors.TOPCONCEPTS, true) + .build(); keywords = query.execute(this); } catch (Exception e) { @@ -1234,9 +1242,9 @@ public boolean hasBroader(String uri) { */ public List getRelated(String uri, KeywordRelation request, String... languages) { Query query = QueryBuilder - .keywordQueryBuilder(getIsoLanguageMapper(), languages) - .select(Selectors.related(uri, request), true) - .build(); + .keywordQueryBuilder(getIsoLanguageMapper(), languages) + .select(Selectors.related(uri, request), true) + .build(); try { return query.execute(this); @@ -1271,9 +1279,9 @@ public boolean hasKeywordWithLabel(String label, String langCode) { */ public KeywordBean getKeywordWithLabel(String label, String langCode) { Query query = QueryBuilder - .keywordQueryBuilder(getIsoLanguageMapper(), langCode) - .where(Wheres.prefLabel(langCode, label)) - .build(); + .keywordQueryBuilder(getIsoLanguageMapper(), langCode) + .where(Wheres.prefLabel(langCode, label)) + .build(); List matchingKeywords; @@ -1303,7 +1311,7 @@ public Map getTitles(ApplicationContext context) throws JDOMExce return LangUtils.translate(context, getKey()); } - public List getKeywordHierarchy(String keywordLabel, String langCode) { + public List getKeywordHierarchy(String keywordLabel, String langCode) { String cacheKey = "getKeywordHierarchy" + keywordLabel + langCode; Object cacheValue = THESAURUS_SEARCH_CACHE.getIfPresent(cacheKey); if (cacheValue != null) { @@ -1311,26 +1319,26 @@ public List getKeywordHierarchy(String keywordLabel, String langCode) { } boolean isUri = keywordLabel.startsWith("http"); KeywordBean term = - isUri - ? this.getKeyword(keywordLabel, langCode) - : this.getKeywordWithLabel(keywordLabel, langCode); + isUri + ? this.getKeyword(keywordLabel, langCode) + : this.getKeywordWithLabel(keywordLabel, langCode); - List> result = this.classify(term, langCode); + List> result = this.classify(term, langCode); - List hierarchies = new ArrayList<>(); - for ( List hierachy : result) { + List hierarchies = new ArrayList<>(); + for (List hierachy : result) { String path = hierachy.stream() - .map(k -> isUri ? k.getUriCode() : k.getPreferredLabel(langCode)) - .collect(Collectors.joining("^")); + .map(k -> isUri ? 
k.getUriCode() : k.getPreferredLabel(langCode)) + .collect(Collectors.joining("^")); hierarchies.add(path); } THESAURUS_SEARCH_CACHE.put(cacheKey, hierarchies); return hierarchies; } - public List> classify(KeywordBean term, String langCode) { + public List> classify(KeywordBean term, String langCode) { - List> result = new ArrayList<>(); + List> result = new ArrayList<>(); if (this.hasBroader(term.getUriCode())) { result.addAll(classifyTermWithBroaderTerms(term, langCode)); } else { @@ -1339,16 +1347,16 @@ public List> classify(KeywordBean term, String langCode) return result; } - private List> classifyTermWithBroaderTerms(KeywordBean term, String langCode) { - List> result = new ArrayList<>(); - for (ArrayList stringToBroaderTerm : classifyBroaderTerms(term, langCode)) { + private List> classifyTermWithBroaderTerms(KeywordBean term, String langCode) { + List> result = new ArrayList<>(); + for (ArrayList stringToBroaderTerm : classifyBroaderTerms(term, langCode)) { stringToBroaderTerm.add(term); result.add(stringToBroaderTerm); } return result; } - private List> classifyBroaderTerms(KeywordBean term, String langCode) { + private List> classifyBroaderTerms(KeywordBean term, String langCode) { List> result = new ArrayList<>(); List narrowerList = this.getNarrower(term.getUriCode(), langCode); for (KeywordBean broaderTerm : this.getBroader(term.getUriCode(), langCode)) { @@ -1360,8 +1368,8 @@ private List> classifyBroaderTerms(KeywordBean term, Str return result; } - private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) { - ArrayList list = new ArrayList <>(); + private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) { + ArrayList list = new ArrayList<>(); list.add(term); return list; } @@ -1370,6 +1378,7 @@ private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) private List getThesaurusNamespaces() { List theNSs = new ArrayList<>(); theNSs.add(Namespace.getNamespace("rdf", RDF_NAMESPACE)); + theNSs.add(Namespace.getNamespace("rdfs", RDF_SCHEMA_NAMESPACE)); theNSs.add(Namespace.getNamespace("skos", SKOS_NAMESPACE)); theNSs.add(Namespace.getNamespace("dc", DC_NAMESPACE)); theNSs.add(Namespace.getNamespace("dcterms", DCTERMS_NAMESPACE)); diff --git a/core/src/main/java/org/fao/geonet/kernel/WatchListNotifier.java b/core/src/main/java/org/fao/geonet/kernel/WatchListNotifier.java index 7291dd8ff8b..09a17638f1a 100644 --- a/core/src/main/java/org/fao/geonet/kernel/WatchListNotifier.java +++ b/core/src/main/java/org/fao/geonet/kernel/WatchListNotifier.java @@ -30,9 +30,13 @@ import org.fao.geonet.domain.Selection; import org.fao.geonet.domain.User; import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.languages.FeedbackLanguages; import org.fao.geonet.repository.SelectionRepository; import org.fao.geonet.repository.UserRepository; import org.fao.geonet.repository.UserSavedSelectionRepository; +import org.fao.geonet.util.LocalizedEmail; +import org.fao.geonet.util.LocalizedEmailParameter; +import org.fao.geonet.util.LocalizedEmailComponent; import org.fao.geonet.util.MailUtil; import org.fao.geonet.utils.Log; import org.quartz.JobExecutionContext; @@ -44,6 +48,10 @@ import java.util.*; import static org.fao.geonet.kernel.setting.Settings.SYSTEM_USER_LASTNOTIFICATIONDATE; +import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType.*; +import static org.fao.geonet.util.LocalizedEmailComponent.KeyType; +import static org.fao.geonet.util.LocalizedEmailComponent.ReplacementType.*; +import static 
org.fao.geonet.util.LocalizedEmailParameter.ParameterType; /** * Task checking on a regular basis the list of records @@ -53,15 +61,13 @@ public class WatchListNotifier extends QuartzJobBean { private String lastNotificationDate; private String nextLastNotificationDate; - private String subject; - private String message; - private String recordMessage; private String updatedRecordPermalink; private String language = "eng"; private SettingManager settingManager; private ApplicationContext appContext; private UserSavedSelectionRepository userSavedSelectionRepository; private UserRepository userRepository; + private FeedbackLanguages feedbackLanguages; @Value("${usersavedselection.watchlist.searchurl}") private String permalinkApp = "catalog.search#/search?_uuid={{filter}}"; @@ -92,20 +98,7 @@ public WatchListNotifier() { protected void executeInternal(JobExecutionContext jobContext) throws JobExecutionException { appContext = ApplicationContextHolder.get(); settingManager = appContext.getBean(SettingManager.class); - - ResourceBundle messages = ResourceBundle.getBundle("org.fao.geonet.api.Messages", - new Locale( - language - )); - - try { - subject = messages.getString("user_watchlist_subject"); - message = messages.getString("user_watchlist_message"); - recordMessage = messages.getString("user_watchlist_message_record"). - replace("{{link}}", - settingManager.getNodeURL() + permalinkRecordApp); - } catch (Exception e) { - } + feedbackLanguages = appContext.getBean(FeedbackLanguages.class); updatedRecordPermalink = settingManager.getSiteURL(language); @@ -166,6 +159,9 @@ protected void executeInternal(JobExecutionContext jobContext) throws JobExecuti } private void notify(Integer selectionId, Integer userId) { + + Locale[] feedbackLocales = feedbackLanguages.getLocales(new Locale(language)); + // Get metadata with changes since last notification // TODO: Could be relevant to get versionning system info once available // and report deleted records too. 
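
The next hunk swaps the old single-language String.format pipeline for the LocalizedEmail API. As a rough sketch of the pattern it introduces (all class, method, and constant names below are taken from this diff; the literal parameter values are illustrative only):

    // Resolve the configured feedback locales, falling back to the caller's language.
    Locale[] feedbackLocales = feedbackLanguages.getLocales(new Locale(language));

    // One component per email part; message keys resolve against the Messages bundles.
    LocalizedEmailComponent subject =
        new LocalizedEmailComponent(SUBJECT, "user_watchlist_subject", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT);
    LocalizedEmailComponent body =
        new LocalizedEmailComponent(MESSAGE, "user_watchlist_message", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT);

    // Positional parameters are registered once per requested locale.
    for (Locale locale : feedbackLocales) {
        subject.addParameters(locale,
            new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, settingManager.getSiteName()));
    }

    // LocalizedEmail(true) builds an HTML mail and renders every locale into one message.
    LocalizedEmail email = new LocalizedEmail(true);
    email.addComponents(subject, body);
    String mailSubject = email.getParsedSubject(feedbackLocales);
    String htmlMessage = email.getParsedMessage(feedbackLocales);

The per-record fragments in the hunk use NAMED_FORMAT together with enableCompileWithIndexFields(record), which is what replaces the old MailUtil.compileMessageWithIndexFields call.
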
@@ -188,27 +184,51 @@ private void notify(Integer selectionId, Integer userId) { // TODO: We should send email depending on user language Optional user = userRepository.findById(userId); if (user.isPresent() && StringUtils.isNotEmpty(user.get().getEmail())) { + String url = updatedRecordPermalink + + permalinkApp.replace("{{filter}}", String.join(" or ", updatedRecords)); - // Build message - StringBuffer listOfUpdateMessage = new StringBuffer(); - for (String record : updatedRecords) { - try { - listOfUpdateMessage.append( - MailUtil.compileMessageWithIndexFields(recordMessage, record, this.language) - ); - } catch (Exception e) { - Log.error(Geonet.USER_WATCHLIST, e.getMessage(), e); + LocalizedEmailComponent emailSubjectComponent = new LocalizedEmailComponent(SUBJECT, "user_watchlist_subject", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT); + LocalizedEmailComponent emailMessageComponent = new LocalizedEmailComponent(MESSAGE, "user_watchlist_message", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT); + + for (Locale feedbackLocale : feedbackLocales) { + + // Build message + StringBuffer listOfUpdateMessage = new StringBuffer(); + for (String record : updatedRecords) { + LocalizedEmailComponent recordMessageComponent = new LocalizedEmailComponent(NESTED, "user_watchlist_message_record", KeyType.MESSAGE_KEY, NAMED_FORMAT); + recordMessageComponent.enableCompileWithIndexFields(record); + recordMessageComponent.enableReplaceLinks(true); + try { + listOfUpdateMessage.append( + recordMessageComponent.parseMessage(feedbackLocale) + ); + } catch (Exception e) { + Log.error(Geonet.USER_WATCHLIST, e.getMessage(), e); + } } + + emailSubjectComponent.addParameters( + feedbackLocale, + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, settingManager.getSiteName()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, updatedRecords.size()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, lastNotificationDate) + ); + + emailMessageComponent.addParameters( + feedbackLocale, + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, listOfUpdateMessage.toString()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, lastNotificationDate), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, url), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 4, url) + ); + } - String url = updatedRecordPermalink + - permalinkApp.replace("{{filter}}", String.join(" or ", updatedRecords)); - String mailSubject = String.format(subject, - settingManager.getSiteName(), updatedRecords.size(), lastNotificationDate); - String htmlMessage = String.format(message, - listOfUpdateMessage.toString(), - lastNotificationDate, - url, url); + LocalizedEmail localizedEmail = new LocalizedEmail(true); + localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent); + + String mailSubject = localizedEmail.getParsedSubject(feedbackLocales); + String htmlMessage = localizedEmail.getParsedMessage(feedbackLocales); if (Log.isDebugEnabled(Geonet.USER_WATCHLIST)) { Log.debug(Geonet.USER_WATCHLIST, String.format( diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java index 96b0aa34ee1..16cfc9719ae 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java @@ -69,11 +69,14 @@ public interface IMetadataStatus { MetadataStatus setStatusExt(ServiceContext context, int id, int status, ISODate 
changeDate, String changeMessage) throws Exception; /** - * Set status of metadata id and do not reindex metadata id afterwards. + * Set status of metadata id and reindex metadata id afterwards based on updateIndex flag + * + * @param status metadata status to set + * @param updateIndex index update flag * * @return the saved status entity object */ - MetadataStatus setStatusExt(MetadataStatus status) throws Exception; + MetadataStatus setStatusExt(MetadataStatus status, boolean updateIndex) throws Exception; /** * Set status of metadata id and reindex metadata id afterwards. diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java index f2f159c029c..7464a267735 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java @@ -582,7 +582,11 @@ public AbstractMetadata insertMetadata(ServiceContext context, AbstractMetadata // Check if the schema is allowed by settings String mdImportSetting = settingManager.getValue(Settings.METADATA_IMPORT_RESTRICT); - if (mdImportSetting != null && !mdImportSetting.equals("")) { + if (mdImportSetting != null) { + // Remove spaces from the list so that "iso19115-3.2018, dublin-core" will also work + mdImportSetting = mdImportSetting.replace(" ", ""); + } + if (!StringUtils.isBlank(mdImportSetting)) { if (!newMetadata.getHarvestInfo().isHarvested() && !Arrays.asList(mdImportSetting.split(",")).contains(schema)) { throw new IllegalArgumentException("The system setting '" + Settings.METADATA_IMPORT_RESTRICT + "' doesn't allow to import " + schema diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java index 150bd65a81c..cf5f7fc972d 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java @@ -160,9 +160,11 @@ public MetadataStatus setStatus(ServiceContext context, int id, int status, ISOD } @Override - public MetadataStatus setStatusExt(MetadataStatus metatatStatus) throws Exception { + public MetadataStatus setStatusExt(MetadataStatus metatatStatus, boolean updateIndex) throws Exception { metadataStatusRepository.save(metatatStatus); - metadataIndexer.indexMetadata(metatatStatus.getMetadataId() + "", true, IndexingMode.full); + if (updateIndex) { + metadataIndexer.indexMetadata(metatatStatus.getMetadataId() + "", true, IndexingMode.full); + } return metatatStatus; } diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java index 040acbf4aca..21cf26e85bd 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java @@ -589,7 +589,7 @@ protected String createDraft(ServiceContext context, String templateId, String g List listOfStatusChange = new ArrayList<>(1); listOfStatusChange.add(metadataStatus); - sa.onStatusChange(listOfStatusChange); + sa.onStatusChange(listOfStatusChange, true); } } @@ -677,6 +677,7 @@ private void cloneStoreFileUploadRequests(AbstractMetadata original, AbstractMet 
metadataFileUpload.setFileSize(mfu.getFileSize()); metadataFileUpload.setUploadDate(mfu.getUploadDate()); metadataFileUpload.setUserName(mfu.getUserName()); + metadataFileUpload.setDeletedDate(mfu.getDeletedDate()); repo.save(metadataFileUpload); } diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java index e8678d483ab..8c0c0ca2b33 100644 --- a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java +++ b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -38,15 +38,24 @@ import org.fao.geonet.kernel.setting.Settings; import org.fao.geonet.repository.*; import org.fao.geonet.repository.specification.GroupSpecs; +import org.fao.geonet.util.LocalizedEmail; +import org.fao.geonet.util.LocalizedEmailParameter; +import org.fao.geonet.util.LocalizedEmailComponent; +import org.fao.geonet.languages.FeedbackLanguages; import org.fao.geonet.util.MailUtil; import org.fao.geonet.utils.Log; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; -import java.text.MessageFormat; import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.fao.geonet.kernel.setting.Settings.SYSTEM_FEEDBACK_EMAIL; +import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType.*; +import static org.fao.geonet.util.LocalizedEmailComponent.KeyType; +import static org.fao.geonet.util.LocalizedEmailComponent.ReplacementType.*; +import static org.fao.geonet.util.LocalizedEmailParameter.ParameterType; public class DefaultStatusActions implements StatusActions { @@ -136,7 +145,7 @@ public void onEdit(int id, boolean minorEdit) throws Exception { * @return * @throws Exception */ - public Map onStatusChange(List listOfStatus) throws Exception { + public Map onStatusChange(List listOfStatus, boolean updateIndex) throws Exception { if (listOfStatus.stream().map(MetadataStatus::getMetadataId).distinct().count() != listOfStatus.size()) { throw new IllegalArgumentException("Multiple status update received on the same metadata"); @@ -179,16 +188,7 @@ public Map onStatusChange(List listOf context.debug("Change status of metadata with id " + status.getMetadataId() + " from " + currentStatusId + " to " + statusId); // we know we are allowed to do the change, apply any side effects - boolean deleted = applyStatusChange(status.getMetadataId(), status, statusId); - - // inform content reviewers if the status is submitted - try { - notify(getUserToNotify(status), status); - } catch (Exception e) { - context.warning(String.format( - "Failed to send notification on status change for metadata %s with status %s. 
Error is: %s", - status.getMetadataId(), status.getStatusValue().getId(), e.getMessage())); - } + boolean deleted = applyStatusChange(status.getMetadataId(), status, statusId, updateIndex); if (deleted) { results.put(status.getMetadataId(), StatusChangeType.DELETED); @@ -208,6 +208,15 @@ public Map onStatusChange(List listOf } } + // inform content reviewers if the status is submitted + try { + notify(getUserToNotify(status), status); + } catch (Exception e) { + context.warning(String.format( + "Failed to send notification on status change for metadata %s with status %s. Error is: %s", + status.getMetadataId(), status.getStatusValue().getId(), e.getMessage())); + } + } return results; @@ -218,10 +227,10 @@ public Map onStatusChange(List listOf * eg. if APPROVED, publish a record, * if RETIRED, unpublish or delete the record. */ - private boolean applyStatusChange(int metadataId, MetadataStatus status, String toStatusId) throws Exception { + private boolean applyStatusChange(int metadataId, MetadataStatus status, String toStatusId, boolean updateIndex) throws Exception { boolean deleted = false; if (!deleted) { - metadataStatusManager.setStatusExt(status); + metadataStatusManager.setStatusExt(status, updateIndex); } return deleted; } @@ -240,61 +249,108 @@ protected void notify(List userToNotify, MetadataStatus status) throws Exc return; } - ResourceBundle messages = ResourceBundle.getBundle("org.fao.geonet.api.Messages", new Locale(this.language)); + ApplicationContext applicationContext = ApplicationContextHolder.get(); + FeedbackLanguages feedbackLanguages = applicationContext.getBean(FeedbackLanguages.class); - String translatedStatusName = getTranslatedStatusName(status.getStatusValue().getId()); - // TODO: Refactor to allow custom messages based on the type of status - String subjectTemplate = ""; - try { - subjectTemplate = messages - .getString("status_change_" + status.getStatusValue().getName() + "_email_subject"); - } catch (MissingResourceException e) { - subjectTemplate = messages.getString("status_change_default_email_subject"); - } - String subject = MessageFormat.format(subjectTemplate, siteName, translatedStatusName, replyToDescr // Author of the change - ); + Locale[] feedbackLocales = feedbackLanguages.getLocales(new Locale(this.language)); Set listOfId = new HashSet<>(1); listOfId.add(status.getMetadataId()); - String textTemplate = ""; - try { - textTemplate = messages.getString("status_change_" + status.getStatusValue().getName() + "_email_text"); - } catch (MissingResourceException e) { - textTemplate = messages.getString("status_change_default_email_text"); - } - - // Replace link in message - ApplicationContext applicationContext = ApplicationContextHolder.get(); - SettingManager sm = applicationContext.getBean(SettingManager.class); - textTemplate = textTemplate.replace("{{link}}", sm.getNodeURL()+ "api/records/'{{'index:uuid'}}'"); - UserRepository userRepository = context.getBean(UserRepository.class); User owner = userRepository.findById(status.getOwner()).orElse(null); IMetadataUtils metadataRepository = ApplicationContextHolder.get().getBean(IMetadataUtils.class); AbstractMetadata metadata = metadataRepository.findOne(status.getMetadataId()); - String metadataUrl = metadataUtils.getDefaultUrl(metadata.getUuid(), this.language); + String subjectTemplateKey = ""; + String textTemplateKey = ""; + boolean failedToFindASpecificSubjectTemplate = false; + boolean failedToFindASpecificTextTemplate = false; + + for (Locale feedbackLocale: feedbackLocales) { + 
ResourceBundle resourceBundle = ResourceBundle.getBundle("org.fao.geonet.api.Messages", feedbackLocale); + + if (!failedToFindASpecificSubjectTemplate) { + try { + subjectTemplateKey = "status_change_" + status.getStatusValue().getName() + "_email_subject"; + resourceBundle.getString(subjectTemplateKey); + } catch (MissingResourceException e) { + failedToFindASpecificSubjectTemplate = true; + } + } + + if (!failedToFindASpecificTextTemplate) { + try { + textTemplateKey = "status_change_" + status.getStatusValue().getName() + "_email_text"; + resourceBundle.getString(textTemplateKey); + } catch (MissingResourceException e) { + failedToFindASpecificTextTemplate = true; + } + } + + if ((failedToFindASpecificSubjectTemplate) && (failedToFindASpecificTextTemplate)) break; + } + + if (failedToFindASpecificSubjectTemplate) { + subjectTemplateKey = "status_change_default_email_subject"; + } - String message = MessageFormat.format(textTemplate, replyToDescr, // Author of the change - status.getChangeMessage(), translatedStatusName, status.getChangeDate(), status.getDueDate(), - status.getCloseDate(), - owner == null ? "" : Joiner.on(" ").skipNulls().join(owner.getName(), owner.getSurname()), - metadataUrl); + if (failedToFindASpecificTextTemplate) { + textTemplateKey = "status_change_default_email_text"; + } + LocalizedEmailComponent emailSubjectComponent = new LocalizedEmailComponent(SUBJECT, subjectTemplateKey, KeyType.MESSAGE_KEY, NUMERIC_FORMAT); + emailSubjectComponent.enableCompileWithIndexFields(metadata.getUuid()); + + LocalizedEmailComponent emailMessageComponent = new LocalizedEmailComponent(MESSAGE, textTemplateKey, KeyType.MESSAGE_KEY, NUMERIC_FORMAT); + emailMessageComponent.enableCompileWithIndexFields(metadata.getUuid()); + emailMessageComponent.enableReplaceLinks(false); + + LocalizedEmailComponent emailSalutationComponent = new LocalizedEmailComponent(SALUTATION, "{{userName}},\n\n", KeyType.RAW_VALUE, NONE); + + for (Locale feedbackLocale : feedbackLocales) { + // TODO: Refactor to allow custom messages based on the type of status + + emailSubjectComponent.addParameters( + feedbackLocale, + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, siteName), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, getTranslatedStatusName(status.getStatusValue().getId(), feedbackLocale)), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, replyToDescr) + ); + + emailMessageComponent.addParameters( + feedbackLocale, + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, replyToDescr), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, status.getChangeMessage()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, getTranslatedStatusName(status.getStatusValue().getId(), feedbackLocale)), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 4, status.getChangeDate()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 5, status.getDueDate()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 6, status.getCloseDate()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 7, owner == null ? 
"" : Joiner.on(" ").skipNulls().join(owner.getName(), owner.getSurname())), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 8, metadataUtils.getDefaultUrl(metadata.getUuid(), feedbackLocale.getISO3Language())) + ); + } - subject = MailUtil.compileMessageWithIndexFields(subject, metadata.getUuid(), this.language); - message = MailUtil.compileMessageWithIndexFields(message, metadata.getUuid(), this.language); for (User user : userToNotify) { - String salutation = Joiner.on(" ").skipNulls().join(user.getName(), user.getSurname()); - //If we have a salutation then end it with a "," - if (StringUtils.isEmpty(salutation)) { - salutation = ""; + LocalizedEmail localizedEmail = new LocalizedEmail(false); + + String userName = Joiner.on(" ").skipNulls().join(user.getName(), user.getSurname()); + //If we have a userName add the salutation + String message; + if (StringUtils.isEmpty(userName)) { + localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent); + + message = localizedEmail.getParsedMessage(feedbackLocales); } else { - salutation += ",\n\n"; + localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent, emailSalutationComponent); + + Map replacements = new HashMap<>(); + replacements.put("{{userName}}", userName); + message = localizedEmail.getParsedMessage(feedbackLocales, replacements); } - sendEmail(user.getEmail(), subject, salutation + message); + String subject = localizedEmail.getParsedSubject(feedbackLocales); + sendEmail(user.getEmail(), subject, message); } } @@ -317,6 +373,25 @@ protected List getUserToNotify(MetadataStatus status) { return new ArrayList<>(); } + // If status is DRAFT and previous status is SUBMITTED, which means either: + // - a cancel working copy (from editor) --> should be notified the reviewer. + // - rejection (from reviewer) --> should be notified the editor. + // and the notification level is recordUserAuthor or recordProfileReviewer, + // then adjust the notification level, depending on the user role + if ((status.getStatusValue().getId() == Integer.parseInt(StatusValue.Status.DRAFT)) && + (!StringUtils.isEmpty(status.getPreviousState()) && + (status.getPreviousState().equals(StatusValue.Status.SUBMITTED))) && + (notificationLevel.equals(StatusValueNotificationLevel.recordUserAuthor) || (notificationLevel.equals(StatusValueNotificationLevel.recordProfileReviewer)))) { + UserRepository userRepository = ApplicationContextHolder.get().getBean(UserRepository.class); + Optional user = userRepository.findById(status.getUserId()); + if (user.isPresent()) { + if (user.get().getProfile() == Profile.Editor) { + notificationLevel = StatusValueNotificationLevel.recordProfileReviewer; + } else { + notificationLevel = StatusValueNotificationLevel.recordUserAuthor; + } + } + } // TODO: Status does not provide batch update // So taking care of one record at a time. 
// Currently the code could notify a mix of reviewers @@ -378,7 +453,9 @@ public static List getUserToNotify(StatusValueNotificationLevel notificati } } } - return users; + + // Filter out users without email + return users.stream().filter(u -> StringUtils.isNotEmpty(u.getEmail())).collect(Collectors.toList()); } public static List getGroupToNotify(StatusValueNotificationLevel notificationLevel, List groupNames) { @@ -408,14 +485,14 @@ protected void unsetAllOperations(int mdId) throws Exception { } } - private String getTranslatedStatusName(int statusValueId) { + private String getTranslatedStatusName(int statusValueId, Locale locale) { String translatedStatusName = ""; StatusValue s = statusValueRepository.findOneById(statusValueId); if (s == null) { translatedStatusName = statusValueId + " (Status not found in database translation table. Check the content of the StatusValueDes table.)"; } else { - translatedStatusName = s.getLabel(this.language); + translatedStatusName = s.getLabel(locale.getISO3Language()); } return translatedStatusName; } diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java index 9a4aecff585..047c0b1b33a 100644 --- a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java +++ b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java @@ -38,6 +38,6 @@ public interface StatusActions { public void onEdit(int id, boolean minorEdit) throws Exception; - public Map onStatusChange(List status) throws Exception; + public Map onStatusChange(List status, boolean updateIndex) throws Exception; } diff --git a/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java b/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java index e06b42e724b..978ab63a750 100644 --- a/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java @@ -29,8 +29,8 @@ import co.elastic.clients.elasticsearch.core.bulk.BulkOperation; import co.elastic.clients.elasticsearch.core.bulk.UpdateOperation; import co.elastic.clients.elasticsearch.core.search.Hit; -import co.elastic.clients.elasticsearch.indices.*; import co.elastic.clients.elasticsearch.indices.ExistsRequest; +import co.elastic.clients.elasticsearch.indices.*; import co.elastic.clients.transport.endpoints.BooleanResponse; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -73,8 +73,7 @@ import java.util.*; import static org.fao.geonet.constants.Geonet.IndexFieldNames.IS_TEMPLATE; -import static org.fao.geonet.kernel.search.IndexFields.INDEXING_ERROR_FIELD; -import static org.fao.geonet.kernel.search.IndexFields.INDEXING_ERROR_MSG; +import static org.fao.geonet.kernel.search.IndexFields.*; public class EsSearchManager implements ISearchManager { @@ -216,7 +215,6 @@ private void addMDFields(Element doc, Path schemaDir, doc.addContent(new Element(INDEXING_ERROR_FIELD).setText("true")); doc.addContent(createIndexingErrorMsgElement("indexingErrorMsg-indexingStyleSheetError", "error", Map.of("message", e.getMessage()))); - doc.addContent(new Element(IndexFields.DRAFT).setText("n")); } } @@ -225,7 +223,7 @@ private void addMoreFields(Element doc, Multimap fields) { fields.entries().forEach(e -> { Element newElement = new Element(e.getKey()) .setText(String.valueOf(e.getValue())); - if(objectFields.contains(e.getKey())) { + if (objectFields.contains(e.getKey())) { 
newElement.setAttribute("type", "object"); } doc.addContent(newElement); @@ -349,6 +347,7 @@ public BulkResponse updateFields(String id, Multimap fields, Set fields.asMap().forEach((e, v) -> fieldMap.put(e, v.toArray())); return updateFields(id, fieldMap, fieldsToRemove); } + public BulkResponse updateFields(String id, Map fieldMap, Set fieldsToRemove) throws IOException { fieldMap.put(Geonet.IndexFieldNames.INDEXING_DATE, new Date()); @@ -404,7 +403,7 @@ public void updateFieldsAsynch(String id, Map fields) { if (exception != null) { LOGGER.error("Failed to index {}", exception); } else { - LOGGER.info("Updated fields for document {}", id); + LOGGER.info("Updated fields for document {}", id); } }); } @@ -479,7 +478,7 @@ private void sendDocumentsToIndex() { } catch (Exception e) { LOGGER.error( "An error occurred while indexing {} documents in current indexing list. Error is {}.", - listOfDocumentsToIndex.size(), e.getMessage()); + listOfDocumentsToIndex.size(), e.getMessage()); } finally { // TODO: Trigger this async ? documents.keySet().forEach(uuid -> overviewFieldUpdater.process(uuid)); @@ -502,6 +501,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, String id = ""; String uuid = ""; String isTemplate = ""; + String isDraft = ""; String failureDoc = documents.get(e.id()); try { @@ -510,13 +510,14 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, id = node.get(IndexFields.DBID).asText(); uuid = node.get("uuid").asText(); isTemplate = node.get(IS_TEMPLATE).asText(); + isDraft = node.get(DRAFT).asText(); } catch (Exception ignoredException) { } docWithErrorInfo.put(IndexFields.DBID, id); docWithErrorInfo.put("uuid", uuid); docWithErrorInfo.put(IndexFields.RESOURCE_TITLE, resourceTitle); docWithErrorInfo.put(IS_TEMPLATE, isTemplate); - docWithErrorInfo.put(IndexFields.DRAFT, "n"); + docWithErrorInfo.put(IndexFields.DRAFT, isDraft); docWithErrorInfo.put(INDEXING_ERROR_FIELD, true); ArrayNode errors = docWithErrorInfo.putArray(INDEXING_ERROR_MSG); errors.add(createIndexingErrorMsgObject(e.error().reason(), "error", Map.of())); @@ -539,7 +540,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, BulkResponse response = client.bulkRequest(defaultIndex, listErrorOfDocumentsToIndex); if (response.errors()) { LOGGER.error("Failed to save error documents {}.", - Arrays.toString(errorDocumentIds.toArray())); + Arrays.toString(errorDocumentIds.toArray())); } } } @@ -552,6 +553,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, static { arrayFields = ImmutableSet.builder() .add(Geonet.IndexFieldNames.RECORDLINK) + .add("geom") .add("topic") .add("cat") .add("keyword") @@ -572,6 +574,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, .add("status_text") .add("coordinateSystem") .add("identifier") + .add("maintenance") .add("responsibleParty") .add("mdLanguage") .add("otherLanguage") @@ -664,14 +667,7 @@ public ObjectNode documentToJson(Element xml) { || propertyName.endsWith("DateForResource") || propertyName.startsWith("cl_"); - if (name.equals("geom")) { - try { - doc.set("geom", mapper.readTree(nodeElements.get(0).getTextNormalize())); - } catch (IOException e) { - LOGGER.error("Parsing invalid geometry for JSON node {}. 
Error is: {}", - nodeElements.get(0).getTextNormalize(), e.getMessage()); - } - } else if (isArray) { + if (isArray) { ArrayNode arrayNode = doc.putArray(propertyName); for (Element node : nodeElements) { if (isObject) { @@ -680,7 +676,7 @@ public ObjectNode documentToJson(Element xml) { mapper.readTree(node.getTextNormalize())); } catch (IOException e) { LOGGER.error("Parsing invalid JSON node {} for property {}. Error is: {}", - node.getTextNormalize(), propertyName, e.getMessage()); + node.getTextNormalize(), propertyName, e.getMessage()); } } else { arrayNode.add( @@ -699,7 +695,7 @@ public ObjectNode documentToJson(Element xml) { )); } catch (IOException e) { LOGGER.error("Parsing invalid JSON node {} for property {}. Error is: {}", - nodeElements.get(0).getTextNormalize(), propertyName, e.getMessage()); + nodeElements.get(0).getTextNormalize(), propertyName, e.getMessage()); } } else { doc.put(propertyName, @@ -712,7 +708,8 @@ public ObjectNode documentToJson(Element xml) { } - /** Field starting with _ not supported in Kibana + /** + * Field starting with _ not supported in Kibana * Those are usually GN internal fields */ private String getPropertyName(String name) { @@ -817,8 +814,8 @@ public SearchResponse query(JsonNode jsonRequest, Set includedFields, return client.query(defaultIndex, jsonRequest, null, includedFields, from, size); } - public Map getFieldsValues(String id, Set fields) throws IOException { - return client.getFieldsValues(defaultIndex, id, fields); + public Map getFieldsValues(String id, Set fields, String language) throws Exception { + return client.getFieldsValues(defaultIndex, id, fields, language); } @@ -940,12 +937,12 @@ public boolean isIndexWritable(String indexName) throws IOException, Elasticsear String indexBlockRead = "index.blocks.read_only_allow_delete"; GetIndicesSettingsRequest request = GetIndicesSettingsRequest.of( - b -> b.index(indexName) - .name(indexBlockRead) + b -> b.index(indexName) + .name(indexBlockRead) ); GetIndicesSettingsResponse settings = this.client.getClient() - .indices().getSettings(request); + .indices().getSettings(request); IndexState indexState = settings.get(indexBlockRead); @@ -956,7 +953,7 @@ public boolean isIndexWritable(String indexName) throws IOException, Elasticsear /** * Make a JSON Object that properly represents an indexingErrorMsg, to be used in the index. 
* - * @param type either 'error' or 'warning' + * @param type either 'error' or 'warning' * @param string a string that is translatable (see, e.g., en-search.json) * @param values values that replace the placeholders in the `string` parameter * @return a json object that represents an indexingErrorMsg @@ -967,7 +964,7 @@ public ObjectNode createIndexingErrorMsgObject(String string, String type, Map valuesObject.put(k, String.valueOf(v))); + values.forEach((k, v) -> valuesObject.put(k, String.valueOf(v))); indexingErrorMsg.set("values", valuesObject); return indexingErrorMsg; } @@ -975,7 +972,7 @@ public ObjectNode createIndexingErrorMsgObject(String string, String type, Map cache = new HashMap<>(); public UserInfoCacheItem getItem(String accessKey) { synchronized (lockobj) { - if (!cache.containsKey(accessKey)) - return null; - UserInfoCacheItem item = cache.get(accessKey); - if (item.isExpired()) { - cache.remove(accessKey); - return null; - } - return item; + cache.entrySet().removeIf(e -> e.getValue().isExpired()); + return cache.get(accessKey); } } diff --git a/core/src/main/java/org/fao/geonet/kernel/setting/SettingManager.java b/core/src/main/java/org/fao/geonet/kernel/setting/SettingManager.java index a3cd94bcb3c..b6f015d6b58 100644 --- a/core/src/main/java/org/fao/geonet/kernel/setting/SettingManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/setting/SettingManager.java @@ -33,6 +33,7 @@ import org.fao.geonet.domain.Setting; import org.fao.geonet.domain.SettingDataType; import org.fao.geonet.domain.Setting_; +import org.fao.geonet.languages.FeedbackLanguages; import org.fao.geonet.repository.SettingRepository; import org.fao.geonet.repository.SortUtils; import org.fao.geonet.repository.SourceRepository; @@ -94,6 +95,9 @@ public class SettingManager { @Autowired DefaultLanguage defaultLanguage; + @Autowired + FeedbackLanguages feedbackLanguages; + @PostConstruct private void init() { this.pathFinder = new ServletPathFinder(servletContext); @@ -343,6 +347,12 @@ public boolean setValue(String key, String value) { repo.save(setting); + if (key.equals("system/feedback/languages")) { + feedbackLanguages.updateSupportedLocales(); + } else if (key.equals("system/feedback/translationFollowsText")) { + feedbackLanguages.updateTranslationFollowsText(); + } + return true; } diff --git a/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java b/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java index c3c2b209271..a96fa132585 100644 --- a/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java +++ b/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -60,6 +60,8 @@ public class Settings { public static final String SYSTEM_USERS_IDENTICON = "system/users/identicon"; public static final String SYSTEM_SEARCHSTATS = "system/searchStats/enable"; public static final String SYSTEM_FEEDBACK_EMAIL = "system/feedback/email"; + public static final String SYSTEM_FEEDBACK_LANGUAGES = "system/feedback/languages"; + public static final String SYSTEM_FEEDBACK_TRANSLATION_FOLLOWS_TEXT = "system/feedback/translationFollowsText"; public static final String SYSTEM_FEEDBACK_MAILSERVER_HOST = "system/feedback/mailServer/host"; public static final String SYSTEM_FEEDBACK_MAILSERVER_PORT = 
"system/feedback/mailServer/port"; public static final String SYSTEM_FEEDBACK_MAILSERVER_USERNAME = "system/feedback/mailServer/username"; @@ -71,7 +73,6 @@ public class Settings { public static final String SYSTEM_ENABLE_ALL_THESAURUS = "system/metadata/allThesaurus"; public static final String SYSTEM_METADATA_THESAURUS_NAMESPACE = "system/metadata/thesaurusNamespace"; public static final String SYSTEM_METADATA_VALIDATION_REMOVESCHEMALOCATION = "system/metadata/validation/removeSchemaLocation"; - public static final String SYSTEM_METADATA_HISTORY_ENABLED = "system/metadata/history/enabled"; public static final GNSetting SYSTEM_SITE_SVNUUID = new GNSetting("system/site/svnUuid", true); public static final String SYSTEM_INTRANET_NETWORK = "system/intranet/network"; public static final String SYSTEM_INTRANET_NETMASK = "system/intranet/netmask"; @@ -84,6 +85,7 @@ public class Settings { public static final String SYSTEM_CSW_CAPABILITY_RECORD_UUID = "system/csw/capabilityRecordUuid"; public static final String SYSTEM_CSW_METADATA_PUBLIC = "system/csw/metadataPublic"; public static final String SYSTEM_USERSELFREGISTRATION_ENABLE = "system/userSelfRegistration/enable"; + public static final String SYSTEM_USERSELFREGISTRATION_EMAIL_DOMAINS = "system/userSelfRegistration/domainsAllowed"; public static final String SYSTEM_USERSELFREGISTRATION_RECAPTCHA_ENABLE = "system/userSelfRegistration/recaptcha/enable"; public static final String SYSTEM_USERSELFREGISTRATION_RECAPTCHA_PUBLICKEY = "system/userSelfRegistration/recaptcha/publickey"; public static final String SYSTEM_USERSELFREGISTRATION_RECAPTCHA_SECRETKEY = "system/userSelfRegistration/recaptcha/secretkey"; @@ -139,6 +141,8 @@ public class Settings { public static final String METADATA_IMPORT_RESTRICT = "metadata/import/restrict"; public static final String METADATA_IMPORT_USERPROFILE = "metadata/import/userprofile"; public static final String METADATA_BATCH_EDITING_ACCESS_LEVEL = "metadata/batchediting/accesslevel"; + public static final String METADATA_HISTORY_ENABLED = "metadata/history/enabled"; + public static final String METADATA_HISTORY_ACCESS_LEVEL = "metadata/history/accesslevel"; public static final String METADATA_PUBLISHED_DELETE_USERPROFILE = "metadata/delete/profilePublishedMetadata"; public static final String METADATA_PUBLISH_USERPROFILE = "metadata/publication/profilePublishMetadata"; public static final String METADATA_UNPUBLISH_USERPROFILE = "metadata/publication/profileUnpublishMetadata"; @@ -154,6 +158,9 @@ public class Settings { public static final String SYSTEM_SECURITY_PASSWORDENFORCEMENT_USEPATTERN = "system/security/passwordEnforcement/usePattern"; public static final String SYSTEM_SECURITY_PASSWORDENFORCEMENT_PATTERN = "system/security/passwordEnforcement/pattern"; public static final String SYSTEM_SECURITY_PASSWORD_ALLOWADMINRESET = "system/security/password/allowAdminReset"; + public static final String SYSTEM_TRANSLATION_PROVIDER = "system/translation/provider"; + public static final String SYSTEM_TRANSLATION_SERVICEURL = "system/translation/serviceUrl"; + public static final String SYSTEM_TRANSLATION_APIKEY = "system/translation/apiKey"; public static final String MICROSERVICES_ENABLED = "microservices/enabled"; diff --git a/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java b/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java index 5686f54e12a..e904e8114a8 100644 --- a/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java +++ 
b/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2019 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -39,10 +39,6 @@ import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; import java.util.Optional; @@ -78,21 +74,19 @@ public void processMetadata(Element element, AbstractMetadata md) throws org.jdo if (schemaPlugin instanceof LinkAwareSchemaPlugin) { metadataLinkRepository - .findAll(metadatalinksTargetting(md)) - .stream() - .forEach(metadatalink -> { - metadatalink.getLink().getRecords().remove(metadatalink); - }); + .findAll(metadatalinksTargetting(md)) + .stream() + .forEach(metadatalink -> metadatalink.getLink().getRecords().remove(metadatalink)); entityManager.flush(); ((LinkAwareSchemaPlugin) schemaPlugin).createLinkStreamer(new ILinkBuilder() { @Override public Link found(String url) { - Link link = linkRepository.findOneByUrl(url); - if (link != null) { - return link; + Optional linkOptional = linkRepository.findOneByUrl(url); + if (linkOptional.isPresent()) { + return linkOptional.get(); } else { - link = new Link(); + Link link = new Link(); link.setUrl(url); linkRepository.save(link); return link; @@ -102,7 +96,7 @@ public Link found(String url) { @Override public void persist(Link link, AbstractMetadata metadata) { MetadataLink metadataLink = new MetadataLink(); - metadataLink.setMetadataId(new Integer(metadata.getId())); + metadataLink.setMetadataId(metadata.getId()); metadataLink.setMetadataUuid(metadata.getUuid()); metadataLink.setLink(link); link.getRecords().add(metadataLink); @@ -115,10 +109,10 @@ public void persist(Link link, AbstractMetadata metadata) { public void purgeMetataLink(Link link) { metadataLinkRepository - .findAll(metadatalinksTargetting(link)) - .stream() - .filter(metadatalink -> isReferencingAnUnknownMetadata((MetadataLink)metadatalink)) - .forEach(metadataLinkRepository::delete); + .findAll(metadatalinksTargetting(link)) + .stream() + .filter(this::isReferencingAnUnknownMetadata) + .forEach(metadataLinkRepository::delete); entityManager.flush(); } @@ -136,28 +130,16 @@ public void testLink(Link link) { } private Specification metadatalinksTargetting(Link link) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery criteriaQuery, CriteriaBuilder criteriaBuilder) { - return criteriaBuilder.equal(root.get(MetadataLink_.link).get(Link_.id), link.getId()); - } - }; + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(MetadataLink_.link).get(Link_.id), link.getId()); } private Specification metadatalinksTargetting(AbstractMetadata md) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery criteriaQuery, CriteriaBuilder criteriaBuilder) { - return criteriaBuilder.equal(root.get(MetadataLink_.metadataId), md.getId()); - } - }; + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(MetadataLink_.metadataId), md.getId()); } private boolean 
isReferencingAnUnknownMetadata(MetadataLink metadatalink) { Optional metadata = metadataRepository.findById(metadatalink.getMetadataId()); - return !metadata.isPresent(); + return metadata.isEmpty(); } - - } diff --git a/core/src/main/java/org/fao/geonet/languages/FeedbackLanguages.java b/core/src/main/java/org/fao/geonet/languages/FeedbackLanguages.java new file mode 100644 index 00000000000..183ac8426f5 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/languages/FeedbackLanguages.java @@ -0,0 +1,129 @@ +//============================================================================= +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.languages; + +import org.apache.commons.lang.StringUtils; +import org.fao.geonet.constants.Geonet; +import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.kernel.setting.Settings; +import org.fao.geonet.utils.Log; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.PostConstruct; +import java.util.*; + +/** + * Represents a utility class for managing supported locales and translation follows text for feedback. + */ +public class FeedbackLanguages { + private Locale[] supportedLocales; + private String translationFollowsText; + + @Autowired + SettingManager settingManager; + + /** + * Initializes the supported locales and translation follows text after bean creation. + */ + @PostConstruct + public void init() { + updateSupportedLocales(); + updateTranslationFollowsText(); + } + + /** + * Updates the supported locales based on the system feedback languages setting. + */ + public void updateSupportedLocales() { + String systemFeedbackLanguages = getSettingsValue(Settings.SYSTEM_FEEDBACK_LANGUAGES); + + if (StringUtils.isBlank(systemFeedbackLanguages)) { + supportedLocales = null; + return; + } + + supportedLocales = Arrays.stream(systemFeedbackLanguages.split(",")) + .map(String::trim) + .map(Locale::new) + .filter(this::isValidLocale) + .toArray(Locale[]::new); + } + + /** + * Updates the translation follows text based on the system feedback translation text setting. + */ + public void updateTranslationFollowsText() { + translationFollowsText = getSettingsValue(Settings.SYSTEM_FEEDBACK_TRANSLATION_FOLLOWS_TEXT); + } + + /** + * Retrieves the supported locales. If no supported locales are found, returns a fallback locale. 
+ * @param fallbackLocale The fallback locale to be returned if no supported locales are available. + * @return An array of supported locales or a single fallback locale if none are available. + */ + public Locale[] getLocales(Locale fallbackLocale) { + if (supportedLocales == null || supportedLocales.length < 1) { + return new Locale[] { fallbackLocale }; + } + + return supportedLocales; + } + + /** + * Retrieves the translation follows text. + * @return The translation follows text. + */ + public String getTranslationFollowsText() { + return translationFollowsText; + } + + /** + * Checks if the provided locale is valid by attempting to load a ResourceBundle. + * @param locale The locale to validate. + * @return True if the locale is valid, false otherwise. + */ + private boolean isValidLocale(Locale locale) { + Boolean isValid; + try { + isValid = locale.getLanguage().equals(Geonet.DEFAULT_LANGUAGE) + || ResourceBundle.getBundle("org.fao.geonet.api.Messages", locale).getLocale().getLanguage().equals(locale.getLanguage()); + } catch (MissingResourceException e) { + isValid = false; + } + if (!isValid) { + String localeLanguage; + try { + localeLanguage = locale.getISO3Language(); + } catch (MissingResourceException e) { + localeLanguage = locale.getLanguage(); + } + Log.warning(Log.GEONETWORK_MODULE + ".feedbacklanguages", "Locale '" + localeLanguage + "' is invalid or missing message bundles. Ensure feedback locales are correct."); + } + return isValid; + } + + private String getSettingsValue(String settingName) { + return settingManager.getValue(settingName); + } +} diff --git a/core/src/main/java/org/fao/geonet/lib/DbLib.java b/core/src/main/java/org/fao/geonet/lib/DbLib.java index 6a85a1e334c..407304043fd 100644 --- a/core/src/main/java/org/fao/geonet/lib/DbLib.java +++ b/core/src/main/java/org/fao/geonet/lib/DbLib.java @@ -183,7 +183,7 @@ private void runSQL(Statement statement, List data, boolean failOnError) * * @param type @return */ - private Path checkFilePath(ServletContext servletContext, Path appPath, Path filePath, String prefix, String type) { + private Path checkFilePath(ServletContext servletContext, Path appPath, Path filePath, String prefix, String type) throws IOException { Path finalPath; finalPath = testPath(filePath.resolve(prefix + type + SQL_EXTENSION)); @@ -214,9 +214,10 @@ private Path checkFilePath(ServletContext servletContext, Path appPath, Path fil if (finalPath != null) return finalPath; else { - Log.debug(Geonet.DB, " No default SQL script found: " + (filePath + "/" + prefix + type + SQL_EXTENSION)); + String msg = String.format("SQL script not found: %s", filePath + "/" + prefix + type + SQL_EXTENSION); + Log.debug(Geonet.DB, msg); + throw new IOException(msg); } - return toPath(""); } private Path toPath(String pathString) { diff --git a/core/src/main/java/org/fao/geonet/util/LocalizedEmail.java b/core/src/main/java/org/fao/geonet/util/LocalizedEmail.java new file mode 100644 index 00000000000..0aa1bf978fb --- /dev/null +++ b/core/src/main/java/org/fao/geonet/util/LocalizedEmail.java @@ -0,0 +1,149 @@ +//============================================================================= +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the 
Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.util; + +import org.apache.commons.lang.StringUtils; +import org.fao.geonet.ApplicationContextHolder; +import org.fao.geonet.languages.FeedbackLanguages; +import org.fao.geonet.utils.Log; + +import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType.*; +import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType; + +import java.util.*; + +/** + * Class representing a localized email. + */ +public class LocalizedEmail { + private final Boolean isHtml; + private final Map components; + private final String translationFollowsText; + + private static final String SUBJECT_DELIMITER = " | "; + private static final String HTML_MESSAGE_DELIMITER = "
<br/><hr/><br/>"; + private static final String HTML_LINE_BREAK = "<br/><br/>
"; + private static final String TEXT_MESSAGE_DELIMITER = "\n\n--------------------------------------------------------\n\n"; + private static final String TEXT_LINE_BREAK = "\n\n"; + + public LocalizedEmail(Boolean isHtml) { + this.isHtml = isHtml; + + FeedbackLanguages feedbackLanguages = ApplicationContextHolder.get().getBean(FeedbackLanguages.class); + this.translationFollowsText = feedbackLanguages.getTranslationFollowsText(); + + this.components = new HashMap<>(); + } + + /** + * Add one or more components to the email object. Existing components are replaced. + * + * @param newComponents The components to add to the email. + */ + public void addComponents(LocalizedEmailComponent... newComponents) { + + for (LocalizedEmailComponent newComponent : newComponents) { + + if (newComponent == null) { + throw new IllegalArgumentException("Null parameter not allowed"); + } + + components.put(newComponent.getComponentType(), newComponent); + } + } + + public String getParsedSubject(Locale[] feedbackLocales) { + LinkedHashMap subjects = components.get(SUBJECT).getParsedMessagesMap(feedbackLocales); + return String.join(SUBJECT_DELIMITER, subjects.values()); + } + + public String getParsedMessage(Locale[] feedbackLocales) { + return getParsedMessage(feedbackLocales, null); + } + + public String getParsedMessage(Locale[] feedbackLocales, Map replacements) { + LinkedHashMap messages = components.get(MESSAGE).getParsedMessagesMap(feedbackLocales, true); + + // Prepend the message with a salutation placeholder if the salutation component is present + if (components.containsKey(SALUTATION) && components.get(SALUTATION) != null) { + + LinkedHashMap salutations = components.get(SALUTATION).getParsedMessagesMap(feedbackLocales); + LinkedHashMap messagesWithSalutations = new LinkedHashMap<>(); + + for (Map.Entry entry : messages.entrySet()) { + //Skip messages that have no matching salutation + if (!salutations.containsKey(entry.getKey())) { + continue; + } + + String message = entry.getValue(); + String salutation = salutations.get(entry.getKey()); + + if (replacements != null && !replacements.isEmpty()) { + for (Map.Entry replacement : replacements.entrySet()) { + salutation = salutation.replace(replacement.getKey(), replacement.getValue()); + } + } + + messagesWithSalutations.put(entry.getKey(), salutation + message); + } + + messages = messagesWithSalutations; + + } + + String messageDelimiter; + String lineBreak; + + // Set the delimiter and break string to use based on email type + if (isHtml) { + messageDelimiter = HTML_MESSAGE_DELIMITER; + lineBreak = HTML_LINE_BREAK; + // Wrap each message in a div with a lang attribute for accessibility + messages.replaceAll((locale, message) -> "
<div lang=\"" + locale.getLanguage() + "\">" + message + "</div>"); + } else { + messageDelimiter = TEXT_MESSAGE_DELIMITER; + lineBreak = TEXT_LINE_BREAK; + } + + String emailMessage = String.join(messageDelimiter, messages.values()); + + // Prepend the message with the translation follows text if there is more than one language specified + if (messages.size() > 1 && !StringUtils.isBlank(translationFollowsText)) { + emailMessage = translationFollowsText + lineBreak + emailMessage; + } + + // If the email is html wrap the content in html and body tags + if (isHtml) { + if (emailMessage.contains("<html>") || emailMessage.contains("<body>")) { + Log.warning(Log.GEONETWORK_MODULE + ".localizedemail","Multilingual emails are unsupported for HTML emails with messages containing <html> or <body> tags. Reverting to first specified locale."); + return messages.get(feedbackLocales[0]); + } + emailMessage = "<html><body>" + emailMessage + "</body></html>"; + } + + return emailMessage; + } +} + diff --git a/core/src/main/java/org/fao/geonet/util/LocalizedEmailComponent.java b/core/src/main/java/org/fao/geonet/util/LocalizedEmailComponent.java new file mode 100644 index 00000000000..fa61f8e07f8 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/util/LocalizedEmailComponent.java @@ -0,0 +1,372 @@ +//============================================================================= +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.util; + +import org.fao.geonet.ApplicationContextHolder; +import org.fao.geonet.kernel.search.JSONLocCacheLoader; +import org.fao.geonet.kernel.setting.SettingManager; + +import java.text.MessageFormat; +import java.util.*; + +import static org.fao.geonet.util.LocalizedEmailComponent.ReplacementType.*; + +/** + * This class is used to handle email parameters used to format localized email messages + */ +public class LocalizedEmailComponent { + + private final ComponentType componentType; + private final String keyOrRawValue; + private final KeyType keyType; + private final ReplacementType replacementType; + private final Map<Locale, ArrayList<LocalizedEmailParameter>> parameters; + private Boolean compileWithIndexFields; + private String metadataUuid; + private Boolean replaceLinks; + private Boolean replaceLinksWithHtmlFormat = false; + + /** + * Enum representing the types of components in an email. + * <p> + * This enum defines four types of components: + * <ul> + *   <li>{@link ComponentType#SUBJECT SUBJECT}: The email subject field.</li> + *   <li>{@link ComponentType#MESSAGE MESSAGE}: The email body.</li> + *   <li>{@link ComponentType#SALUTATION SALUTATION}: The salutation to prepend each localized message with. (Ex. 'Hello John')</li> + *   <li>{@link ComponentType#NESTED NESTED}: A component of insignificant type that is used to generate other components.</li> + * </ul>
+ */ + public enum ComponentType { + /** + * The email subject field. + */ + SUBJECT, + + /** + * The email body. + */ + MESSAGE, + + /** + * The salutation to prepend each localized message with. (Ex. 'Hello John'). + */ + SALUTATION, + + /** + * A component of insignificant type that is used to generate other components. + */ + NESTED + } + + /** + * Enum representing the types of keys used to parse a component's message. + * <p> + * This enum defines four types of keys: + * <ul> + *   <li>{@link KeyType#MESSAGE_OR_JSON_KEY MESSAGE_OR_JSON_KEY}: Represents a component that tries to retrieve its value using {@link ResourceBundle#getString} or JSON localization files if the message key was not found.</li> + *   <li>{@link KeyType#MESSAGE_KEY MESSAGE_KEY}: Represents a component that retrieves its value using {@link ResourceBundle#getString}.</li> + *   <li>{@link KeyType#JSON_KEY JSON_KEY}: Represents a component that retrieves its value by searching the JSON localization files for the specified key.</li> + *   <li>{@link KeyType#RAW_VALUE RAW_VALUE}: Represents a component in which keys are not required. The raw value from keyOrRawValue is used.</li> + * </ul>
+ */ + public enum KeyType { + /** + * Represents a component that tries to retrieve its value using {@link ResourceBundle#getString} or JSON localization files if message key was not found. + */ + MESSAGE_OR_JSON_KEY, + + /** + * Represents a component that retrieves its value using {@link ResourceBundle#getString}. + */ + MESSAGE_KEY, + + /** + * Represents a component that retrieves its value by searching the JSON localization files for the specified key. + */ + JSON_KEY, + + /** + * Represents a component in which keys are not required. The raw value from keyOrRawValue is used. + */ + RAW_VALUE + } + + /** + * Enum representing the types of replacements performed on the email component. + * <p> + * This enum defines four types of replacement: + * <ul> + *   <li>{@link ReplacementType#POSITIONAL_FORMAT POSITIONAL_FORMAT}: For {@link String#format}, where parameters are replaced based on their position (Ex. %s).</li> + *   <li>{@link ReplacementType#NUMERIC_FORMAT NUMERIC_FORMAT}: For {@link MessageFormat#format}, where parameters are replaced based on position (Ex. {0}).</li> + *   <li>{@link ReplacementType#NAMED_FORMAT NAMED_FORMAT}: For {@link String#replace}, where parameters are replaced based on their names ({{title}}).</li> + *   <li>{@link ReplacementType#NONE NONE}: For components that require no replacement to compute their values.</li> + * </ul>
+ */ + public enum ReplacementType { + /** + * For {@link String#format}, where parameters are replaced based on their position (Ex. %s). + * The parameter id stores an integer representing the order of the parameters. + */ + POSITIONAL_FORMAT, + + /** + * For {@link MessageFormat#format}, where parameters are replaced based on position (Ex. {0}). + * The parameter id stores an integer representing the order of the parameters. + */ + NUMERIC_FORMAT, + + /** + * For {@link String#replace}, where parameters are replaced based on their names ({{title}}). + * The parameter id stores the string to replace. + */ + NAMED_FORMAT, + + /** + * For components that require no replacement to compute their values. + */ + NONE + } + + /** + * Constructor for LocalizedEmailParameters. + * + * @param replacementType the type of template variable + */ + public LocalizedEmailComponent(ComponentType componentType, String keyOrRawValue, KeyType keyType, ReplacementType replacementType) { + this.componentType = componentType; + this.keyOrRawValue = keyOrRawValue; + this.keyType = keyType; + this.replacementType = replacementType; + this.parameters = new HashMap<>(); + this.compileWithIndexFields = false; + this.metadataUuid = null; + this.replaceLinks = false; + } + + /** + * Adds parameters to the email parameters list. + * + * @param newParameters the parameters to add + * @throws IllegalArgumentException if a null parameter is passed or if a duplicate parameter id is found + */ + public void addParameters(Locale locale, LocalizedEmailParameter... newParameters) { + // If the map does not have the locale as a key add it + if (!parameters.containsKey(locale)) { + parameters.put(locale, new ArrayList<>()); + } + + for (LocalizedEmailParameter newParameter : newParameters) { + + if (newParameter == null) { + throw new IllegalArgumentException("Null parameter not allowed"); + } + + // If the parameter id is already in the list + if (parameters.get(locale).stream().anyMatch(existingParameter -> newParameter.getId().equals(existingParameter.getId()))) { + throw new IllegalArgumentException("Duplicate parameter id: " + newParameter.getId()); + } + + // If the type of parameters are positional and the new parameters id is not an integer + if ((replacementType.equals(POSITIONAL_FORMAT) || replacementType.equals(NUMERIC_FORMAT)) && !(newParameter.getId() instanceof Integer)) { + throw new IllegalArgumentException("Positional parameter id must be an integer"); + } + + parameters.get(locale).add(newParameter); + } + } + + /** + * @return the map of locales to lists of email parameters + */ + public Map> getParameters() { + return parameters; + } + + /** + * Enables the compilation with index fields and sets the metadata UUID. + * + * @param metadataUuid the metadata UUID + */ + public void enableCompileWithIndexFields(String metadataUuid) { + this.compileWithIndexFields = true; + this.metadataUuid = metadataUuid; + } + + /** + * Sets the replace links flag and format. + * + * @param useHtmlFormat replace links using the HTML format instead of the text format. + */ + public void enableReplaceLinks(Boolean useHtmlFormat) { + this.replaceLinks = true; + this.replaceLinksWithHtmlFormat = useHtmlFormat; + } + + /** + * @return The type of the component. + */ + public ComponentType getComponentType() { + return componentType; + } + + /** + * Parses the message based on the provided key or template and locale. 
+ * + * @param locale the locale + * @return the parsed message + * @throws RuntimeException if an unsupported template variable type is encountered + */ + public String parseMessage(Locale locale) { + + ArrayList parametersForLocale = parameters.get(locale); + + String parsedMessage; + switch (keyType) { + case MESSAGE_OR_JSON_KEY: + try { + parsedMessage = getResourceBundleString(locale); + } catch (MissingResourceException missingResourceException) { + parsedMessage = getTranslationMapString(locale); + } + break; + case MESSAGE_KEY: + try { + parsedMessage = getResourceBundleString(locale); + } catch (MissingResourceException e) { + parsedMessage = keyOrRawValue; + } + break; + case JSON_KEY: + parsedMessage = getTranslationMapString(locale); + break; + case RAW_VALUE: + parsedMessage = keyOrRawValue; + break; + default: + throw new IllegalArgumentException("Unsupported key type: " + keyType); + } + + // Handle replacements + if (replacementType == POSITIONAL_FORMAT || replacementType == NUMERIC_FORMAT) { + + Object[] parsedLocaleEmailParameters = parametersForLocale.stream() + .sorted(Comparator.comparing(parameter -> (Integer) parameter.getId())) + .map(parameter -> parameter.parseValue(locale)) + .toArray(); + + if (replacementType == POSITIONAL_FORMAT) { + parsedMessage = String.format(parsedMessage, parsedLocaleEmailParameters); + } else { + // Replace the link placeholders with index field placeholder so that it isn't interpreted as a MessageFormat arg + if (replaceLinks) { + parsedMessage = replaceLinks(parsedMessage); + } + parsedMessage = MessageFormat.format(parsedMessage, parsedLocaleEmailParameters); + } + + } else if (replacementType == NAMED_FORMAT) { + + for (LocalizedEmailParameter parameter : parametersForLocale) { + parsedMessage = parsedMessage.replace(parameter.getId().toString(), parameter.parseValue(locale)); + } + + } + + // Replace link placeholders + if (replaceLinks) { + parsedMessage = replaceLinks(parsedMessage); + } + + // Replace index field placeholders + if (compileWithIndexFields && metadataUuid != null) { + parsedMessage = MailUtil.compileMessageWithIndexFields(parsedMessage, metadataUuid, locale.getLanguage()); + } + + return parsedMessage; + } + + /** + * Returns a map of locales to parsed messages for the provided array of locales. + * + * @param feedbackLocales the array of locales + * @return the map of locales to parsed messages + */ + public LinkedHashMap getParsedMessagesMap(Locale[] feedbackLocales) { + return getParsedMessagesMap(feedbackLocales, false); + } + + /** + * Returns a map of locales to parsed messages for the provided array of locales. + * If flagged only distinct values are returned. 
+ * + * @param feedbackLocales the array of locales + * @param distinct flag to only return messages with distinct values + * @return the map of locales to parsed messages + */ + public LinkedHashMap getParsedMessagesMap(Locale[] feedbackLocales, Boolean distinct) { + + LinkedHashMap parsedMessages = new LinkedHashMap<>(); + + for (Locale locale : feedbackLocales) { + String parsedMessage = parseMessage(locale); + if (!distinct || !parsedMessages.containsValue(parsedMessage)) { + parsedMessages.put(locale, parsedMessage); + } + } + + return parsedMessages; + } + + private String getResourceBundleString(Locale locale) { + return ResourceBundle.getBundle("org.fao.geonet.api.Messages", locale).getString(keyOrRawValue); + } + + private String getTranslationMapString(Locale locale) { + try { + Map translationMap = new JSONLocCacheLoader(ApplicationContextHolder.get(), locale.getISO3Language()).call(); + return translationMap.getOrDefault(keyOrRawValue, keyOrRawValue); + } catch (Exception exception) { + return keyOrRawValue; + } + } + + private String replaceLinks(String message) { + + SettingManager settingManager = ApplicationContextHolder.get().getBean(SettingManager.class); + + String newPlaceholder; + if (replaceLinksWithHtmlFormat) { + newPlaceholder = "{{index:uuid}}"; + } else { + newPlaceholder = "'{{'index:uuid'}}'"; + } + return message.replace("{{link}}", settingManager.getNodeURL() + "api/records/" + newPlaceholder); + } +} diff --git a/core/src/main/java/org/fao/geonet/util/LocalizedEmailParameter.java b/core/src/main/java/org/fao/geonet/util/LocalizedEmailParameter.java new file mode 100644 index 00000000000..f68c36aec38 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/util/LocalizedEmailParameter.java @@ -0,0 +1,179 @@ +//============================================================================= +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.util; + +import org.fao.geonet.ApplicationContextHolder; +import org.fao.geonet.kernel.search.JSONLocCacheLoader; + +import java.util.*; + +/** + * Class representing a parameter used in a localized email. + * It provides functionality to set and get parameter properties, and parse parameter values. 
+ */ +public class LocalizedEmailParameter { + private final Object id; + private final ParameterType parameterType; + private final Object value; // (Based on Parameter type) + private final Object metadataUuid; + + /** + * Enum representing different types of parameters used in a localized email context. + * <p> + * This enum defines five types of parameters: + * <ul> + *   <li>{@link ParameterType#MESSAGE_OR_JSON_KEY MESSAGE_OR_JSON_KEY}: A parameter that tries to retrieve its value using {@link ResourceBundle#getString} or JSON localization files if the message key was not found. The value property is set to the (message or json) key to search for.</li> + *   <li>{@link ParameterType#MESSAGE_KEY MESSAGE_KEY}: A parameter that retrieves its value using {@link ResourceBundle#getString}. The value property is set to the message key to search for.</li> + *   <li>{@link ParameterType#JSON_KEY JSON_KEY}: A parameter that retrieves its value by searching the JSON localization files for the specified key. The value property is set to the json key to search for.</li> + *   <li>{@link ParameterType#INDEX_FIELD INDEX_FIELD}: A parameter that retrieves its value using {@link XslUtil#getIndexField}. The value property is set to the field name to search for, and the uuid property is set to the record uuid to search for (required).</li> + *   <li>{@link ParameterType#RAW_VALUE RAW_VALUE}: A parameter with a precomputed value that is simply returned. The value property contains the precomputed value.</li> + * </ul>
+ * These types can be used to categorize parameters and define their intended use in the context of localized email parameterization. + */ + public enum ParameterType { + /** + * A parameter that tries to retrieve its value using {@link ResourceBundle#getString} or JSON localization files if message key was not found. + * The value property is set to the (message or json) key to search for. + */ + MESSAGE_OR_JSON_KEY, + + /** + * A parameter that retrieves its value using {@link ResourceBundle#getString} + * The value property is set to the message key to search for. + */ + MESSAGE_KEY, + + /** + * A parameter that retrieves its value by searching the JSON localization files for the specified key. + * The value property is set to the json key to search for. + */ + JSON_KEY, + + /** + * A parameter that retrieves its value using {@link XslUtil#getIndexField} + * The value property is set to the field name to search for. + * The uuid property is set to the record uuid to search for and is required. + */ + INDEX_FIELD, + + /** + * A parameter with a precomputed value that is simply returned. + * The value property contains the precomputed value. + */ + RAW_VALUE + } + + /** + * Constructor with parameters. + * + * @param parameterType the type of the parameter + * @param id the id of the parameter + * @param value the value of the parameter + */ + public LocalizedEmailParameter(ParameterType parameterType, Object id, Object value) { + this.parameterType = parameterType; + this.id = id; + this.value = value; + this.metadataUuid = null; + } + + /** + * Constructor with parameters. + * + * @param parameterType the type of the parameter + * @param id the id of the parameter + * @param value the value of the parameter + * @param metadataUuid The metadata uuid to use for parsing index field values + */ + public LocalizedEmailParameter(ParameterType parameterType, Object id, Object value, String metadataUuid) { + this.parameterType = parameterType; + this.id = id; + this.value = value; + this.metadataUuid = metadataUuid; + } + + /** + * @return the id of the parameter + */ + public Object getId() { + return id; + } + + /** + * Parses the value of the parameter based on its type and the provided locale + * + * @param locale the locale to use to parse the value + * @return the parsed string value + */ + public String parseValue(Locale locale) { + + if (value == null) { + return "null"; + } + + switch (parameterType) { + case MESSAGE_OR_JSON_KEY: + try { + return getResourceBundleString(locale); + } catch (MissingResourceException missingResourceException) { + return getJsonTranslationMapString(locale); + } + case MESSAGE_KEY: + try { + return getResourceBundleString(locale); + } catch (MissingResourceException e) { + return value.toString(); + } + case JSON_KEY: + return getJsonTranslationMapString(locale); + case INDEX_FIELD: + if (metadataUuid == null) throw new IllegalArgumentException("Metadata UUID is required for parameters of type INDEX_FIELD"); + return XslUtil.getIndexField(null, metadataUuid, value, locale); + case RAW_VALUE: + return value.toString(); + default: + throw new IllegalArgumentException("Unsupported parameter type: " + parameterType); + } + } + + private String getResourceBundleString(Locale locale) { + return ResourceBundle.getBundle("org.fao.geonet.api.Messages", locale).getString(value.toString()); + } + + private String getJsonTranslationMapString(Locale locale) { + try { + Map translationMap = new JSONLocCacheLoader(ApplicationContextHolder.get(), 
locale.getISO3Language()).call(); + return translationMap.getOrDefault(value.toString(), value.toString()); + } catch (Exception exception) { + return value.toString(); + } + } +} + diff --git a/core/src/main/java/org/fao/geonet/util/LogUtil.java b/core/src/main/java/org/fao/geonet/util/LogUtil.java index aa1d0d437f4..93f3c023f7d 100644 --- a/core/src/main/java/org/fao/geonet/util/LogUtil.java +++ b/core/src/main/java/org/fao/geonet/util/LogUtil.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -57,7 +57,7 @@ public static String initializeHarvesterLog(String type, String name) { // Filename safe representation of harvester name (using '_' as needed). final String harvesterName = name.replaceAll("\\W+", "_"); final String harvesterType = type.replaceAll("\\W+", "_"); - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmm"); + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss"); String logfile = "harvester_" + harvesterType @@ -71,7 +71,7 @@ public static String initializeHarvesterLog(String type, String name) { } ThreadContext.put("harvest", harvesterName); - ThreadContext.putIfNull("logfile", logfile); + ThreadContext.put("logfile", logfile); ThreadContext.put("timeZone", timeZoneSetting); return logfile; diff --git a/core/src/main/java/org/fao/geonet/util/MailUtil.java b/core/src/main/java/org/fao/geonet/util/MailUtil.java index fc0c743c6fe..517a292b99f 100644 --- a/core/src/main/java/org/fao/geonet/util/MailUtil.java +++ b/core/src/main/java/org/fao/geonet/util/MailUtil.java @@ -364,9 +364,6 @@ private static void configureBasics(String hostName, Integer smtpPort, email.setAuthenticator(new DefaultAuthenticator(username, password)); } - - email.setDebug(true); - if (tls != null && tls) { email.setStartTLSEnabled(tls); email.setStartTLSRequired(tls); diff --git a/core/src/main/java/org/fao/geonet/util/XslUtil.java b/core/src/main/java/org/fao/geonet/util/XslUtil.java index 256129da48b..452ab855e75 100644 --- a/core/src/main/java/org/fao/geonet/util/XslUtil.java +++ b/core/src/main/java/org/fao/geonet/util/XslUtil.java @@ -47,6 +47,7 @@ import org.apache.http.impl.client.DefaultHttpClient; import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.SystemInfo; +import org.fao.geonet.analytics.WebAnalyticsConfiguration; import org.fao.geonet.api.records.attachments.FilesystemStore; import org.fao.geonet.api.records.attachments.FilesystemStoreResourceContainer; import org.fao.geonet.api.records.attachments.Store; @@ -631,13 +632,19 @@ public static MetadataResourceContainer getResourceContainerDescription(String m Store store = BeanFactoryAnnotationUtils.qualifiedBeanOfType(ApplicationContextHolder.get().getBeanFactory(), Store.class, "filesystemStore"); if (store != null) { - if (store.getResourceManagementExternalProperties() != null && store.getResourceManagementExternalProperties().isFolderEnabled()) { - ServiceContext context = ServiceContext.get(); - return store.getResourceContainerDescription(ServiceContext.get(), metadataUuid, approved); - } else { - // Return an empty object which should not be used because the folder is not enabled. 
- return new FilesystemStoreResourceContainer(metadataUuid, -1, null, null, null, approved); + try { + if (store.getResourceManagementExternalProperties() != null && store.getResourceManagementExternalProperties().isFolderEnabled()) { + ServiceContext context = ServiceContext.get(); + return store.getResourceContainerDescription(ServiceContext.get(), metadataUuid, approved); + } else { + // Return an empty object which should not be used because the folder is not enabled. + return new FilesystemStoreResourceContainer(metadataUuid, -1, null, null, null, approved); + } + } catch (RuntimeException e) { + Log.error(Geonet.RESOURCES, "Could not locate resource in getResourceContainerDescription due to runtime exception", e); + return null; } + } Log.error(Geonet.RESOURCES, "Could not locate a Store bean in getResourceContainerDescription"); return null; @@ -843,12 +850,10 @@ public static String getIndexField(Object appName, Object uuid, Object field, Ob try { Set fields = new HashSet<>(); fields.add(fieldname); - // TODO: Multilingual fields - final Map values = searchManager.getFieldsValues(id, fields); + final Map values = searchManager.getFieldsValues(id, fields, language); return values.get(fieldname); } catch (Exception e) { - e.printStackTrace(); - Log.error(Geonet.GEONETWORK, "Failed to get index field '" + fieldname + "' value on '" + id + "', caused by " + e.getMessage()); + Log.warning(Geonet.GEONETWORK, "Failed to get index field '" + fieldname + "' value on '" + id + "', caused by " + e.getMessage()); } return ""; } @@ -1241,7 +1246,7 @@ public static String buildDataUrl(String url, Integer size) { Matcher m = Pattern.compile(settingManager.getNodeURL() + "api/records/(.*)/attachments/(.*)$").matcher(url); BufferedImage image; if (m.find()) { - Store store = ApplicationContextHolder.get().getBean(FilesystemStore.class); + Store store = ApplicationContextHolder.get().getBean("filesystemStore", Store.class); try (Store.ResourceHolder file = store.getResourceInternal( m.group(1), MetadataResourceVisibility.PUBLIC, @@ -1436,6 +1441,28 @@ public static String getThesaurusIdByTitle(String title) { return thesaurus == null ? "" : "geonetwork.thesaurus." + thesaurus.getKey(); } + /** + * Retrieve the thesaurus title using the thesaurus key. + * + * @param id the thesaurus key + * @return the thesaurus title or empty string if the thesaurus doesn't exist. + */ + public static String getThesaurusTitleByKey(String id) { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + ThesaurusManager thesaurusManager = applicationContext.getBean(ThesaurusManager.class); + Thesaurus thesaurus = thesaurusManager.getThesaurusByName(id); + return thesaurus == null ? "" : thesaurus.getTitle(); + } + + + public static String getThesaurusUriByKey(String id) { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + ThesaurusManager thesaurusManager = applicationContext.getBean(ThesaurusManager.class); + Thesaurus thesaurus = thesaurusManager.getThesaurusByName(id); + return thesaurus == null ? "" : thesaurus.getDefaultNamespace(); + } + + /** * Utility method to retrieve the name (label) for an iso language using it's code for a specific language. 
@@ -1576,4 +1603,18 @@ private static List buildRecordLink(List hits, String type) { public static String escapeForJson(String value) { return StringEscapeUtils.escapeJson(value); } + + public static String getWebAnalyticsService() { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + WebAnalyticsConfiguration webAnalyticsConfiguration = applicationContext.getBean(WebAnalyticsConfiguration.class); + + return webAnalyticsConfiguration.getService(); + } + + public static String getWebAnalyticsJavascriptCode() { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + WebAnalyticsConfiguration webAnalyticsConfiguration = applicationContext.getBean(WebAnalyticsConfiguration.class); + + return webAnalyticsConfiguration.getJavascriptCode(); + } } diff --git a/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java b/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java new file mode 100644 index 00000000000..cdf34c45f18 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.web; + +import org.springframework.security.web.firewall.StrictHttpFirewall; + +import java.util.regex.Pattern; + +import static java.nio.charset.StandardCharsets.ISO_8859_1; +import static java.nio.charset.StandardCharsets.UTF_8; + +/** + * Spring Security HttpFirewall that allows parsing UTF8 header values. + */ +public class GeoNetworkStrictHttpFirewall extends StrictHttpFirewall { + private static final Pattern ALLOWED_HEADER_VALUE_PATTERN = Pattern.compile("[\\p{IsAssigned}&&[^\\p{IsControl}]]*"); + + public GeoNetworkStrictHttpFirewall() { + super(); + + this.setAllowedHeaderValues(header -> { + String parsed = new String(header.getBytes(ISO_8859_1), UTF_8); + return ALLOWED_HEADER_VALUE_PATTERN.matcher(parsed).matches(); + }); + } +} diff --git a/core/src/main/resources/config-spring-geonetwork.xml b/core/src/main/resources/config-spring-geonetwork.xml index afaf71f9686..052e4d6ae6d 100644 --- a/core/src/main/resources/config-spring-geonetwork.xml +++ b/core/src/main/resources/config-spring-geonetwork.xml @@ -238,6 +238,8 @@ + + tools --> Delete index and reindex. +After updating use **Admin Console > Tools** and use **Delete index and reindex**. 
## List of changes diff --git a/docs/manual/docs/overview/change-log/version-4.4.2.md b/docs/manual/docs/overview/change-log/version-4.4.2.md index 807950b6642..11864f8b524 100644 --- a/docs/manual/docs/overview/change-log/version-4.4.2.md +++ b/docs/manual/docs/overview/change-log/version-4.4.2.md @@ -2,15 +2,13 @@ GeoNetwork 4.4.2 release is a minor release. -## Migration notes +## Update notes -### Java - -**Version 4.4 only works on Java 11.** +When updating please review the following actions: ### Index changes -After update, don't forget to go to admin console --> tools --> Delete index and reindex. +After updating use **Admin Console > Tools** and use **Delete index and reindex**. ## List of changes diff --git a/docs/manual/docs/overview/change-log/version-4.4.3.md b/docs/manual/docs/overview/change-log/version-4.4.3.md new file mode 100644 index 00000000000..1b38da471dd --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.3.md @@ -0,0 +1,44 @@ +# Version 4.4.3 {#version-443} + +GeoNetwork 4.4.3 release is a minor release. + +## Update notes + +When updating please review the following actions: + +### Index changes + +This version uses the Elasticsearch version 8 Java client; it is recommended to use an Elasticsearch version 8 server. +However, versions 7.15+ and 8+ have been tested. + +After updating use **Admin Console > Tools** and use **Delete index and reindex**. + +### Map + +[Stamen background layers are no longer available, update your maps](https://github.com/geonetwork/core-geonetwork/pull/7715). + + +## List of changes + +Major changes: + +- [Elasticsearch 8 upgrade](https://github.com/geonetwork/core-geonetwork/pull/7599) +- [Editor / Distribution panel improvements](https://github.com/geonetwork/core-geonetwork/pull/7468) +- [Thesaurus / Add support for codelist described using SDMX](https://github.com/geonetwork/core-geonetwork/pull/7790) +- [Thesaurus / Add support for thesaurus described using OWL format](https://github.com/geonetwork/core-geonetwork/pull/7674) +- [Thesaurus / Improve support of EU publication office SKOS format](https://github.com/geonetwork/core-geonetwork/pull/7673) +- [INSPIRE / Add testsuite for IACS](https://github.com/geonetwork/core-geonetwork/pull/7756) +- [Map viewer / Remove Stamen background layers - no longer available](https://github.com/geonetwork/core-geonetwork/pull/7715) +- [i18n / Add Welsh language for user interface](https://github.com/geonetwork/core-geonetwork/pull/7851) +- [Index / Add Danish language configuration](https://github.com/geonetwork/core-geonetwork/pull/7697) +- [Index / Translated the index warnings and errors](https://github.com/geonetwork/core-geonetwork/pull/7531) +- [Create a metadata / Add dynamic and download privileges to the users in the same group](https://github.com/geonetwork/core-geonetwork/pull/7744) +- [Decouple metadata user feedback from metadata rating feature](https://github.com/geonetwork/core-geonetwork/pull/7796) +- [Extend http proxy to manage duplicated parameters](https://github.com/geonetwork/core-geonetwork/pull/7854) +- [Fix MIME-types on attachments](https://github.com/geonetwork/core-geonetwork/pull/7675) +- [Fix PDF link to the application website](https://github.com/geonetwork/core-geonetwork/pull/7681) +- Update `org.json:json` from version 20140107 to 20240205 +- Documentation / Manual improvements +- Documentation / API SpringDoc fixes + +and more \... 
see [4.4.3 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.3+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.3+is%3Aclosed) for full details. diff --git a/docs/manual/docs/overview/change-log/version-4.4.4.md b/docs/manual/docs/overview/change-log/version-4.4.4.md new file mode 100644 index 00000000000..7a6b27c37c8 --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.4.md @@ -0,0 +1,29 @@ +# Version 4.4.4 + +GeoNetwork 4.4.4 is a minor release. + +## Update notes + +When updating please review the following actions: + +### Index changes + +After updating use **Admin Console > Tools** and use **Delete index and reindex**: + +* [Passing key into update/remove process xslt for iso 19139 to fix issue with updating/deleting resources with same url](https://github.com/geonetwork/core-geonetwork/pull/7431) + +## List of changes + +Major changes: + +* [CSW / GetRecords / Number of matches in page info](https://github.com/geonetwork/core-geonetwork/pull/7937) + +* [Editor associated resources panel is redesigned with a new user interface to link to external resources (DOI and URL to external catalogue)](https://github.com/geonetwork/core-geonetwork/pull/7669) + +Fixes: + +* [Search Export CSV - Escape double-quotes with double-quotes instead of backslash](https://github.com/geonetwork/core-geonetwork/pull/7927) + +* [Metadata feedback / Fix email to multiple recipients](https://github.com/geonetwork/core-geonetwork/pull/7875) + +and more \... see [4.4.4-0 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.4+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.4+is%3Aclosed) for full details. diff --git a/docs/manual/docs/overview/change-log/version-4.4.5.md b/docs/manual/docs/overview/change-log/version-4.4.5.md new file mode 100644 index 00000000000..e5a1f37d416 --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.5.md @@ -0,0 +1,42 @@ +# Version 4.4.5 + +GeoNetwork 4.4.5 is a minor release. + +## Update notes + +When updating please review the following actions: + +### Index changes + +After updating use **Admin Console > Tools** and use **Delete index and reindex**: + + +## List of changes + +Major changes: + +* [Embed GeoNetwork in other applications using WebComponents](https://github.com/geonetwork/core-geonetwork/pull/6516) + +* [Configure analytics service easily e.g. 
Matomo](https://github.com/geonetwork/core-geonetwork/pull/7313) + +* [Harvester / Translate incoming records with translation providers](https://github.com/geonetwork/core-geonetwork/pull/7849) + +* [Workflow / Configuration editor board to display draft, approved record or both](https://github.com/geonetwork/core-geonetwork/pull/7477) + +* [Editor / Table mode / Add ordering control](https://github.com/geonetwork/core-geonetwork/pull/8016) + +* [API / Extent / Add geometry collection support](https://github.com/geonetwork/core-geonetwork/pull/7911) + +* [API / Search / Add support for aggregation on related records](https://github.com/geonetwork/core-geonetwork/pull/7939) + +* [i18n / Add Armenian, Azerbaijani, Georgian, Romanian and Ukrainian languages](https://github.com/geonetwork/core-geonetwork/pull/7968) + +* [Library / jQuery update](https://github.com/geonetwork/core-geonetwork/pull/8015) + + +Fixes: + +* [API / CSW / Fix nextrecords when using pagination](https://github.com/geonetwork/core-geonetwork/pull/7977) + + +and more \... see [4.4.5-0 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.5+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.5+is%3Aclosed) for full details. diff --git a/docs/manual/docs/overview/change-log/version-4.4.6.md b/docs/manual/docs/overview/change-log/version-4.4.6.md new file mode 100644 index 00000000000..1e9a6a2a214 --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.6.md @@ -0,0 +1,35 @@ +# Version 4.4.6 {#version-446} + +GeoNetwork 4.4.6 is a minor release. + +## Update notes + +When updating please review the following actions: + +### Index changes + +Due to the [Elasticsearch update to 8.14.3](https://github.com/geonetwork/core-geonetwork/pull/8337) it is recommended to use an 8.14.x version of the Elasticsearch server. + +After updating use **Admin Console > Tools** and use **Delete index and reindex**: + + +## List of changes + +Major changes: + +* [Add support for external management named properties in JCloud](https://github.com/geonetwork/core-geonetwork/pull/8357) + +* [Use UI language for metadata selection export to CSV / PDF](https://github.com/geonetwork/core-geonetwork/pull/8262) + +* [WebDav harvester / Add support for XSLT filter process](https://github.com/geonetwork/core-geonetwork/pull/8243) + +* [Register user / allow configuring allowed email domains](https://github.com/geonetwork/core-geonetwork/pull/8186) + +* [Register user / allow selecting the group where the user wants to register](https://github.com/geonetwork/core-geonetwork/pull/8176) + +* [Support multiple DOI servers](https://github.com/geonetwork/core-geonetwork/pull/8098) + +* [Standard / DCAT (and profiles) export](https://github.com/geonetwork/core-geonetwork/pull/7600) + +and more \... see [4.4.6-0 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.6+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.6+is%3Aclosed) for full details. 
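The `Register user / allow configuring allowed email domains` entry above corresponds to the new `system/userSelfRegistration/domainsAllowed` setting introduced earlier in this changeset. As a rough illustration of the kind of check a comma-separated domain allowlist enables (a hypothetical sketch only, not GeoNetwork's actual implementation; the class and method names below are invented):

```java
import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;

// Hypothetical sketch of a domain allowlist check driven by a comma-separated
// setting such as "system/userSelfRegistration/domainsAllowed".
public class DomainAllowlistSketch {

    static boolean isEmailDomainAllowed(String email, String domainsAllowedSetting) {
        // An empty setting is assumed to mean "no restriction".
        if (domainsAllowedSetting == null || domainsAllowedSetting.isBlank()) {
            return true;
        }
        Set<String> allowed = Arrays.stream(domainsAllowedSetting.split(","))
            .map(String::trim)
            .map(String::toLowerCase)
            .collect(Collectors.toSet());
        String domain = email.substring(email.indexOf('@') + 1).toLowerCase();
        return allowed.contains(domain);
    }

    public static void main(String[] args) {
        System.out.println(isEmailDomainAllowed("user@example.org", "example.org, example.com")); // true
        System.out.println(isEmailDomainAllowed("user@other.test", "example.org"));               // false
    }
}
```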
diff --git a/docs/manual/docs/user-guide/associating-resources/doi.md b/docs/manual/docs/user-guide/associating-resources/doi.md index 31434831688..cb57e8f9589 100644 --- a/docs/manual/docs/user-guide/associating-resources/doi.md +++ b/docs/manual/docs/user-guide/associating-resources/doi.md @@ -7,15 +7,33 @@ The catalogue support DOI creation using: - [DataCite API](https://support.datacite.org/docs/mds-api-guide). - EU publication office API -Configure the API access point in the `admin console --> settings`: +Configure the DOI API access point used to publish the metadata in `Admin console --> Settings --> Doi servers`: -![](img/doi-admin-console.png) +![](img/doi-create-server.png) + +Provide the following information: + +- `Name`: A descriptive name for the server. +- `Description`: (Optional) A verbose description of the server. +- `DataCite API endpoint`: The API URL, usually https://mds.datacite.org or https://mds.test.datacite.org for testing. +- `DataCite username` / `DataCite password`: Credentials required to publish the DOI resources. +- `Landing page URL template`: The URL used to register the DOI. A good default for GeoNetwork is http://localhost:8080/geonetwork/srv/resources/records/{{uuid}}. The landing page URL MUST contain the UUID of the record. +- `Final DOI URL prefix`: (Optional) Keep it empty to use the default https://doi.org prefix. Use https://mds.test.datacite.org/doi when using the test API. +- `DOI pattern`: Default is `{{uuid}}`, but the DOI structure can be customized with the database id and/or record group, e.g. `example-{{groupOwner}}-{{id}}`. +- `DataCite prefix`: Usually looks like `10.xxxx`. You will be allowed to register DOI names only under the prefixes that have been assigned to you. +- `Publication groups`: (Optional) Select the groups whose metadata should be published to the DOI server. If no groups are selected, the server is used to publish metadata that has no other DOI server related to its owner group. A record can be downloaded using the DataCite format from the API using: ## Creating the DOI -Once configured, DOI can be created using the interface. DOI is created on demand. It means that a user must ask for creation of a DOI. When created, the task is notified by email to the reviewer of the group (by default, can be configured for administrator only using the notification level of the task). +Once configured, a DOI can be created using the interface. DOIs are created on demand, meaning a user must request the creation of a DOI. It can be created by: + +- The user who created the metadata. +- A user with the Reviewer profile in the metadata owner group. +- A user with the Administrator profile. + +When created, the task is notified by email to the reviewer of the group (by default; it can be configured for administrators only using the notification level of the task). 
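To make the interplay between the `DataCite prefix`, the `DOI pattern` and its `{{uuid}}` / `{{id}}` / `{{groupOwner}}` placeholders concrete, here is a minimal illustrative sketch (hypothetical code, not the catalogue's implementation; `10.5072` is used only as an example prefix and all values are invented):

```java
// Hypothetical sketch: assembling a DOI name from the settings described above.
public class DoiPatternSketch {

    // prefix:  the "DataCite prefix" setting, e.g. "10.5072"
    // pattern: the "DOI pattern" setting, e.g. "example-{{groupOwner}}-{{id}}"
    static String buildDoi(String prefix, String pattern, String uuid, int id, int groupOwner) {
        String suffix = pattern
            .replace("{{uuid}}", uuid)
            .replace("{{id}}", String.valueOf(id))
            .replace("{{groupOwner}}", String.valueOf(groupOwner));
        return prefix + "/" + suffix;
    }

    public static void main(String[] args) {
        // Prints: 10.5072/example-42-1234
        System.out.println(buildDoi("10.5072", "example-{{groupOwner}}-{{id}}",
            "da165110-88fd-11da-a88f-000d939bc5d8", 1234, 42));
    }
}
```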
 ![](img/doi-request-menu.png)
diff --git a/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png b/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png
new file mode 100644
index 00000000000..efccf603065
Binary files /dev/null and b/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png differ
diff --git a/docs/manual/docs/user-guide/describing-information/inspire-editing.md b/docs/manual/docs/user-guide/describing-information/inspire-editing.md
index 7b3a9e1a358..5e54cc4145a 100644
--- a/docs/manual/docs/user-guide/describing-information/inspire-editing.md
+++ b/docs/manual/docs/user-guide/describing-information/inspire-editing.md
@@ -709,7 +709,7 @@ It is also possible to use the [latest ISO standard ISO19115-3:2018](https://git
 . This document is used to discover the location of the services to call to query and retrieve metadata.
-    - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results.
-    - *Use account* - Account credentials for basic HTTP authentication on the CSW server.
-- **Search criteria** - Using the Add button, you can add several search criteria. You can query only the fields recognised by the CSW protocol.
-- **Options** - Scheduling options.
-- **Options** - Specific harvesting options for this harvester.
-    - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped.
+To create a CSW harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `CSW`:
+
+![](img/add-csw-harvester.png)
+
+Provide the following information:
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to OGC CSW 2.0.2**
+    - *Service URL*: The URL of the capabilities document of the CSW server to be harvested. eg. . This document is used to discover the location of the services to call to query and retrieve metadata.
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the CSW server should be provided.
+    - *Search filter*: (Optional) Define the search criteria below to restrict the records to harvest.
+    - *Search options*:
+        - *Sort by*: Define the sort option used to retrieve the results. Sorting by 'identifier:A' means sorting by UUID in ascending alphabetical order. Any CSW queryable can be combined with A (ascending) or D (descending) to set the ordering (see the request sketch below).
+        - *Output Schema*: The metadata standard in which to request the metadata records from the CSW server.
+        - *Distributed search*: Enables distributed search on the remote server (if the remote server supports it). When this option is enabled, the remote catalog cascades the search to the federated CSW servers it has configured.
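+
+For illustration, a minimal sketch of the kind of CSW `GetRecords` request that a `Sort by` value of `identifier:A` corresponds to (this is not the harvester's exact payload, just the standard OGC form of an ascending sort):
+
+```xml
+<csw:GetRecords service="CSW" version="2.0.2" resultType="results"
+                xmlns:csw="http://www.opengis.net/cat/csw/2.0.2"
+                xmlns:ogc="http://www.opengis.net/ogc"
+                xmlns:dc="http://purl.org/dc/elements/1.1/">
+  <csw:Query typeNames="csw:Record">
+    <csw:ElementSetName>full</csw:ElementSetName>
+    <ogc:SortBy>
+      <ogc:SortProperty>
+        <ogc:PropertyName>dc:identifier</ogc:PropertyName>
+        <ogc:SortOrder>ASC</ogc:SortOrder>
+      </ogc:SortProperty>
+    </ogc:SortBy>
+  </csw:Query>
+</csw:GetRecords>
+```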
+
+- **Configure response processing for CSW**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or assigned a new UUID?
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+        - Accept all metadata without validation.
+        - Accept metadata that are XSD valid.
+        - Accept metadata that are XSD and schematron valid.
+    - *Check for duplicate resources based on the resource identifier*: If checked, ignores metadata with a resource identifier (`gmd:identificationInfo/*/gmd:citation/gmd:CI_Citation/gmd:identifier/*/gmd:code/gco:CharacterString`) that is already assigned to another metadata record in the catalog. It only applies to records in ISO19139 or ISO profiles.
+    - *XPath filter*: (Optional) When a record is retrieved from the remote server, an XPath expression is evaluated to accept or discard the record.
+    - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+    - *Batch edits*: (Optional) Allows updating harvested records using XPath syntax. It can be used to add, replace or delete elements.
+    - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
 - **Privileges** - Assign privileges to harvested metadata.
-- **Categories**
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md b/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md
index 5e0b6b3ab54..900deeafc4c 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md
@@ -4,21 +4,35 @@ This harvester will harvest metadata as XML files from a filesystem available on
 ## Adding a Local File System harvester
 
-The figure above shows the options available:
-
-- **Site** - Options about the remote site.
-    - *Name* - This is a short description of the filesystem harvester. It will be shown in the harvesting main page as the name for this instance of the Local Filesystem harvester.
-    - *Directory* - The path name of the directory containing the metadata (as XML files) to be harvested.
-    - *Recurse* - If checked and the *Directory* path contains other directories, then the harvester will traverse the entire file system tree in that directory and add all metadata files found.
-    - *Keep local if deleted at source* - If checked then metadata records that have already been harvested will be kept even if they have been deleted from the *Directory* specified.
-    - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results.
-- **Options** - Scheduling options.
-- **Harvested Content** - Options that are applied to harvested content.
-    - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format.
-    - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped.
-- **Privileges** - Assign privileges to harvested metadata.
-- **Categories**
+To create a Local File System harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Directory`:
+
+![](img/add-filesystem-harvester.png)
+
+Provide the following information:
-!!! Notes
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
-    - in order to be successfully harvested, metadata records retrieved from the file system must match a metadata schema in the local GeoNetwork instance
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to Directory**
+    - *Directory*: The path name of the directory containing the metadata (as XML files) to be harvested. The directory must be accessible by GeoNetwork.
+    - *Also search in subfolders*: If checked and the *Directory* path contains other directories, then the harvester will traverse the entire file system tree in that directory and add all metadata files found.
+    - *Script to run before harvesting*
+    - *Type of record*
+
+- **Configure response processing for filesystem**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or assigned a new UUID?
+    - *Update catalog record only if file was updated*
+    - *Keep local even if deleted at source*: If checked, metadata records that have already been harvested will be kept even if they have been deleted from the *Directory* specified.
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+        - Accept all metadata without validation.
+        - Accept metadata that are XSD valid.
+        - Accept metadata that are XSD and schematron valid.
+    - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+    - *Batch edits*: (Optional) Allows updating harvested records using XPath syntax. It can be used to add, replace or delete elements.
+    - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
+- **Privileges** - Assign privileges to harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md
new file mode 100644
index 00000000000..de085a9bb9b
--- /dev/null
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md
@@ -0,0 +1,9 @@
+# GeoNetwork 2.0 Harvester {#gn2_harvester}
+
+## Upgrading from GeoNetwork 2.0 Guidance
+
+GeoNetwork 2.1 introduced a new powerful harvesting engine which is not compatible with GeoNetwork version 2.0 based catalogues.
+
+* Harvesting metadata from a v2.0 server requires this harvesting type.
+* Old 2.0 servers can still harvest from 2.1 servers.
+* Because GeoNetwork 2.0 is no longer suitable for production use, this harvesting type is deprecated.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md
index b3bbff7fb44..3c692b5e3ec 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md
@@ -1,3 +1,43 @@
-# GeoNetwork 2.0 Harvester {#gn2_harvester}
+# GeoNetwork 2.1-3.X Harvester
 
-GeoNetwork 2.1 introduced a new powerful harvesting engine which is not compatible with GeoNetwork version 2.0 based catalogues. Old 2.0 servers can still harvest from 2.1 servers but harvesting metadata from a v2.0 server requires this harvesting type. Due to the fact that GeoNetwork 2.0 was released more than 5 years ago, this harvesting type is deprecated.
+This harvester will connect to a remote GeoNetwork server that runs a version from 2.1 to 3.X and retrieve the metadata records that match the query parameters.
+
+## Adding a GeoNetwork 2.1-3.X harvester
+
+To create a GeoNetwork 2.1-3.X harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `GeoNetwork (from 2.1 to 3.x)`:
+
+![](img/add-geonetwork-3-harvester.png)
+
+Provide the following information:
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to GeoNetwork (from 2.1 to 3.x)**
+    - *Catalog URL*:
+        - The remote URL of the GeoNetwork server from which metadata will be harvested. The URL should contain the catalog name, for example: http://www.fao.org/geonetwork.
+        - Additionally, the node name should be configured, usually the value `srv`.
+    - *Search filter*: (Optional) Define the filter to retrieve the remote metadata.
+    - *Catalog*: (Optional) Select the portal in the remote server to harvest.
+
+- **Configure response processing for GeoNetwork**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or assigned a new UUID?
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the remote GeoNetwork server should be provided.
+    - *Use full MEF format*: If checked, uses the MEF format instead of XML to retrieve the remote metadata. Recommended for metadata with files.
+    - *Use change date for comparison*: If checked, uses the change date to detect changes on the remote server.
+    - *Set category if it exists locally*: If checked, uses the category set on the metadata in the remote server also locally (assuming it exists locally). Applies only when using the MEF format for harvesting.
+    - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+    - *XSL filter name to apply*: (Optional) The XSL filter is applied to each metadata record. The filter is a process which depends on the schema (see the `process` folder of the schemas).
+
+      It can be composed of parameters which will be sent to the XSL transformation using the following syntax: `anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org&thesaurus=MYORGONLYTHESAURUS`
+
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+        - Accept all metadata without validation.
+        - Accept metadata that are XSD valid.
+        - Accept metadata that are XSD and schematron valid.
+
+- **Privileges** - Assign privileges to harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md b/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md
index e8887286ea3..ec16a07b9ae 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md
@@ -4,24 +4,38 @@ This harvester will connect to a remote GeoPortal version 9.3.x or 10.x server a
 ## Adding a GeoPortal REST harvester
 
-The figure above shows the options available:
-
-- **Site** - Options about the remote site.
-    - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the GeoPortal REST harvester.
-    - *Base URL* - The base URL of the GeoPortal server to be harvested. eg. . The harvester will add the additional path required to access the REST services on the GeoPortal server.
-    - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results.
-- **Search criteria** - Using the Add button, you can add several search criteria. You can query any field on the GeoPortal server using the Lucene query syntax described at .
-- **Options** - Scheduling options.
-- **Harvested Content** - Options that are applied to harvested content.
-    - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format. See notes section below for typical usage.
-    - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped.
+To create a GeoPortal REST harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `GeoPortal REST`:
+
+![](img/add-geoportalrest-harvester.png)
+
+Provide the following information:
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to GeoPortal REST**
+    - *URL*: The base URL of the GeoPortal server to be harvested. eg. . The harvester will add the additional path required to access the REST services on the GeoPortal server.
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the server should be provided.
+    - *Search filter*: (Optional) You can query any field on the GeoPortal server using the Lucene query syntax described at . A hypothetical example is sketched below.
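+
+      For example, a hypothetical filter using the Lucene query syntax (the field names depend on the remote GeoPortal index):
+
+      ```
+      title:water* AND author:smith
+      ```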
+
+- **Configure response processing for geoPREST**
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+        - Accept all metadata without validation.
+        - Accept metadata that are XSD valid.
+        - Accept metadata that are XSD and schematron valid.
+    - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+
 - **Privileges** - Assign privileges to harvested metadata.
-- **Categories**
+
 !!! Notes
 
-    - this harvester uses two REST services from the GeoPortal API:
+    - This harvester uses two REST services from the GeoPortal API:
         - `rest/find/document` with searchText parameter to return an RSS listing of metadata records that meet the search criteria (maximum 100000)
         - `rest/document` with id parameter from each result returned in the RSS listing
-    - this harvester has been tested with GeoPortal 9.3.x and 10.x. It can be used in preference to the CSW harvester if there are issues with the handling of the OGC standards etc.
-    - typically ISO19115 metadata produced by the Geoportal software will not have a 'gmd' prefix for the namespace `http://www.isotc211.org/2005/gmd`. GeoNetwork XSLTs will not have any trouble understanding this metadata but will not be able to map titles and codelists in the viewer/editor. To fix this problem, please select the ``Add-gmd-prefix`` XSLT for the *Apply this XSLT to harvested records* in the **Harvested Content** set of options described earlier
+    - This harvester has been tested with GeoPortal 9.3.x and 10.x. It can be used in preference to the CSW harvester if there are issues with the handling of the OGC standards etc.
+    - Typically ISO19115 metadata produced by the Geoportal software will not have a 'gmd' prefix for the namespace `http://www.isotc211.org/2005/gmd`. GeoNetwork XSLTs will not have any trouble understanding this metadata but will not be able to map titles and codelists in the viewer/editor. To fix this problem, please select the ``Add-gmd-prefix`` XSLT for the *XSL transformation to apply* option in the **Configure response processing** set of options described earlier
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md b/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md
index cf046363634..6c528feb7e2 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md
@@ -1,36 +1,49 @@
 # OAIPMH Harvesting {#oaipmh_harvester}
 
-This is a harvesting protocol that is widely used among libraries. GeoNetwork implements version 2.0 of the protocol.
+This is a harvesting protocol that is widely used among libraries. GeoNetwork implements version 2.0 of the protocol. An OAI-PMH server implements a harvesting protocol that GeoNetwork, acting as a client, can use to harvest metadata.
 
 ## Adding an OAI-PMH harvester
 
-An OAI-PMH server implements a harvesting protocol that GeoNetwork, acting as a client, can use to harvest metadata.
+To create an OAI-PMH harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OAI/PMH`:
 
-Configuration options:
+![](img/add-oaipmh-harvester.png)
 
-- **Site** - Options describing the remote site.
-    - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the OAIPMH harvester.
-    - *URL* - The URL of the OAI-PMH server from which metadata will be harvested.
-    - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results.
-    - *Use account* - Account credentials for basic HTTP authentication on the OAIPMH server.
-- **Search criteria** - This allows you to select metadata records for harvest based on certain criteria:
-    - *From* - You can provide a start date here. Any metadata whose last change date is equal to or greater than this date will be harvested. To add or edit a value for this field you need to use the icon alongside the text box. This field is optional so if you don't provide a start date the constraint is dropped. Use the icon to clear the field.
-    - *Until* - Functions in the same way as the *From* parameter but adds an end constraint to the last change date search. Any metadata whose last change data is less than or equal to this data will be harvested.
-    - *Set* - An OAI-PMH server classifies metadata into sets (like categories in GeoNetwork). You can request all metadata records that belong to a set (and any of its subsets) by specifying the name of that set here.
-    - *Prefix* - 'Prefix' means metadata format. The oai_dc prefix must be supported by all OAI-PMH compliant servers.
-    - You can use the Add button to add more than one Search Criteria set. Search Criteria sets can be removed by clicking on the small cross at the top left of the set.
+Provide the following information:
-!!! note
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
-    the 'OAI provider sets' drop down next to the *Set* text box and the 'OAI provider prefixes' drop down next to the *Prefix* textbox are initially blank. After specifying the connection URL, you can press the **Retrieve Info** button, which will connect to the remote OAI-PMH server, retrieve all supported sets and prefixes and fill the drop downs with these values. Selecting a value from either of these drop downs will fill the appropriate text box with the selected value.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to OAI/PMH**
+    - *URL*: The URL of the OAI-PMH server from which metadata will be harvested.
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the OAIPMH server should be provided.
+    - *Search filter*: (Optional) Define the search criteria below to restrict the records to harvest.
+        - *From*: You can provide a start date here. Any metadata whose last change date is equal to or greater than this date will be harvested. To add or edit a value for this field you need to use the icon alongside the text box. This field is optional so if you don't provide a start date the constraint is dropped. Use the icon to clear the field.
+        - *Until*: Functions in the same way as the *From* parameter but adds an end constraint to the last change date search. Any metadata whose last change date is less than or equal to this date will be harvested.
+        - *Set*: An OAI-PMH server classifies metadata into sets (like categories in GeoNetwork). You can request all metadata records that belong to a set (and any of its subsets) by specifying the name of that set here.
+        - *Prefix*: 'Prefix' means metadata format. The oai_dc prefix must be supported by all OAI-PMH compliant servers.
+
+    !!! note
+
+        The 'OAI provider sets' drop down next to the *Set* text box and the 'OAI provider prefixes' drop down next to the *Prefix* textbox are initially blank. After specifying the connection URL, you can press the **Retrieve Info** button, which will connect to the remote OAI-PMH server, retrieve all supported sets and prefixes and fill the drop downs with these values. Selecting a value from either of these drop downs will fill the appropriate text box with the selected value.
+
+- **Configure response processing for oaipmh**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or assigned a new UUID?
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+        - Accept all metadata without validation.
+        - Accept metadata that are XSD valid.
+        - Accept metadata that are XSD and schematron valid.
+    - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+
+    - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
+- **Privileges** - Assign privileges to harvested metadata.
-- **Options** - Scheduling Options.
-- **Privileges**
-- **Categories**
 
 !!! Notes
 
-    - if you request the oai_dc output format, GeoNetwork will convert it to Dublin Core format.
-    - when you edit a previously created OAIPMH harvester instance, both the *set* and *prefix* drop down lists will be empty. You have to press the retrieve info button again to connect to the remote server and retrieve set and prefix information.
-    - the id of the remote server must be a UUID. If not, metadata can be harvested but during hierarchical propagation id clashes could corrupt harvested metadata.
+    - If you request the oai_dc output format, GeoNetwork will convert it to Dublin Core format.
+    - When you edit a previously created OAIPMH harvester instance, both the *set* and *prefix* drop down lists will be empty. You have to press the retrieve info button again to connect to the remote server and retrieve set and prefix information.
+    - The id of the remote server must be a UUID. If not, metadata can be harvested but during hierarchical propagation id clashes could corrupt harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md b/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md
index 52c88c134d4..70f45cf75d6 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md
@@ -11,27 +11,46 @@ An OGC service implements a GetCapabilities operation that GeoNetwork, acting as
 ## Adding an OGC Service Harvester
 
-Configuration options:
-
-- **Site**
-    - *Name* - The name of the catalogue and will be one of the search criteria.
-    - *Type* - The type of OGC service indicates if the harvester has to query for a specific kind of service. Supported type are WMS (1.0.0, 1.1.1, 1.3.0), WFS (1.0.0 and 1.1.0), WCS (1.0.0), WPS (0.4.0 and 1.0.0), CSW (2.0.2) and SOS (1.0.0).
-    - *Service URL* - The service URL is the URL of the service to contact (without parameters like "REQUEST=GetCapabilities", "VERSION=", \...). It has to be a valid URL like .
-    - *Metadata language* - Required field that will define the language of the metadata. It should be the language used by the OGC web service administrator.
-    - *ISO topic category* - Used to populate the topic category element in the metadata. It is recommended to choose one as the topic category is mandatory for the ISO19115/19139 standard if the hierarchical level is "datasets".
-    - *Type of import* - By default, the harvester produces one service metadata record. Check boxes in this group determine the other metadata that will be produced.
-        - *Create metadata for layer elements using GetCapabilities information*: Checking this option means that the harvester will loop over datasets served by the service as described in the GetCapabilities document.
-        - *Create metadata for layer elements using MetadataURL attributes*: Checkthis option means that the harvester will generate metadata from an XML document referenced in the MetadataUrl attribute of the dataset in the GetCapabilities document. If the document referred to by this attribute is not valid (eg. unknown schema, bad XML format), the GetCapabilities document is used as per the previous option.
-        - *Create thumbnails for WMS layers*: If harvesting from an OGC WMS, then checking this options means that thumbnails will be created during harvesting.
-    - *Target schema* - The metadata schema of the dataset metadata records that will be created by this harvester.
-    - *Icon* - The default icon displayed as attribution logo for metadata created by this harvester.
-- **Options** - Scheduling Options.
-- **Privileges**
-- **Category for service** - Metadata for the harvested service is assigned to the category selected in this option (eg. "interactive resources").
-- **Category for datasets** - Metadata for the harvested datasets is assigned to the category selected in this option (eg. "datasets").
+To create an OGC Service harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OGC Web Services`:
+
+![](img/add-ogcwebservices-harvester.png)
+
+Provide the following information:
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to OGC Web Services**
+    - *Service URL*: The service URL is the URL of the service to contact (without parameters like "REQUEST=GetCapabilities", "VERSION=", \...). It has to be a valid URL like .
+    - *Service type*: The type of OGC service indicates if the harvester has to query for a specific kind of service. Supported types are WMS (1.0.0, 1.1.1, 1.3.0), WFS (1.0.0 and 1.1.0), WCS (1.0.0), WPS (0.4.0 and 1.0.0), CSW (2.0.2) and SOS (1.0.0).
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the server should be provided.
+
+- **Configure response processing for ogcwxs**
+    - *Build service metadata record from a template*:
+        - *Category for service metadata*: (Optional) Metadata for the harvested service is assigned to the category selected in this option (eg. "interactive resources").
+    - *Create record for each layer only using GetCapabilities information*: Checking this option means that the harvester will loop over datasets served by the service as described in the GetCapabilities document.
+    - *Import record for each layer using MetadataURL attributes*: Checking this option means that the harvester will generate metadata from an XML document referenced in the MetadataUrl attribute of the dataset in the GetCapabilities document. If the document referred to by this attribute is not valid (eg. unknown schema, bad XML format), the GetCapabilities document is used as per the previous option.
+    - *Build dataset metadata records from a template*
+        - *Create thumbnail*: If checked, when harvesting from an OGC Web Map Service (WMS) that supports WGS84 projection, thumbnails for the layer metadata will be created during harvesting.
+        - *Category for datasets*: Metadata for the harvested datasets is assigned to the category selected in this option (eg. "datasets").
+
+    - *ISO category*: (Optional) Used to populate the topic category element in the metadata. It is recommended to choose one as the topic category is mandatory for the ISO19115/19139 standard if the hierarchical level is "datasets".
+    - *Metadata language*: Required field that will define the language of the metadata. It should be the language used by the OGC web service administrator.
+    - *Output schema*: The metadata schema of the dataset metadata records that will be created by this harvester. The value should be an XSLT process which is used by the harvester to convert the GetCapabilities document to metadata records from that schema. If in doubt, use the default value `iso19139`.
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+        - Accept all metadata without validation.
+        - Accept metadata that are XSD valid.
+        - Accept metadata that are XSD and schematron valid.
+    - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+
+
+- **Privileges** - Assign privileges to harvested metadata.
+
 !!! Notes
 
-    - every time the harvester runs, it will remove previously harvested records and create new records. GeoNetwork will generate the uuid for all metadata (both service and datasets). The exception to this rule is dataset metadata created using the MetadataUrl tag is in the GetCapabilities document, in that case, the uuid of the remote XML document is used instead
-    - thumbnails can only be generated when harvesting an OGC Web Map Service (WMS). The WMS should support the WGS84 projection
-    - the chosen *Target schema* must have the support XSLTs which are used by the harvester to convert the GetCapabilities statement to metadata records from that schema. If in doubt, use iso19139.
+    - Every time the harvester runs, it will remove previously harvested records and create new records. GeoNetwork will generate the uuid for all metadata (both service and datasets). The exception to this rule is dataset metadata created using the MetadataUrl tag in the GetCapabilities document; in that case, the uuid of the remote XML document is used instead
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-sde.md b/docs/manual/docs/user-guide/harvesting/harvesting-sde.md
index 7f4f99cb913..32cdd4df780 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-sde.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-sde.md
@@ -1,55 +1,60 @@
 # Harvesting an ARCSDE Node {#sde_harvester}
 
-This is a harvesting protocol for metadata stored in an ArcSDE installation.
+This is a harvesting protocol for metadata stored in an ArcSDE installation. The harvester identifies the ESRI metadata format (ESRI ISO or ESRI FGDC) and applies the required XSLTs to transform the metadata to ISO19139.
 
 ## Adding an ArcSDE harvester
 
-The harvester identifies the ESRI metadata format: ESRI ISO, ESRI FGDC to apply the required xslts to transform metadata to ISO19139. Configuration options:
+To create an ArcSDE harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `ArcSDE`:
+
+![](img/add-arcsde-harvester.png)
+
+Provide the following information:
 
 - **Identification**
-    - *Name* - This is a short description of the node. It will be shown in the harvesting main page.
-    - *Group* - User admin of this group and catalog administrator can manage this node.
-    - *Harvester user* - User that owns the harvested metadata.
-- **Schedule** - Schedule configuration to execute the harvester.
-- **Configuration for protocol ArcSDE**
-    - *Server* - ArcSde server IP address or name.
-    - *Port* - ArcSde service port (typically 5151) or ArcSde database port, depending on the connection type selected, see below the *Connection type* section.
-    - *Database name* - ArcSDE instance name (typically esri_sde).
-    - *ArcSde version* - ArcSde version to harvest. The data model used by ArcSde is different depending on the ArcSde version.
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to Database**
+    - *Server*: ArcSDE server IP address or name.
+    - *Port*: ArcSDE service port (typically 5151) or ArcSDE database port, depending on the connection type selected, see below the *Connection type* section.
+    - *Database name*: ArcSDE instance name (typically esri_sde).
+    - *ArcSDE version*: ArcSDE version to harvest. The data model used by ArcSDE is different depending on the ArcSDE version.
     - *Connection type*
-        - *ArcSde service* - Uses the ArcSde service to retrieve the metadata.
+        - *ArcSDE service*: Uses the ArcSDE service to retrieve the metadata.
 
             !!! note
- - The following jars are required: - - - jpe_sdk.jar - - jsde_sdk.jar - - dummy-api-XXX.jar must be removed from INSTALL_DIR/web/geonetwork/WEB-INF/lib + Additional installation steps are required to use the ArcSDE harvester because it needs proprietary ESRI Java api jars to be installed. + ArcSDE Java API libraries need to be installed by the user in GeoNetwork (folder `INSTALL_DIR_GEONETWORK/WEB-INF/lib`), as these are proprietary libraries not distributed with GeoNetwork. - - *Database direct connection* - Uses a database connection (JDBC) to retrieve the metadata. With + The following jars are required: - !!! note + - jpe_sdk.jar + - jsde_sdk.jar - Database direct connection requires to copy JDBC drivers in INSTALL_DIR_GEONETWORK/WEB-INF/lib. + `dummy-api-XXX.jar` must be removed from `INSTALL_DIR/web/geonetwork/WEB-INF/lib`. + - *Database direct connection*: Uses a database connection (JDBC) to retrieve the metadata. + + !!! note + + Database direct connection requires to copy JDBC drivers in `INSTALL_DIR_GEONETWORK/WEB-INF/lib`. !!! note Postgres JDBC drivers are distributed with GeoNetwork, but not for Oracle or SqlServer. - - *Database type* - ArcSde database type: Oracle, Postgres, SqlServer. Only available if connection type is configured to *Database direct connection*. - - *Username* - Username to connect to ArcSDE server. - - *Password* - Password of the ArcSDE user. -- **Advanced options for protocol arcsde** - - *Validate records before import* - Defines the criteria to reject metadata that is invalid according to XSD and schematron rules. + - *Database type* - ArcSDE database type: Oracle, Postgres, SqlServer. Only available if connection type is configured to *Database direct connection*. + - *Remote authentication*: Credentials to connect to the ArcSDE server. + +- **Configure response processing for arcsde** + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). - Accept all metadata without validation. - Accept metadata that are XSD valid. - Accept metadata that are XSD and schematron valid. + - **Privileges** - Assign privileges to harvested metadata. diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md b/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md index 775b4a9d1a9..e7243dc8421 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md @@ -4,47 +4,72 @@ This harvester connects to a remote server via a simple URL to retrieve metadata ## Adding a simple URL harvester -- **Site** - Options about the remote site. +To create a Simple URL harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Simple URL`: - - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the harvester. - - *Service URL* - The URL of the server to be harvested. This can include pagination params like `?start=0&rows=20` - - *loopElement* - Propery/element containing a list of the record entries. (Indicated as an absolute path from the document root.) eg. `/datasets` - - *numberOfRecordPath* : Property indicating the total count of record entries. (Indicated as an absolute path from the document root.) eg. `/nhits` - - *recordIdPath* : Property containing the record id. eg. 
`datasetid` - - *pageFromParam* : Property indicating the first record item on the current "page" eg. `start` - - *pageSizeParam* : Property indicating the number of records containned in the current "page" eg. `rows` - - *toISOConversion* : Name of the conversion schema to use, which must be available as XSL on the GN instance. eg. `OPENDATASOFT-to-ISO19115-3-2018` +![](img/add-simpleurl-harvester.png) - !!! note +Providing the following information: - GN looks for schemas by name in . These schemas might internally include schemas from other locations like . To indicate the `fromJsonOpenDataSoft` schema for example, from the latter location directly in the admin UI the following syntax can be used: `schema:iso19115-3.2018:convert/fromJsonOpenDataSoft`. +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). - **Sample configuration for opendatasoft** +- **Configure connection to Simple URL** + - *URL* - The URL of the server to be harvested. This can include pagination params like `?start=0&rows=20` + - *Remote authentication*: If checked, should be provided the credentials for basic HTTP authentication on the server. + - *Element to loop on*: Propery/element containing a list of the record entries. (Indicated as an absolute path from the document root.) eg. `/datasets` + - *Element for the UUID of each record* : Property containing the record id. eg. `datasetid` + - *Pagination parameters*: (optional). + - *Element for the number of records to collect*: Property indicating the total count of record entries. (Indicated as an absolute path from the document root.) eg. `/nhits` + - *From URL parameter*: Property indicating the first record item on the current "page" eg. `start` + - *Size URL parameter*: Property indicating the number of records containned in the current "page" eg. `rows` + +- **Configure response processing for Simple URL** - - *loopElement* - `/datasets` - - *numberOfRecordPath* : `/nhits` - - *recordIdPath* : `datasetid` - - *pageFromParam* : `start` - - *pageSizeParam* : `rows` - - *toISOConversion* : `OPENDATASOFT-to-ISO19115-3-2018` + - *XSL transformation to apply*: Name of the conversion schema to use, which must be available as XSL on the GeoNetwork instance. eg. `OPENDATASOFT-to-ISO19115-3-2018` - **Sample configuration for ESRI** + !!! note - - *loopElement* - `/dataset` - - *numberOfRecordPath* : `/result/count` - - *recordIdPath* : `landingPage` - - *pageFromParam* : `start` - - *pageSizeParam* : `rows` - - *toISOConversion* : `ESRIDCAT-to-ISO19115-3-2018` + GN looks for schemas by name in . These schemas might internally include schemas from other locations like . To indicate the `fromJsonOpenDataSoft` schema for example, from the latter location directly in the admin UI the following syntax can be used: `schema:iso19115-3.2018:convert/fromJsonOpenDataSoft`. 
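+
+To make the path settings concrete, here is a minimal sketch of a JSON response (hypothetical, loosely modelled on an OpenDataSoft-style catalog API) that the values `/datasets`, `/nhits` and `datasetid` used in the sample configurations below would match:
+
+```json
+{
+  "nhits": 2,
+  "datasets": [
+    {"datasetid": "roads-2024", "metas": {"title": "Road network"}},
+    {"datasetid": "rivers-2024", "metas": {"title": "River network"}}
+  ]
+}
+```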
-    **Sample configuration for DKAN**
-
-    - *loopElement* - `/result/0`
-    - *numberOfRecordPath* : `/result/count`
-    - *recordIdPath* : `id`
-    - *pageFromParam* : `start`
-    - *pageSizeParam* : `rows`
-    - *toISOConversion* : `DKAN-to-ISO19115-3-2018`
+    - *Batch edits*: (Optional) Allows updating harvested records using XPath syntax. It can be used to add, replace or delete elements.
+    - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
        - Accept all metadata without validation.
        - Accept metadata that are XSD valid.
        - Accept metadata that are XSD and schematron valid.
 
 - **Privileges** - Assign privileges to harvested metadata.
+
+
+## Sample configurations
+
+### Sample configuration for opendatasoft
+
+- *Element to loop on*: `/datasets`
+- *Element for the number of records to collect*: `/nhits`
+- *Element for the UUID of each record*: `datasetid`
+- *From URL parameter*: `start`
+- *Size URL parameter*: `rows`
+- *XSL transformation to apply*: `OPENDATASOFT-to-ISO19115-3-2018`
+
+### Sample configuration for ESRI
+
+- *Element to loop on*: `/dataset`
+- *Element for the number of records to collect*: `/result/count`
+- *Element for the UUID of each record*: `landingPage`
+- *From URL parameter*: `start`
+- *Size URL parameter*: `rows`
+- *XSL transformation to apply*: `ESRIDCAT-to-ISO19115-3-2018`
+
+### Sample configuration for DKAN
+
+- *Element to loop on*: `/result/0`
+- *Element for the number of records to collect*: `/result/count`
+- *Element for the UUID of each record*: `id`
+- *From URL parameter*: `start`
+- *Size URL parameter*: `rows`
+- *XSL transformation to apply*: `DKAN-to-ISO19115-3-2018`
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md b/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md
index 2c988d58e34..bb4716c7508 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md
@@ -4,35 +4,33 @@ THREDDS catalogs describe inventories of datasets. They are organised in a hiera
 ## Adding a THREDDS Catalog Harvester
 
-The available options are:
-
-- **Site**
-    - *Name* - This is a short description of the THREDDS catalog. It will be shown in the harvesting main page as the name of this THREDDS harvester instance.
-    - *Catalog URL* - The remote URL of the THREDDS Catalog from which metadata will be harvested. This must be the xml version of the catalog (i.e. ending with .xml). The harvester will crawl through all datasets and services defined in this catalog creating metadata for them as specified by the options described further below.
-    - *Metadata language* - Use this option to specify the language of the metadata to be harvested.
-    - *ISO topic category* - Use this option to specify the ISO topic category of service metadata.
-    - *Create ISO19119 metadata for all services in catalog* - Select this option to generate iso19119 metadata for services defined in the THREDDS catalog (eg. OpenDAP, OGC WCS, ftp) and for the THREDDS catalog itself.
-    - *Create metadata for Collection datasets* - Select this option to generate metadata for each collection dataset (THREDDS dataset containing other datasets). Creation of metadata can be customised using options that are displayed when this option is selected as described further below.
-    - *Create metadata for Atomic datasets* - Select this option to generate metadata for each atomic dataset (THREDDS dataset not containing other datasets -- for example cataloguing a netCDF dataset). Creation of metadata can be customised using options that are displayed when this option is selected as described further below.
-    - *Ignore harvesting attribute* - Select this option to harvest metadata for selected datasets regardless of the harvest attribute for the dataset in the THREDDS catalog. If this option is not selected, metadata will only be created for datasets that have a harvest attribute set to true.
-    - *Extract DIF metadata elements and create ISO metadata* - Select this option to generate ISO metadata for datasets in the THREDDS catalog that have DIF metadata elements. When this option is selected a list of schemas is shown that have a DIFToISO.xsl stylesheet available (see for example `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/DIFToISO.xsl`). Metadata is generated by reading the DIF metadata items in the THREDDS into a DIF format metadata record and then converting that DIF record to ISO using the DIFToISO stylesheet.
-    - *Extract Unidata dataset discovery metadata using fragments* - Select this option when the metadata in your THREDDS or netCDF/ncml datasets follows Unidata dataset discovery conventions (see ). You will need to write your own stylesheets to extract this metadata as fragments and define a template to combine with the fragments. When this option is selected the following additional options will be shown:
-        - *Select schema for output metadata records* - choose the ISO metadata schema or profile for the harvested metadata records. Note: only the schemas that have THREDDS fragment stylesheets will be displayed in the list (see the next option for the location of these stylesheets).
-        - *Stylesheet to create metadata fragments* - Select a stylesheet to use to convert metadata for the dataset (THREDDS metadata and netCDF ncml where applicable) into metadata fragments. These stylesheets can be found in the directory convert/ThreddsToFragments in the schema directory eg. for iso19139 this would be `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/ThreddsToFragments`.
-        - *Create subtemplates for fragments and XLink them into template* - Select this option to create a subtemplate (=metadata fragment stored in GeoNetwork catalog) for each metadata fragment generated.
-        - *Template to combine with fragments* - Select a template that will be filled in with the metadata fragments generated for each dataset. The generated metadata fragments are used to replace referenced elements in the templates with an xlink to a subtemplate if the *Create subtemplates* option is checked. If *Create subtemplates* is not checked, then the fragments are simply copied into the template metadata record.
-    - For Atomic Datasets , one additional option is provided *Harvest new or modified datasets only*. If this option is checked only datasets that have been modified or didn't exist when the harvester was last run will be harvested.
-    - *Create Thumbnails* - Select this option to create thumbnails for WMS layers in referenced WMS services
-    - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results.
-- **Options** - Scheduling Options.
-- **Privileges**
-- **Category for Service** - Select the category to assign to the ISO19119 service records for the THREDDS services.
-- **Category for Datasets** - Select the category to assign the generated metadata records (and any subtemplates) to.
-
-At the bottom of the page there are the following buttons:
-
-- **Back** - Go back to the main harvesting page. The harvesting definition is not added.
-- **Save** - Saves this harvester definition creating a new harvesting instance. After the save operation has completed, the main harvesting page will be displayed.
+To create a THREDDS Catalog harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Thredds Catalog`:
+
+![](img/add-threddscatalog-harvester.png)
+
+Provide the following information:
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to Thredds catalog**
+    - *Service URL*: The remote URL of the THREDDS Catalog from which metadata will be harvested. This must be the xml version of the catalog (i.e. ending with .xml). The harvester will crawl through all datasets and services defined in this catalog creating metadata for them as specified by the options described further below.
+
+- **Configure response processing for thredds**
+    - *Language*: Use this option to specify the language of the metadata to be harvested.
+    - *ISO19115 Topic category for output metadata records*: Use this option to specify the ISO topic category of service metadata.
+    - *Create ISO19119 metadata for all services in the thredds catalog*: Select this option to generate iso19119 metadata for services defined in the THREDDS catalog (eg. OpenDAP, OGC WCS, ftp) and for the THREDDS catalog itself.
+    - *Select schema for output metadata records*: The metadata standard to create the metadata. It should be a valid metadata schema installed in GeoNetwork, by default `iso19139`.
+    - *Dataset title*: (Optional) Title for the dataset. Default is the catalog URL.
+    - *Dataset abstract*: (Optional) Abstract for the dataset. Default is 'Thredds Dataset'.
+    - *Geonetwork category to assign to service metadata records*: Select the category to assign to the ISO19119 service records for the THREDDS services.
+    - *Geonetwork category to assign to dataset metadata records*: Select the category to assign the generated metadata records (and any subtemplates) to.
+
+- **Privileges** - Assign privileges to harvested metadata.
 
 ## More about harvesting THREDDS DIF metadata elements with the THREDDS Harvester
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md b/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md
index 4313483f627..cdd6b12434a 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md
@@ -4,19 +4,35 @@ This harvesting type uses the WebDAV (Distributed Authoring and Versioning) prot
 ## Adding a WebDAV harvester
 
-- **Site** - Options about the remote site.
-    - *Subtype* - Select WebDAV or WAF according to the type of server being harvested.
-    - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the WebDAV harvester.
-    - *URL* - The remote URL from which metadata will be harvested. Each file found that ends with .xml is assumed to be a metadata record.
-    - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results.
-    - *Use account* - Account credentials for basic HTTP authentication on the WebDAV/WAF server.
-- **Options** - Scheduling options.
-- **Options** - Specific harvesting options for this harvester.
-    - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped.
-    - *Recurse* - When the harvesting engine will find folders, it will recursively descend into them.
-- **Privileges** - Assign privileges to harvested metadata.
-- **Categories**
+To create a WebDAV harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `WebDAV / WAF`:
+
+![](img/add-webdav-harvester.png)
+
+Provide the following information:
-!!! Notes
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
-    - The same metadata could be harvested several times by different instances of the WebDAV harvester. This is not good practise because copies of the same metadata record will have a different UUID.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to WebDAV / WAF**
+    - *URL*: The remote URL from which metadata will be harvested. Each file found that has the extension `.xml` is assumed to be a metadata record.
+    - *Type of protocol*: Select WebDAV or WAF according to the type of server being harvested.
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the WebDAV/WAF server should be provided.
+    - *Also search in subfolders*: When the harvesting engine finds folders, it will recursively descend into them.
+
+- **Configure response processing for webdav**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or assigned a new UUID?
+    - *XSL filter name to apply*: (Optional) The XSL filter is applied to each metadata record. The filter is a process which depends on the schema (see the `process` folder of the schemas).
+
+      It can be composed of parameters which will be sent to the XSL transformation using the following syntax: `anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org&thesaurus=MYORGONLYTHESAURUS`
+
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+        - Accept all metadata without validation.
+        - Accept metadata that are XSD valid.
+        - Accept metadata that are XSD and schematron valid.
+ - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
+- **Privileges** - Assign privileges to harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md b/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md
index 16abfa13bb7..c198e5f5966 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md
@@ -2,26 +2,43 @@
 Metadata can be present in the tables of relational databases, which are commonly used by many organisations. Putting an OGC Web Feature Service (WFS) over a relational database will allow metadata to be extracted via standard query mechanisms. This harvesting type allows the user to specify a GetFeature query and map information from the features to fragments of metadata that can be linked or copied into a template to create metadata records.
 
+An OGC web feature service (WFS) implements a GetFeature query operation that returns data in the form of features (usually rows from related tables in a relational database). GeoNetwork, acting as a client, can read the GetFeature response and apply a user-supplied XSLT stylesheet to produce metadata fragments that can be linked or copied into a user-supplied template to build metadata records.
+
 ## Adding an OGC WFS GetFeature Harvester
 
-An OGC web feature service (WFS) implements a GetFeature query operation that returns data in the form of features (usually rows from related tables in a relational database). GeoNetwork, acting as a client, can read the GetFeature response and apply a user-supplied XSLT stylesheet to produce metadata fragments that can be linked or copied into a user-supplied template to build metadata records.
+To create an OGC WFS GetFeature harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OGC WFS GetFeature`:
+
+![](img/add-wfsgetfeature-harvester.png)
 
-The available options are:
+Provide the following information:
 
-- **Site**
- - *Name* - This is a short description of the harvester. It will be shown in the harvesting main page as the name for this WFS GetFeature harvester.
- - *Service URL* - The bare URL of the WFS service (no OGC params required)
- - *Metadata language* - The language that will be used in the metadata records created by the harvester
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the `UserAdmin` profile in this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to OGC WFS GetFeature**
+ - *Service URL*: The bare URL of the WFS service (no OGC params required).
+ - *Remote authentication*: If checked, credentials for basic HTTP authentication on the WFS server must be provided.
 - *OGC WFS GetFeature Query* - The OGC WFS GetFeature query used to extract features from the WFS (a minimal example request is shown below).
- - *Schema for output metadata records* - choose the metadata schema or profile for the harvested metadata records. Note: only the schemas that have WFS fragment stylesheets will be displayed in the list (see the next option for the location of these stylesheets).
- - *Stylesheet to create fragments* - User-supplied stylesheet that transforms the GetFeature response to a metadata fragments document (see below for the format of that document). Stylesheets exist in the WFSToFragments directory which is in the convert directory of the selected output schema. eg. for the iso19139 schema, this directory is `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/WFSToFragments`.
- - *Save large response to disk* - Check this box if you expect the WFS GetFeature response to be large (eg. greater than 10MB). If checked, the GetFeature response will be saved to disk in a temporary file. Each feature will then be extracted from the temporary file and used to create the fragments and metadata records. If not checked, the response will be held in RAM.
- - *Create subtemplates* - Check this box if you want the harvested metadata fragments to be saved as subtemplates in the metadata catalog and xlink'd into the metadata template (see next option). If not checked, the fragments will be copied into the metadata template.
- - *Template to use to build metadata using fragments* - Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record.
- - *Category for records built with linked fragments* - Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record.
-- **Options**
-- **Privileges**
-- **Category for subtemplates** - When fragments are saved to GeoNetwork as subtemplates they will be assigned to the category selected here.
+
+- **Configure response processing for wfsfeatures**
+ - *Language*: The language that will be used in the metadata records created by the harvester.
+ - *Metadata standard*: The metadata standard to create the metadata. It should be a valid metadata schema installed in GeoNetwork, by default `iso19139`.
+ - *Save large response to disk*: Check this box if you expect the WFS GetFeature response to be large (e.g. greater than 10 MB). If checked, the GetFeature response will be saved to disk in a temporary file. Each feature will then be extracted from the temporary file and used to create the fragments and metadata records. If not checked, the response will be held in RAM.
+ - *Stylesheet to create fragments*: User-supplied stylesheet that transforms the GetFeature response to a metadata fragments document (see below for the format of that document). Stylesheets exist in the WFSToFragments directory, which is in the convert directory of the selected output schema, e.g. for the `iso19139` schema this directory is `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/WFSToFragments`.
+ - *Create subtemplates*: Check this box if you want the harvested metadata fragments to be saved as subtemplates in the metadata catalog and xlink'd into the metadata template (see next option). If not checked, the fragments will be copied into the metadata template.
+ - *Select template to combine with fragments*: Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record.
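+
+   As an illustration of the *OGC WFS GetFeature Query* option above, a minimal query could look as follows. The feature type `app:metadata` and its namespace are hypothetical and depend on the WFS being harvested:
+
+   ```xml
+   <wfs:GetFeature service="WFS" version="1.1.0"
+                   xmlns:wfs="http://www.opengis.net/wfs"
+                   xmlns:app="http://example.org/app">
+     <!-- One query per feature type; each returned feature is fed to the fragment stylesheet -->
+     <wfs:Query typeName="app:metadata"/>
+   </wfs:GetFeature>
+   ```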
+ - *Category for directory entries*: (Optional) When fragments are saved to GeoNetwork as subtemplates they will be assigned to the category selected here. + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. + +- **Privileges** - Assign privileges to harvested metadata. + ## More about turning the GetFeature Response into metadata fragments diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md b/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md deleted file mode 100644 index 47722c37464..00000000000 --- a/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md +++ /dev/null @@ -1,90 +0,0 @@ -# Z3950 Harvesting {#z3950_harvester} - -Z3950 is a remote search and harvesting protocol that is commonly used to permit search and harvest of metadata. Although the protocol is often used for library catalogs, significant geospatial metadata catalogs can also be searched using Z3950 (eg. the metadata collections of the Australian Government agencies that participate in the Australian Spatial Data Directory - ASDD). This harvester allows the user to specify a Z3950 query and retrieve metadata records from one or more Z3950 servers. - -## Adding a Z3950 Harvester - -The available options are: - -- **Site** - - *Name* - A short description of this Z3950 harvester. It will be shown in the harvesting main page using this name. - - *Z3950 Server(s)* - These are the Z3950 servers that will be searched. You can select one or more of these servers. - - *Z3950 Query* - Specify the Z3950 query to use when searching the selected Z3950 servers. At present this field is known to support the Prefix Query Format (also known as Prefix Query Notation) which is described at this URL: . See below for more information and some simple examples. - - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results. -- **Options** - Scheduling options. -- **Harvested Content** - - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format. - - *Validate* - If checked, records that do not/cannot be validated will be rejected. -- **Privileges** -- **Categories** - -!!! note - - this harvester automatically creates a new Category named after each of the Z3950 servers that return records. Records that are returned by a server are assigned to the category named after that server. - - -## More about PQF Z3950 Queries - -PQF is a rather arcane query language. It is based around the idea of attributes and attribute sets. The most common attribute set used for geospatial metadata in Z3950 servers is the GEO attribute set (which is an extension of the BIB-1 and GILS attribute sets - see ). So all PQF queries to geospatial metadata Z3950 servers should start off with @attrset geo. - -The most useful attribute types in the GEO attribute set are as follows: - -| @attr number | Meaning | Description | -|---------------|------------|--------------------------------------------------| -| 1 | Use | What field to search | -| 2 | Relation | How to compare the term specified | -| 4 | Structure | What type is the term? eg. date, numeric, phrase | -| 5 | Truncation | How to truncate eg. 
right | - -In GeoNetwork the numeric values that can be specified for `@attr 1` map to the lucene index field names as follows: - -| @attr 1= | Lucene index field | ISO19139 element | -|----------------------|-------------------------------|-------------------------------------------------------------------------------------------------------------| -| 1016 | any | All text from all metadata elements | -| 4 | title, altTitle | gmd:identificationInfo//gmd:citation//gmd:title/gco:CharacterString | -| 62 | abstract | gmd:identificationInfo//gmd:abstract/gco:CharacterString | -| 1012 | _changeDate | Not a metadata element (maintained by GeoNetwork) | -| 30 | createDate | gmd:MD_Metadata/gmd:dateStamp/gco:Date | -| 31 | publicationDate | gmd:identificationInfo//gmd:citation//gmd:date/gmd:='publication' | -| 2072 | tempExtentBegin | gmd:identificationInfo//gmd:extent//gmd:temporalElement//gml:begin(Position) | -| 2073 | tempExtentEnd | gmd:identificationInfo//gmd:extent//gmd:temporalElement//gml:end(Position) | -| 2012 | fileId | gmd:MD_Metadata/gmd:fileIdentifier/* | -| 12 | identifier | gmd:identificationInfo//gmd:citation//gmd:identifier//gmd:code/* | -| 21,29,2002,3121,3122 | keyword | gmd:identificationInfo//gmd:keyword/* | -| 2060 | northBL,eastBL,southBL,westBL | gmd:identificationInfo//gmd:extent//gmd:EX_GeographicBoundingBox/gmd:westBoundLongitude*/gco:Decimal (etc) | - -Note that this is not a complete set of the mappings between Z3950 GEO attribute set and the GeoNetwork lucene index field names for ISO19139. Check out INSTALL_DIR/web/geonetwork/xml/search/z3950Server.xsl and INSTALL_DIR/web/geonetwork/xml/schemas/iso19139/index-fields.xsl for more details and annexe A of the GEO attribute set for Z3950 at for more details. - -Common values for the relation attribute (`@attr=2`): - -| @attr 2= | Description | -|-----------|--------------------------| -| 1 | Less than | -| 2 | Less than or equal to | -| 3 | Equals | -| 4 | Greater than or equal to | -| 5 | Greater than | -| 6 | Not equal to | -| 7 | Overlaps | -| 8 | Fully enclosed within | -| 9 | Encloses | -| 10 | Fully outside of | - -So a simple query to get all metadata records that have the word 'the' in any field would be: - -`@attrset geo @attr 1=1016 the` - -- `@attr 1=1016` means that we are doing a search on any field in the metadata record - -A more sophisticated search on a bounding box might be formulated as: - -`@attrset geo @attr 1=2060 @attr 4=201 @attr 2=7 "-36.8262 142.6465 -44.3848 151.2598` - -- `@attr 1=2060` means that we are doing a bounding box search -- `@attr 4=201` means that the query contains coordinate strings -- `@attr 2=7` means that we are searching for records whose bounding box overlaps the query box specified at the end of the query - -!!! Notes - - - Z3950 servers must be configured for GeoNetwork in `INSTALL_DIR/web/geonetwork/WEB-INF/classes/JZKitConfig.xml.tem` - - every time the harvester runs, it will remove previously harvested records and create new ones. 
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png new file mode 100644 index 00000000000..258c163bfda Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png new file mode 100644 index 00000000000..e6e484359b9 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png new file mode 100644 index 00000000000..0e0f0d66bfd Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png new file mode 100644 index 00000000000..002459bae7d Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png new file mode 100644 index 00000000000..31d60f997e7 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-harvester.png new file mode 100644 index 00000000000..5d50e1dce3e Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png new file mode 100644 index 00000000000..a6ad14e6a54 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png new file mode 100644 index 00000000000..2734781c718 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png new file mode 100644 index 00000000000..6f7af0255a9 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png new file mode 100644 index 00000000000..a326a4b7c79 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png new file mode 100644 index 00000000000..4b36e089b8d Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png 
b/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png
new file mode 100644
index 00000000000..bd3646bc0cf
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/harvester-history.png b/docs/manual/docs/user-guide/harvesting/img/harvester-history.png
new file mode 100644
index 00000000000..f9064c1a8f3
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvester-history.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png b/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png
new file mode 100644
index 00000000000..b311bb2ec8e
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/harvesters.png b/docs/manual/docs/user-guide/harvesting/img/harvesters.png
new file mode 100644
index 00000000000..bd008fdef7c
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvesters.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/index.md b/docs/manual/docs/user-guide/harvesting/index.md
index 46f52f782c5..abea85ff38c 100644
--- a/docs/manual/docs/user-guide/harvesting/index.md
+++ b/docs/manual/docs/user-guide/harvesting/index.md
@@ -6,7 +6,8 @@ Harvesting is the process of ingesting metadata from remote sources and storing
 
 The following sources can be harvested:
 
-- [GeoNetwork 2.0 Harvester](harvesting-geonetwork.md)
+- [GeoNetwork 2.1-3.X Harvester](harvesting-geonetwork.md)
+- [GeoNetwork 2.0 Harvester](harvesting-geonetwork-2.md)
 - [Harvesting CSW services](harvesting-csw.md)
 - [Harvesting OGC Services](harvesting-ogcwxs.md)
 - [Simple URL harvesting (opendata)](harvesting-simpleurl.md)
@@ -17,7 +18,6 @@
 - [GeoPortal REST Harvesting](harvesting-geoportal.md)
 - [THREDDS Harvesting](harvesting-thredds.md)
 - [WFS GetFeature Harvesting](harvesting-wfs-features.md)
-- [Z3950 Harvesting](harvesting-z3950.md)
 
 ## Mechanism overview
 
@@ -134,79 +134,45 @@
 The script will add the certificate to the JVM keystore, if you run it as follows:
 
     $ ./ssl_key_import.sh https_server_name 443
 
-## The main page
+## Harvesting page
 
-To access the harvesting main page you have to be logged in as an administrator. From the administration page, select the harvest shortcut. The harvesting main page will then be displayed.
+To access the harvesting main page you have to be logged in with the `Administrator` or `UserAdmin` profile. From the `Admin console` menu, select the option `Harvesting`.
 
-The page shows a list of the currently defined harvesters and a set of buttons for management functions. The meaning of each column in the list of harvesters is as follows:
+The page shows a list of the currently defined harvesters with information about their status:
 
-1. *Select* Check box to select one or more harvesters. The selected harvesters will be affected by the first row of buttons (activate, deactivate, run, remove). For example, if you select three harvesters and press the Remove button, they will all be removed.
-2. *Name* This is the harvester name provided by the administrator.
-3. *Type* The harvester type (eg. GeoNetwork, WebDAV etc\...).
-4. *Status* An icon showing current status. See [Harvesting Status and Error Icons](index.md#admin_harvesting_status) for the different icons and status descriptions.
-5. *Errors* An icon showing the result of the last harvesting run, which could have succeeded or not. See [Harvesting Status and Error Icons](index.md#admin_harvesting_status) for the different icons and error descriptions. Hovering the cursor over the icon will show detailed information about the last harvesting run.
-6. *Run at* and *Every*: Scheduling of harvester runs. Essentially the time of the day + how many hours between repeats and on which days the harvester will run.
-7. *Last run* The date, in ISO 8601 format, of the most recent harvesting run.
-8. *Operation* A list of buttons/links to operations on a harvester.
-    - Selecting *Edit* will allow you to change the parameters for a harvester.
-    - Selecting *Clone* will allow you to create a clone of this harvester and start editing the details of the clone.
-    - Selecting *History* will allow you to view/change the harvesting history for a harvester - see [Harvest History](index.md#harvest_history).
+![](img/harvesters.png)
 
-At the bottom of the list of harvesters are two rows of buttons. The first row contains buttons that can operate on a selected set of harvesters. You can select the harvesters you want to operate on using the check box in the Select column and then press one of these buttons. When the button finishes its action, the check boxes are cleared. Here is the meaning of each button:
+The following information is shown for each harvester:
 
-1. *Activate* When a new harvester is created, the status is *inactive*. Use this button to make it *active* and start the harvester(s) according to the schedule it has/they have been configured to use.
-2. *Deactivate* Stops the harvester(s). Note: this does not mean that currently running harvest(s) will be stopped. Instead, it means that the harvester(s) will not be scheduled to run again.
-3. *Run* Start the selected harvesters immediately. This is useful for testing harvester setups.
-4. *Remove* Remove all currently selected harvesters. A dialogue will ask the user to confirm the action.
+- **Last run**: Date on which the harvester was last run.
+- **Total**: The total number of metadata records found remotely. Records with the same identifier are counted as one.
+- **Updated**: Number of records that are present locally and were updated because their last modification date differed from the remote one.
+- **Unchanged**: Number of local records left unchanged because their remote last modification date has not changed.
 
-The second row contains general purpose buttons. Here is the meaning of each button:
+At the bottom of the harvester list there are the following buttons:
 
-1. *Back* Simply returns to the main administration page.
-2. *Add* This button creates a new harvester.
-3. *Refresh* Refreshes the current list of harvesters from the server. This can be useful to see if the harvesting list has been altered by someone else or to get the status of any running harvesters.
-4. *History* Show the harvesting history of all harvesters. See [Harvest History](index.md#harvest_history) for more details.
+1. *Harvest from*: Allows you to select the type of harvester to create.
+2. *Clone*: Creates a new harvester using the information of an existing harvester.
+3. *Refresh*: Refreshes the list of harvesters.
 
-## Harvesting Status and Error Icons {#admin_harvesting_status}
 
+### Adding new harvesters
 
-## Harvesting result tips
 
+To add a new harvester, click on the `Harvest from` button. A drop-down list with all available harvesting protocols will appear.
-When a harvester runs and completes, a tool tip showing detailed information about the harvesting process is shown in the **Errors** column for the harvester. If the harvester succeeded then hovering the cursor over the tool tip will show a table, with some rows labelled as follows: +![](img/add-harvester.png) -- **Total** - This is the total number of metadata found remotely. Metadata with the same id are considered as one. -- **Added** - Number of metadata added to the system because they were not present locally. -- **Removed** - Number of metadata that have been removed locally because they are not present in the remote server anymore. -- **Updated** - Number of metadata that are present locally but that needed to be updated because their last change date was different from the remote one. -- **Unchanged** - Local metadata left unchanged. Their remote last change date did not change. -- **Unknown schema** - Number of skipped metadata because their format was not recognised by GeoNetwork. -- **Unretrievable** - Number of metadata that were ready to be retrieved from the remote server but for some reason there was an exception during the data transfer process. -- **Bad Format** - Number of skipped metadata because they did not have a valid XML representation. -- **Does not validate** - Number of metadata which did not validate against their schema. These metadata were harvested with success but skipped due to the validation process. Usually, there is an option to force validation: if you want to harvest these metadata anyway, simply turn/leave it off. -- **Thumbnails/Thumbnails failed** - Number of metadata thumbnail images added/that could not be added due to some failure. -- **Metadata URL attribute used** - Number of layers/featuretypes/coverages that had a metadata URL that could be used to link to a metadata record (OGC Service Harvester only). -- **Services added** - Number of ISO19119 service records created and added to the catalogue (for THREDDS catalog harvesting only). -- **Collections added** - Number of collection dataset records added to the catalogue (for THREDDS catalog harvesting only). -- **Atomics added** - Number of atomic dataset records added to the catalogue (for THREDDS catalog harvesting only). -- **Subtemplates added** - Number of subtemplates (= fragment visible in the catalog) added to the metadata catalog. -- **Subtemplates removed** - Number of subtemplates (= fragment visible in the catalog) removed from the metadata catalog. -- **Fragments w/Unknown schema** - Number of fragments which have an unknown metadata schema. -- **Fragments returned** - Number of fragments returned by the harvester. -- **Fragments matched** - Number of fragments that had identifiers that in the template used by the harvester. -- **Existing datasets** - Number of metadata records for datasets that existed when the THREDDS harvester was run. -- **Records built** - Number of records built by the harvester from the template and fragments. -- **Could not insert** - Number of records that the harvester could not insert into the catalog (usually because the record was already present eg. in the Z3950 harvester this can occur if the same record is harvested from different servers). +You can choose the type of harvesting you want to do. Supported harvesters and details on what to do next can be found in the following sections. -## Adding new harvesters +### Harvester History {#harvest_history} -The Add button in the main page allows you to add new harvesters. 
A drop down list is then shown with all the available harvester protocols.
 
-You can choose the type of harvest you intend to perform and press *Add* to begin the process of adding the harvester. The supported harvesters and details of what to do next are in the following sections:
 
+Each time a harvester is run, a log file is generated recording what was harvested and/or what went wrong (e.g., an exception report). To view the harvester history, select a harvester in the list and open the `Harvester history` tab on the harvester page:
 
-## Harvest History {#harvest_history}
 
+![](img/harvester-history.png)
 
-Each time a harvester is run, it generates a status report of what was harvested and/or what went wrong (eg. exception report). These reports are stored in a table in the database used by GeoNetwork. The entire harvesting history for all harvesters can be recalled using the History button on the Harvesting Management page. The harvest history for an individual harvester can also be recalled using the History link in the Operations for that harvester.
 
+Once the harvester history is displayed, it is possible to download the log file of the harvester run and delete the harvester history.
 
-Once the harvest history has been displayed it is possible to:
 
+### Harvester records
 
-- expand the detail of any exceptions
-- sort the history by harvest date (or in the case of the history of all harvesters, by harvester name)
-- delete any history entry or the entire history
 
+When a harvester has been executed, you can see the list of harvested metadata and some statistics about it. Select a harvester in the list and open the `Metadata records` tab on the harvester page:
 
+![](img/harvester-statistics.png)
diff --git a/docs/manual/docs/user-guide/publishing/managing-privileges.md b/docs/manual/docs/user-guide/publishing/managing-privileges.md
index 7670dbff69e..2bb33525d01 100644
--- a/docs/manual/docs/user-guide/publishing/managing-privileges.md
+++ b/docs/manual/docs/user-guide/publishing/managing-privileges.md
@@ -16,11 +16,11 @@ Below is a brief description for each privilege to help you identify which ones
 
 **Publish**: Users in the specified group/s are able to view the metadata eg. if it matches search criteria entered by such a user.
 
-**Download**: Users in the specified group/s are able to download the data.
-
 **Interactive Map**: Users in the specified group/s are able to get an interactive map. The interactive map has to be created separately using a Web Map Server such as GeoServer, which is distributed with GeoNetwork.
 
-**Featured**: When randomly selected by GeoNetwork, the metadata record can appear in the `Featured` section of the GeoNetwork home page.
+**Download**: Users in the specified group/s are able to download the data.
+
+**Editing**: Users in the specified group/s are able to edit the metadata if they have the *editor* profile.
 
 **Notify**: Users in the specified group receive notification if data attached to the metadata record is downloaded.
 
@@ -57,12 +57,13 @@ Any user (logged in or not) can view the public metadata.
 
 An *administrator* can edit any metadata.
 
-A *reviewer* can edit a metadata if:
+A *reviewer* / *editor* can edit a metadata record if:
+
+* They are the metadata owner.
+
+* The metadata record has the editing privilege in the group(s) where the user is a *reviewer* / *editor*.
 
-- The metadata owner is member of one of the groups assigned to the reviewer.
-- They are the metadata owner.
-A *User Administrator* or an *Editor* can only edit metadata they created.
 
 # Setting Privileges
diff --git a/docs/manual/docs/user-guide/workflow/batchediting.md b/docs/manual/docs/user-guide/workflow/batchediting.md
index 8a399b621c1..919655b4f48 100644
--- a/docs/manual/docs/user-guide/workflow/batchediting.md
+++ b/docs/manual/docs/user-guide/workflow/batchediting.md
@@ -1,4 +1,4 @@
-# Updating a set of records {#batchediting}
+# Updating records from the edit console {#batchediting}
 
 From the editor board, editors can do batch editing on multiple records.
diff --git a/docs/manual/docs/user-guide/workflow/batchupdate-from-admin.md b/docs/manual/docs/user-guide/workflow/batchupdate-from-admin.md
index 9e065bb8df9..92b355364b8 100644
--- a/docs/manual/docs/user-guide/workflow/batchupdate-from-admin.md
+++ b/docs/manual/docs/user-guide/workflow/batchupdate-from-admin.md
@@ -1,4 +1,4 @@
-# Updating a set of records from the admin console {#batchupdate_from_admin}
+# Updating records from the Admin console {#batchupdate_from_admin}
 
 In some circumstances, users may want to update a set of records. Administrator users can, from the `admin console`, open the `tools`, `Batch process` page.
diff --git a/docs/manual/docs/user-guide/workflow/batchupdate-xsl.md b/docs/manual/docs/user-guide/workflow/batchupdate-xsl.md
index 09b373b4c84..37e3db5dc2d 100644
--- a/docs/manual/docs/user-guide/workflow/batchupdate-xsl.md
+++ b/docs/manual/docs/user-guide/workflow/batchupdate-xsl.md
@@ -1,4 +1,4 @@
-# Updating a set of records using XSLT {#batchupdate_xsl}
+# Updating records using XSLT {#batchupdate_xsl}
 
 ## Applying changes {#batch-process-apply}
diff --git a/docs/manual/docs/user-guide/workflow/batchupdate.md b/docs/manual/docs/user-guide/workflow/batchupdate.md
deleted file mode 100644
index f09a49bd4b5..00000000000
--- a/docs/manual/docs/user-guide/workflow/batchupdate.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# Updating a set of records {#batchupdate}
-
-In some circumstances, users may want to update a set of records. The catalog provides 2 options for that:
-
-- [Updating a set of records](batchediting.md)
-- [Updating a set of records from the admin console](batchupdate-from-admin.md)
-- [Updating a set of records using XSLT](batchupdate-xsl.md)
diff --git a/docs/manual/docs/user-guide/workflow/batchupdate/index.md b/docs/manual/docs/user-guide/workflow/batchupdate/index.md
new file mode 100644
index 00000000000..83b742f0e15
--- /dev/null
+++ b/docs/manual/docs/user-guide/workflow/batchupdate/index.md
@@ -0,0 +1,7 @@
+# Updating a set of records {#batchupdate}
+
+In some circumstances, users may want to update a set of records:
+
+- [Updating a set of records from the edit console](../batchediting.md)
+- [Updating a set of records from the admin console](../batchupdate-from-admin.md)
+- [Updating a set of records using XSLT](../batchupdate-xsl.md)
diff --git a/docs/manual/docs/user-guide/workflow/index.md b/docs/manual/docs/user-guide/workflow/index.md
index 451c2b9280a..7be9799b26c 100644
--- a/docs/manual/docs/user-guide/workflow/index.md
+++ b/docs/manual/docs/user-guide/workflow/index.md
@@ -5,4 +5,4 @@
 - [Life cycle](life-cycle.md)
 - [Publishing GIS data in a map server](geopublication.md)
 - [Versioning](versioning.md)
-- [Updating a set of records](batchupdate.md)
+- [Updating a set of records](batchupdate/index.md)
diff --git a/docs/manual/docs/user-guide/workflow/life-cycle.md b/docs/manual/docs/user-guide/workflow/life-cycle.md
index c1bf876157f..70416aa4511 100644
--- a/docs/manual/docs/user-guide/workflow/life-cycle.md
+++ b/docs/manual/docs/user-guide/workflow/life-cycle.md
@@ -1,26 +1,29 @@
 # Life cycle
 
-## Record life cycle
+Metadata records can have a lifecycle that typically goes through one or more states. This is an optional feature that
+can be activated on demand (see [Activate the metadata workflow](#activate-workflow)).
 
-Metadata records have a lifecycle that typically goes through one or more states. For example, when a record is:
+For example, when a record is:
 
 - created and edited by an `Editor` it is in the `Draft` state.
-- being reviewed by a `content reviewer`, or a review is requested brings the record to `Submitted` state.
+- being reviewed by a `Content reviewer`, or having a review requested, it is in the `Submitted` state.
 - completed and corrected by the `Content Reviewer` it is in the `Approved` state.
 - superseded or replaced the state is `Retired`.
 
-The catalog has (an extensible) set of states that a metadata record can have:
+The catalog has a set of states that a metadata record can have:
 
-- `Unknown` - this is the default state - nothing is known about the status of the metadata record.
 - `Draft` - the record is under construction or being edited.
 - `Submitted` - the record has been submitted for approval to a content review.
 - `Approved` - the content reviewer has reviewed and approved the metadata record.
-- `Rejected` - the content reviewer has reviewed and rejected the metadata record.
 - `Retired` - the record has been retired.
 
-Workflow can be enabled for the full catalogue, certain groups or on an individual record level.
+When the metadata workflow is activated, the existing records are set to a special status, `Unknown`.
 
-In the last case, to enable workflow and change the status from `Unknown` to `Draft`, click the `enable workflow` button in the metadata view:
+## Activate the metadata workflow {#activate-workflow}
+
+To enable the record life cycle, activate the metadata workflow. It can be activated for the full catalogue, certain groups, or on an individual record.
+
+To activate the workflow for an individual record, changing its status from `Unknown` to `Draft`, click the `Enable workflow` button in the metadata view:
 
 ![](img/workflow-enable.png)
 
@@ -28,10 +31,11 @@ In the last case, to enable workflow and change the status from `Unknown` to `Dr
 
 To use the workflow for metadata records created before enabling it, you must use the above option.
 
+To enable workflow for the full catalogue or certain groups, check `Administration` --> `Settings` --> `Metadata Workflow`. In workflow mode, when approved records are modified, you are working on a copy of the approved record. Changes to the record will not be visible to users outside your group until the modified record is approved again.
 
-To enable workflow for the full catalogue or certain groups, check Administration --> Settings --> Metadata Workflow. In workflow mode, in case approved records are modified, you're working on a copy of the approved record. Changes on the record will not be visible to users outside your group until the modified record is approved again.
+## Usage
 
-When done editing you can submit a record for review by a content reviewer. The submit button is available on the `manage record` menu in the metadata view. A popup will open in which you can leave a message for the content reviewer.
+When done editing, you can submit a record for review by a content reviewer. The submit button is available on the `Manage record` menu in the metadata view. A popup will open in which you can leave a message for the content reviewer.
 
 ![](img/submit-for-review.png)
diff --git a/docs/manual/mkdocs.yml b/docs/manual/mkdocs.yml
index 9bc8ec5332e..f25d6a2ca22 100644
--- a/docs/manual/mkdocs.yml
+++ b/docs/manual/mkdocs.yml
@@ -39,17 +39,18 @@ theme:
         icon: material/weather-sunny
         name: Switch to light mode
   features:
-    - content.action.view
-    - content.action.edit
-    - content.code.copy
-    - content.tabs.link
-    - navigation.tabs
+    - toc.follow
     - navigation.tracking
+    - navigation.top
+    - navigation.tabs
     - navigation.prune
     - navigation.indexes
-    - toc.follow
-    - navigation.top
     - navigation.footer
+    - header.autohide
+    - content.tabs.link
+    - content.code.copy
+    - content.action.view
+    - content.action.edit
     - announce.dismiss
 
 # Plugins - install using: pip3 install -r requirements.txt
@@ -65,11 +66,11 @@ plugins:
           name: English
           build: true
           default: true
-          site_name: 'GeoNetwork opensource (EN)'
+          site_name: 'GeoNetwork opensource'
         - locale: fr
           name: Français
           build: !ENV [FRENCH,true]
-          site_name: 'GeoNetwork opensource (FR)'
+          site_name: 'GeoNetwork opensource'
           site_description: Catalogue GeoNetwork pour répertorier, rechercher et examiner les enregistrements.
nav_translations: Home: Home @@ -83,6 +84,7 @@ extra: version: provider: mike default: stable + alias: true homepage: https://geonetwork-opensource.org/ social: - icon: fontawesome/brands/github @@ -147,13 +149,22 @@ nav: - overview/authors.md - 'Changelog': - overview/change-log/index.md - - 'Latest': - - overview/change-log/latest/index.md + - overview/change-log/version-4.4.6.md + - overview/change-log/version-4.2.11.md + - 'Release History': + - overview/change-log/history/index.md + - overview/change-log/version-4.4.6.md + - overview/change-log/version-4.4.5.md + - overview/change-log/version-4.4.4.md + - overview/change-log/version-4.4.3.md - overview/change-log/version-4.4.2.md - overview/change-log/version-4.4.1.md - overview/change-log/version-4.4.0.md - - 'Stable': - - overview/change-log/stable/index.md + - 'Release History': + - overview/change-log/history/index.md + - overview/change-log/version-4.2.11.md + - overview/change-log/version-4.2.10.md + - overview/change-log/version-4.2.9.md - overview/change-log/version-4.2.8.md - overview/change-log/version-4.2.7.md - overview/change-log/version-4.2.6.md @@ -163,8 +174,7 @@ nav: - overview/change-log/version-4.2.2.md - overview/change-log/version-4.2.1.md - overview/change-log/version-4.2.0.md - - 'Maintenance': - - overview/change-log/maintenance/index.md + - overview/change-log/version-3.12.12.md - overview/change-log/version-3.12.11.md - overview/change-log/version-3.12.10.md - overview/change-log/version-3.12.9.md @@ -177,8 +187,6 @@ nav: - overview/change-log/version-3.12.2.md - overview/change-log/version-3.12.1.md - overview/change-log/version-3.12.0.md - - 'Archive': - - overview/change-log/archive/index.md - overview/change-log/version-4.0.6.md - overview/change-log/version-4.0.5.md - overview/change-log/version-4.0.4.md @@ -243,19 +251,21 @@ nav: - user-guide/describing-information/multilingual-editing.md - 'Associating Resources': - user-guide/associating-resources/index.md - - user-guide/associating-resources/cite.md + - 'Associating documents': + - user-guide/associating-resources/linking-documents.md + - user-guide/associating-resources/using-filestore.md + - user-guide/associating-resources/linking-thumbnail.md + - user-guide/associating-resources/linking-online-resources.md + - 'Associating another record': + - user-guide/associating-resources/linking-records.md + - user-guide/associating-resources/linking-parent.md + - user-guide/associating-resources/linking-source.md + - user-guide/associating-resources/linking-dataset-or-service.md + - user-guide/associating-resources/linking-feature-catalog.md + - user-guide/associating-resources/linking-others.md + - user-guide/associating-resources/linking-remote-records.md - user-guide/associating-resources/doi.md - - user-guide/associating-resources/linking-dataset-or-service.md - - user-guide/associating-resources/linking-documents.md - - user-guide/associating-resources/linking-feature-catalog.md - - user-guide/associating-resources/linking-online-resources.md - - user-guide/associating-resources/linking-others.md - - user-guide/associating-resources/linking-parent.md - - user-guide/associating-resources/linking-records.md - - user-guide/associating-resources/linking-remote-records.md - - user-guide/associating-resources/linking-source.md - - user-guide/associating-resources/linking-thumbnail.md - - user-guide/associating-resources/using-filestore.md + - user-guide/associating-resources/cite.md - 'Tag information': - user-guide/tag-information/index.md - 
user-guide/tag-information/tagging-with-categories.md @@ -271,10 +281,11 @@ nav: - user-guide/analyzing/metadata.md - 'Workflow': - user-guide/workflow/index.md - - user-guide/workflow/batchediting.md - - user-guide/workflow/batchupdate-from-admin.md - - user-guide/workflow/batchupdate-xsl.md - - user-guide/workflow/batchupdate.md + - 'Updating a set of records': + - user-guide/workflow/batchupdate/index.md + - user-guide/workflow/batchediting.md + - user-guide/workflow/batchupdate-from-admin.md + - user-guide/workflow/batchupdate-xsl.md - user-guide/workflow/geopublication.md - user-guide/workflow/life-cycle.md - user-guide/workflow/suggestion.md @@ -285,6 +296,7 @@ nav: - user-guide/harvesting/harvesting-csw.md - user-guide/harvesting/harvesting-filesystem.md - user-guide/harvesting/harvesting-geonetwork.md + - user-guide/harvesting/harvesting-geonetwork-2.md - user-guide/harvesting/harvesting-geoportal.md - user-guide/harvesting/harvesting-oaipmh.md - user-guide/harvesting/harvesting-ogcwxs.md @@ -293,7 +305,6 @@ nav: - user-guide/harvesting/harvesting-thredds.md - user-guide/harvesting/harvesting-webdav.md - user-guide/harvesting/harvesting-wfs-features.md - - user-guide/harvesting/harvesting-z3950.md - user-guide/export/index.md - 'Administration': - administrator-guide/index.md diff --git a/docs/manual/overrides/main.html b/docs/manual/overrides/main.html new file mode 100644 index 00000000000..f2432066c88 --- /dev/null +++ b/docs/manual/overrides/main.html @@ -0,0 +1,6 @@ +{% extends "base.html" %} + +{% block outdated %} + GeoNetwork 4.4 is latest. + Click here to go to stable. +{% endblock %} diff --git a/docs/manual/pom.xml b/docs/manual/pom.xml index 68654e27401..a0d9a973a96 100644 --- a/docs/manual/pom.xml +++ b/docs/manual/pom.xml @@ -27,7 +27,7 @@ gn-docs org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 gn-guide diff --git a/docs/pom.xml b/docs/pom.xml index e1c5e3b5c5c..0621811afc1 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 gn-docs diff --git a/doi/pom.xml b/doi/pom.xml index e0fb8dcbf9f..21348549305 100644 --- a/doi/pom.xml +++ b/doi/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java b/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java index 46cb4ab7b99..934895b2cdd 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -94,14 +94,22 @@ protected void create(String url, String body, String contentType, url, body, status, httpResponse.getStatusText(), responseBody); Log.info(LOGGER_NAME, message); - throw new DoiClientException(message); + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } else { Log.info(LOGGER_NAME, String.format( successMessage, url)); } } catch (Exception ex) { 
Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error creating DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{ex.getMessage()}); } finally { if (postMethod != null) { @@ -139,13 +147,24 @@ protected String retrieve(String url) } else { Log.info(LOGGER_NAME, "Retrieve DOI metadata end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() + - CharStreams.toString(new InputStreamReader(httpResponse.getBody()))); + String message = httpResponse.getStatusText() + + CharStreams.toString(new InputStreamReader(httpResponse.getBody())); + + throw new DoiClientException(String.format( + "Error retrieving DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorRetrieve") + .withDescriptionKey("exception.doi.serverErrorRetrieve.description", new String[]{message}); + } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error retrieving DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorRetrieve") + .withDescriptionKey("exception.doi.serverErrorRetrieve.description", new String[]{ex.getMessage()}); } finally { if (getMethod != null) { diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java index 589d7f137a9..73317a4b122 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -24,8 +24,6 @@ import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.io.IOUtils; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.methods.HttpDelete; import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.utils.GeonetHttpRequestFactory; @@ -179,14 +177,24 @@ public void deleteDoiMetadata(String doi) if ((status != HttpStatus.SC_NOT_FOUND) && (status != HttpStatus.SC_OK)) { Log.info(LOGGER_NAME, "Delete DOI metadata end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() ); + String message = httpResponse.getStatusText(); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); } else { Log.info(LOGGER_NAME, "DeleteDOI metadata end"); } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorDelete") + 
.withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{ex.getMessage()}); } finally { if (deleteMethod != null) { @@ -219,14 +227,25 @@ public void deleteDoi(String doi) if ((status != HttpStatus.SC_NOT_FOUND) && (status != HttpStatus.SC_OK)) { Log.info(LOGGER_NAME, "Delete DOI end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() ); + String message = httpResponse.getStatusText(); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); } else { Log.info(LOGGER_NAME, "DeleteDOI end"); } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{ex.getMessage()}); } finally { if (deleteMethod != null) { diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java index 87871c21d72..012c710585e 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2010 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -32,8 +32,8 @@ import org.fao.geonet.domain.*; import org.fao.geonet.kernel.AccessManager; import org.fao.geonet.kernel.ApplicableSchematron; -import org.fao.geonet.kernel.DataManager; import org.fao.geonet.kernel.SchematronValidator; +import org.fao.geonet.kernel.datamanager.base.BaseMetadataManager; import org.fao.geonet.kernel.datamanager.base.BaseMetadataSchemaUtils; import org.fao.geonet.kernel.datamanager.base.BaseMetadataUtils; import org.fao.geonet.kernel.schema.MetadataSchema; @@ -41,12 +41,10 @@ import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.kernel.setting.SettingManager; import org.fao.geonet.repository.SchematronRepository; -import org.fao.geonet.utils.Log; import org.fao.geonet.utils.Xml; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.Namespace; -import org.springframework.beans.factory.annotation.Autowired; import java.io.IOException; import java.nio.file.Files; @@ -60,11 +58,9 @@ /** * Class to register/unregister DOIs using the Datacite Metadata Store (MDS) API. + *

+ * See ... * - * See https://support.datacite.org/docs/mds-api-guide - * - * @author Jose García - * @author Francois Prunayre */ public class DoiManager { private static final String DOI_ADD_XSL_PROCESS = "process/doi-add.xsl"; @@ -75,112 +71,52 @@ public class DoiManager { public static final String DOI_DEFAULT_URL = "https://doi.org/"; public static final String DOI_DEFAULT_PATTERN = "{{uuid}}"; - private IDoiClient client; - private String doiPrefix; - private String doiPattern; - private String landingPageTemplate; - private boolean initialised = false; - private boolean isMedra = false; - - DataManager dm; - SettingManager sm; - BaseMetadataSchemaUtils schemaUtils; - - @Autowired - BaseMetadataUtils metadataUtils; - - @Autowired - SchematronValidator validator; - - @Autowired - DoiBuilder doiBuilder; - - @Autowired - SchematronRepository schematronRepository; + private final SettingManager sm; + private final BaseMetadataSchemaUtils schemaUtils; + private final BaseMetadataManager metadataManager; + private final BaseMetadataUtils metadataUtils; + private final SchematronValidator validator; + private final DoiBuilder doiBuilder; + private final SchematronRepository schematronRepository; + + + public DoiManager(final SettingManager sm, final BaseMetadataSchemaUtils schemaUtils, + final BaseMetadataManager metadataManager, final BaseMetadataUtils metadataUtils, + final SchematronValidator validator, final DoiBuilder doiBuilder, + final SchematronRepository schematronRepository) { + this.sm = sm; + this.schemaUtils = schemaUtils; + this.metadataManager = metadataManager; + this.metadataUtils = metadataUtils; + this.validator = validator; + this.doiBuilder = doiBuilder; + this.schematronRepository = schematronRepository; - - public DoiManager() { - sm = ApplicationContextHolder.get().getBean(SettingManager.class); - dm = ApplicationContextHolder.get().getBean(DataManager.class); - schemaUtils = ApplicationContextHolder.get().getBean(BaseMetadataSchemaUtils.class); - - loadConfig(); } - public boolean isInitialised() { - return initialised; + private IDoiClient createDoiClient(DoiServer doiServer) { + boolean isMedra = isMedraServer(doiServer); + return isMedra ? + new DoiMedraClient(doiServer.getUrl(), doiServer.getUsername(), doiServer.getPassword(), doiServer.getPublicUrl()) : + new DoiDataciteClient(doiServer.getUrl(), doiServer.getUsername(), doiServer.getPassword(), doiServer.getPublicUrl()); } - /** - * Check parameters and build the client. 
- * - */ - public void loadConfig() { - initialised = false; - if (sm != null) { - - String serverUrl = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIURL); - String doiPublicUrl = StringUtils.defaultIfEmpty( - sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPUBLICURL), - DOI_DEFAULT_URL); - String username = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIUSERNAME); - String password = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPASSWORD); - - doiPrefix = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIKEY); - doiPattern = StringUtils.defaultIfEmpty( - sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPATTERN), - DOI_DEFAULT_PATTERN - ); - - landingPageTemplate = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_LANDING_PAGE_TEMPLATE); - - final boolean emptyUrl = StringUtils.isEmpty(serverUrl); - final boolean emptyUsername = StringUtils.isEmpty(username); - final boolean emptyPassword = StringUtils.isEmpty(password); - final boolean emptyPrefix = StringUtils.isEmpty(doiPrefix); - if (emptyUrl || - emptyUsername || - emptyPassword || - emptyPrefix) { - StringBuilder report = new StringBuilder("DOI configuration is not complete. Check in System Configuration to fill the DOI configuration."); - if (emptyUrl) { - report.append("\n* URL MUST be set"); - } - if (emptyUsername) { - report.append("\n* Username MUST be set"); - } - if (emptyPassword) { - report.append("\n* Password MUST be set"); - } - if (emptyPrefix) { - report.append("\n* Prefix MUST be set"); - } - Log.warning(DoiSettings.LOGGER_NAME, - report.toString()); - } else { - Log.debug(DoiSettings.LOGGER_NAME, - "DOI configuration looks perfect."); - isMedra = serverUrl.contains(MEDRA_SEARCH_KEY); - this.client = - isMedra ? - new DoiMedraClient(serverUrl, username, password, doiPublicUrl) : - new DoiDataciteClient(serverUrl, username, password, doiPublicUrl); - initialised = true; - } - } - } + public String checkDoiUrl(DoiServer doiServer, AbstractMetadata metadata) throws DoiClientException { + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); - public String checkDoiUrl(AbstractMetadata metadata) { - return doiBuilder.create(doiPattern, doiPrefix, metadata); + return doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); } - public Map check(ServiceContext serviceContext, AbstractMetadata metadata, Element dataciteMetadata) throws Exception { + public Map check(ServiceContext serviceContext, DoiServer doiServer, AbstractMetadata metadata, Element dataciteMetadata) throws Exception { Map conditions = new HashMap<>(); - checkInitialised(); + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); conditions.put(DoiConditions.API_CONFIGURED, true); - String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); - checkPreConditions(metadata, doi); + IDoiClient doiClient = createDoiClient(doiServer); + String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); + checkPreConditions(doiClient, metadata, doi); conditions.put(DoiConditions.RECORD_IS_PUBLIC, true); conditions.put(DoiConditions.STANDARD_SUPPORT, true); @@ -188,26 +124,26 @@ public Map check(ServiceContext serviceContext, AbstractMetadat // ** Convert to DataCite format Element dataciteFormatMetadata = dataciteMetadata == null ? 
- convertXmlToDataCiteFormat(metadata.getDataInfo().getSchemaId(), - metadata.getXmlData(false), doi) : dataciteMetadata; - checkPreConditionsOnDataCite(metadata, doi, dataciteFormatMetadata, serviceContext.getLanguage()); + convertXmlToDataCiteFormat(doiServer, metadata.getDataInfo().getSchemaId(), + metadata.getXmlData(false), doi) : dataciteMetadata; + checkPreConditionsOnDataCite(doiClient, metadata, doi, dataciteFormatMetadata, serviceContext.getLanguage()); conditions.put(DoiConditions.DATACITE_FORMAT_IS_VALID, true); return conditions; } - public Map register(ServiceContext context, AbstractMetadata metadata) throws Exception { + public Map register(ServiceContext context, DoiServer doiServer, AbstractMetadata metadata) throws Exception { Map doiInfo = new HashMap<>(3); // The new DOI for this record - String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); + String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); doiInfo.put("doi", doi); // The record in datacite format Element dataciteFormatMetadata = - convertXmlToDataCiteFormat(metadata.getDataInfo().getSchemaId(), - metadata.getXmlData(false), doi); + convertXmlToDataCiteFormat(doiServer, metadata.getDataInfo().getSchemaId(), + metadata.getXmlData(false), doi); try { - check(context, metadata, dataciteFormatMetadata); + check(context, doiServer, metadata, dataciteFormatMetadata); } catch (ResourceAlreadyExistException ignore) { // Update DOI doiInfo.put("update", "true"); @@ -215,7 +151,8 @@ public Map register(ServiceContext context, AbstractMetadata met throw e; } - createDoi(context, metadata, doiInfo, dataciteFormatMetadata); + IDoiClient doiClient = createDoiClient(doiServer); + createDoi(context, doiClient, doiServer, metadata, doiInfo, dataciteFormatMetadata); checkDoiCreation(metadata, doiInfo); return doiInfo; @@ -230,7 +167,7 @@ public Map register(ServiceContext context, AbstractMetadata met * @throws IOException * @throws JDOMException */ - private void checkPreConditions(AbstractMetadata metadata, String doi) throws DoiClientException, IOException, JDOMException, ResourceAlreadyExistException { + private void checkPreConditions(IDoiClient doiClient, AbstractMetadata metadata, String doi) throws DoiClientException, IOException, JDOMException, ResourceAlreadyExistException { // Record MUST be public AccessManager am = ApplicationContextHolder.get().getBean(AccessManager.class); boolean visibleToAll = false; @@ -239,11 +176,11 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do } catch (Exception e) { throw new DoiClientException(String.format( "Failed to check if record '%s' is visible to all for DOI creation." + - " Error is %s.", + " Error is %s.", metadata.getUuid(), e.getMessage())) .withMessageKey("exception.doi.failedVisibilityCheck") .withDescriptionKey("exception.doi.failedVisibilityCheck.description", - new String[]{ metadata.getUuid(), e.getMessage() }); + new String[]{metadata.getUuid(), e.getMessage()}); } if (!visibleToAll) { @@ -251,7 +188,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do "Record '%s' is not public and we cannot request a DOI for such a record. 
Publish this record first.", metadata.getUuid())) .withMessageKey("exception.doi.recordNotPublic") - .withDescriptionKey("exception.doi.recordNotPublic.description", new String[]{ metadata.getUuid() }); + .withDescriptionKey("exception.doi.recordNotPublic.description", new String[]{metadata.getUuid()}); } // Record MUST not contains a DOI @@ -259,7 +196,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do String currentDoi = metadataUtils.getDoi(metadata.getUuid()); if (StringUtils.isNotEmpty(currentDoi)) { // Current doi does not match the one going to be inserted. This is odd - String newDoi = client.createPublicUrl(doi); + String newDoi = doiClient.createPublicUrl(doi); if (!currentDoi.equals(newDoi)) { throw new DoiClientException(String.format( "Record '%s' already contains a DOI %s which is not equal " + @@ -269,7 +206,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do "an existing DOI.", metadata.getUuid(), currentDoi, currentDoi, newDoi)) .withMessageKey("exception.doi.resourcesContainsDoiNotEqual") - .withDescriptionKey("exception.doi.resourcesContainsDoiNotEqual.description", new String[]{ metadata.getUuid(), currentDoi, currentDoi, newDoi }); + .withDescriptionKey("exception.doi.resourcesContainsDoiNotEqual.description", new String[]{metadata.getUuid(), currentDoi, currentDoi, newDoi}); } throw new ResourceAlreadyExistException(String.format( @@ -279,7 +216,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do metadata.getUuid(), currentDoi, currentDoi)) .withMessageKey("exception.doi.resourceContainsDoi") .withDescriptionKey("exception.doi.resourceContainsDoi.description", - new String[]{ metadata.getUuid(), currentDoi, currentDoi }); + new String[]{metadata.getUuid(), currentDoi, currentDoi}); } } catch (ResourceNotFoundException e) { final MetadataSchema schema = schemaUtils.getSchema(metadata.getDataInfo().getSchemaId()); @@ -299,24 +236,23 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do schema.getName())) .withMessageKey("exception.doi.missingSavedquery") .withDescriptionKey("exception.doi.missingSavedquery.description", - new String[]{ metadata.getUuid(), schema.getName(), - SavedQuery.DOI_GET, e.getMessage(), - schema.getName() }); + new String[]{metadata.getUuid(), schema.getName(), + SavedQuery.DOI_GET, e.getMessage(), + schema.getName()}); } } /** * Check conditions on DataCite side. + * * @param metadata * @param doi * @param dataciteMetadata * @param language */ - private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, Element dataciteMetadata, String language) throws DoiClientException, ResourceAlreadyExistException { + private void checkPreConditionsOnDataCite(IDoiClient doiClient, AbstractMetadata metadata, String doi, Element dataciteMetadata, String language) throws DoiClientException, ResourceAlreadyExistException { // * DataCite API is up an running ? - - try { List validations = new ArrayList<>(); List applicableSchematron = Lists.newArrayList(); @@ -341,7 +277,7 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, StringBuilder message = new StringBuilder(); if (!failures.isEmpty()) { message.append("

    "); - failures.forEach(f -> message.append("
  • ").append(((Element)f).getTextNormalize()).append("
  • ")); + failures.forEach(f -> message.append("
  • ").append(((Element) f).getTextNormalize()).append("
  • ")); message.append("
"); throw new DoiClientException(String.format( @@ -349,9 +285,9 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), failures.size(), message)) .withMessageKey("exception.doi.recordNotConformantMissingInfo") .withDescriptionKey("exception.doi.recordNotConformantMissingInfo.description", - new String[]{ metadata.getUuid(), String.valueOf(failures.size()), message.toString() }); + new String[]{metadata.getUuid(), String.valueOf(failures.size()), message.toString()}); } - } catch (IOException|JDOMException e) { + } catch (IOException | JDOMException e) { throw new DoiClientException(String.format( "Record '%s' is not conform with DataCite validation rules for mandatory fields. Error is: %s. " + "Required fields in DataCite are: identifier, creators, titles, publisher, publicationYear, resourceType. " + @@ -360,7 +296,7 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid())) .withMessageKey("exception.doi.recordNotConformantMissingMandatory") .withDescriptionKey("exception.doi.recordNotConformantMissingMandatory.description", - new String[]{ metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid() }); + new String[]{metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid()}); } // XSD validation @@ -375,24 +311,24 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid())) .withMessageKey("exception.doi.recordInvalid") .withDescriptionKey("exception.doi.recordInvalid.description", - new String[]{ metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid() }); + new String[]{metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid()}); } // * MDS / DOI does not exist already // curl -i --user username:password https://mds.test.datacite.org/doi/10.5072/GN // Return 404 - final String doiResponse = client.retrieveDoi(doi); + final String doiResponse = doiClient.retrieveDoi(doi); if (doiResponse != null) { throw new ResourceAlreadyExistException(String.format( "Record '%s' looks to be already published on DataCite with DOI '%s'. DOI on Datacite point to: %s. " + "If the DOI is not correct, remove it from the record and ask for a new one.", metadata.getUuid(), - client.createUrl("doi") + "/" + doi, + doiClient.createUrl("doi") + "/" + doi, doi, doi, doiResponse)) .withMessageKey("exception.doi.resourceAlreadyPublished") - .withDescriptionKey("exception.doi.resourceAlreadyPublished.description", new String[]{ metadata.getUuid(), - client.createUrl("doi") + "/" + doi, - doi, doi, doiResponse }); + .withDescriptionKey("exception.doi.resourceAlreadyPublished.description", new String[]{metadata.getUuid(), + doiClient.createUrl("doi") + "/" + doi, + doi, doi, doiResponse}); } // TODO: Could be relevant at some point to return states (draft/findable) @@ -404,10 +340,12 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, /** * Use the DataCite API to register the new DOI. 
+ * * @param context * @param metadata */ - private void createDoi(ServiceContext context, AbstractMetadata metadata, Map doiInfo, Element dataciteMetadata) throws Exception { + private void createDoi(ServiceContext context, IDoiClient doiClient, DoiServer doiServer, + AbstractMetadata metadata, Map doiInfo, Element dataciteMetadata) throws Exception { // * Now, let's create the DOI // picking a DOI name, @@ -418,29 +356,30 @@ private void createDoi(ServiceContext context, AbstractMetadata metadata, Map doi } - public void unregisterDoi(AbstractMetadata metadata, ServiceContext context) throws DoiClientException, ResourceNotFoundException { - checkInitialised(); + public void unregisterDoi(DoiServer doiServer, AbstractMetadata metadata, ServiceContext context) throws DoiClientException, ResourceNotFoundException { + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); - final String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); - final String doiResponse = client.retrieveDoi(doi); + IDoiClient doiClient = createDoiClient(doiServer); + final String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); + final String doiResponse = doiClient.retrieveDoi(doi); if (doiResponse == null) { throw new ResourceNotFoundException(String.format( "Record '%s' is not available on DataCite. DOI '%s' does not exist.", @@ -467,31 +408,40 @@ public void unregisterDoi(AbstractMetadata metadata, ServiceContext context) thr Element md = metadata.getXmlData(false); String doiUrl = metadataUtils.getDoi(metadata.getUuid()); - client.deleteDoiMetadata(doi); - client.deleteDoi(doi); + doiClient.deleteDoiMetadata(doi); + doiClient.deleteDoi(doi); Element recordWithoutDoi = removeDOIValue(doiUrl, metadata.getDataInfo().getSchemaId(), md); - dm.updateMetadata(context, metadata.getId() + "", recordWithoutDoi, false, true, + metadataManager.updateMetadata(context, metadata.getId() + "", recordWithoutDoi, false, true, context.getLanguage(), new ISODate().toString(), true, IndexingMode.full); } catch (Exception ex) { - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error unregistering DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorUnregister") + .withDescriptionKey("exception.doi.serverErrorUnregister.description", new String[]{ex.getMessage()}); } } /** * Sets the DOI URL value in the metadata record using the process DOI_ADD_XSL_PROCESS. - * */ - public Element setDOIValue(String doi, String schema, Element md) throws Exception { - Path styleSheet = dm.getSchemaDir(schema).resolve(DOI_ADD_XSL_PROCESS); + public Element setDOIValue(IDoiClient doiClient, String doi, String schema, Element md) throws Exception { + Path styleSheet = schemaUtils.getSchemaDir(schema).resolve(DOI_ADD_XSL_PROCESS); boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To create a DOI, the schema has to defined how to insert a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", - schema, DOI_ADD_XSL_PROCESS)); + String message = String.format("To create a DOI, the schema has to defined how to insert a DOI in the record. The schema_plugins/%s/process/%s was not found. 
Create the XSL transformation.", + schema, DOI_ADD_XSL_PROCESS); + + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } - String doiPublicUrl = client.createPublicUrl(""); + String doiPublicUrl = doiClient.createPublicUrl(""); Map params = new HashMap<>(1); params.put("doi", doi); @@ -501,14 +451,20 @@ public Element setDOIValue(String doi, String schema, Element md) throws Excepti /** * Sets the DOI URL value in the metadata record using the process DOI_ADD_XSL_PROCESS. - * */ public Element removeDOIValue(String doi, String schema, Element md) throws Exception { - Path styleSheet = dm.getSchemaDir(schema).resolve(DOI_REMOVE_XSL_PROCESS); + Path styleSheet = schemaUtils.getSchemaDir(schema).resolve(DOI_REMOVE_XSL_PROCESS); boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To remove a DOI, the schema has to defined how to remove a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", - schema, DOI_REMOVE_XSL_PROCESS)); + String message = String.format("To remove a DOI, the schema has to defined how to remove a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", + schema, DOI_REMOVE_XSL_PROCESS); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); + } Map params = new HashMap<>(1); @@ -523,24 +479,68 @@ public Element removeDOIValue(String doi, String schema, Element md) throws Exce * @return The record converted into the DataCite format. * @throws Exception if there is no conversion available. */ - private Element convertXmlToDataCiteFormat(String schema, Element md, String doi) throws Exception { - final Path styleSheet = dm.getSchemaDir(schema).resolve( - isMedra ? DATACITE_MEDRA_XSL_CONVERSION_FILE : DATACITE_XSL_CONVERSION_FILE); + private Element convertXmlToDataCiteFormat(DoiServer doiServer, String schema, Element md, String doi) throws Exception { + final Path styleSheet = schemaUtils.getSchemaDir(schema).resolve( + isMedraServer(doiServer) ? DATACITE_MEDRA_XSL_CONVERSION_FILE : DATACITE_XSL_CONVERSION_FILE); final boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To create a DOI, the record needs to be converted to the DataCite format (https://schema.datacite.org/). You need to create a formatter for this in schema_plugins/%s/%s. If the standard is a profile of ISO19139, you can simply point to the ISO19139 formatter.", - schema, DATACITE_XSL_CONVERSION_FILE)); + String message = String.format("To create a DOI, the record needs to be converted to the DataCite format (https://schema.datacite.org/). You need to create a formatter for this in schema_plugins/%s/%s. 
If the standard is a profile of ISO19139, you can simply point to the ISO19139 formatter.", + schema, DATACITE_XSL_CONVERSION_FILE); + + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } - Map params = new HashMap<>(); + Map params = new HashMap<>(); params.put(DOI_ID_PARAMETER, doi); return Xml.transform(md, styleSheet, params); } - private void checkInitialised() throws DoiClientException { - if (!initialised) { - throw new DoiClientException("DOI configuration is not complete. Check System Configuration and set the DOI configuration."); + private void checkInitialised(DoiServer doiServer) throws DoiClientException { + final boolean emptyUrl = StringUtils.isEmpty(doiServer.getUrl()); + final boolean emptyUsername = StringUtils.isEmpty(doiServer.getUsername()); + final boolean emptyPassword = StringUtils.isEmpty(doiServer.getPassword()); + final boolean emptyPrefix = StringUtils.isEmpty(doiServer.getPrefix()); + + if (emptyUrl || + emptyUsername || + emptyPassword || + emptyPrefix) { + throw new DoiClientException("DOI server configuration is not complete. Check the DOI server configuration to complete it.") + .withMessageKey("exception.doi.configurationMissing") + .withDescriptionKey("exception.doi.configurationMissing.description", new String[]{}); + + } + } + + /** + * Checks if the DOI server can handle the metadata: + * - The DOI server is not publishing metadata for certain metadata group(s) or + * - it publishes metadata from the metadata group owner. + * + * @param doiServer The DOI server. + * @param metadata The metadata to process. + * @throws DoiClientException + */ + private void checkCanHandleMetadata(DoiServer doiServer, AbstractMetadata metadata) throws DoiClientException { + if (!doiServer.getPublicationGroups().isEmpty()) { + Integer groupOwner = metadata.getSourceInfo().getGroupOwner(); + + if (doiServer.getPublicationGroups().stream().noneMatch(g -> g.getId() == groupOwner)) { + throw new DoiClientException( + String.format("DOI server '%s' can not handle the metadata with UUID '%s'.", + doiServer.getName(), metadata.getUuid())) + .withMessageKey("exception.doi.serverCanNotHandleRecord") + .withDescriptionKey("exception.doi.serverCanNotHandleRecord.description", new String[]{doiServer.getName(), metadata.getUuid()}); + } } + } + private boolean isMedraServer(DoiServer doiServer) { + return doiServer.getUrl().contains(MEDRA_SEARCH_KEY); + } } diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java index 823545decfa..fd7f7b2699e 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2010 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -58,7 +58,10 @@ public String retrieveDoi(String doi) throws DoiClientException { @Override public String retrieveAllDoi(String doi) throws DoiClientException { - throw new DoiClientException(MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE); + throw new 
DoiClientException(MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE) + .withMessageKey("exception.doi.operationNotSupported") + .withDescriptionKey("exception.doi.operationNotSupported.description", + new String[]{ MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE }); } /** diff --git a/domain/pom.xml b/domain/pom.xml index 48d5cd34fa4..685c96638c2 100644 --- a/domain/pom.xml +++ b/domain/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/domain/src/main/java/org/fao/geonet/domain/DoiServer.java b/domain/src/main/java/org/fao/geonet/domain/DoiServer.java new file mode 100644 index 00000000000..90c93c31c6d --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/domain/DoiServer.java @@ -0,0 +1,284 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.domain; + +import org.fao.geonet.entitylistener.DoiServerEntityListenerManager; +import org.hibernate.annotations.Type; + +import javax.persistence.*; +import java.util.HashSet; +import java.util.Set; + +@Entity +@Table(name = "Doiservers") +@Cacheable +@Access(AccessType.PROPERTY) +@EntityListeners(DoiServerEntityListenerManager.class) +@SequenceGenerator(name = DoiServer.ID_SEQ_NAME, initialValue = 100, allocationSize = 1) +public class DoiServer extends GeonetEntity { + static final String ID_SEQ_NAME = "doiserver_id_seq"; + + private int id; + private String name; + private String description; + private String url; + private String username; + private String password; + private String landingPageTemplate; + private String publicUrl; + private String pattern = "{{uuid}}"; + private String prefix; + private Set publicationGroups = new HashSet<>(); + + /** + * Get the id of the DOI server.

This is autogenerated and when a new DOI server is created + * the DOI server will be assigned a new value.

+ * + * @return the id of the DOI server. + */ + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = ID_SEQ_NAME) + @Column(nullable = false) + public int getId() { + return id; + } + + /** + * Set the id of the DOI server.

If you want to update an existing DOI server then you should + * set this id to the DOI server you want to update and set the other values to the desired + * values.

+ * + * @param id the id of the DOI server. + * @return this DOI server object + */ + public DoiServer setId(int id) { + this.id = id; + return this; + } + + /** + * Get the basic/default name of the DOI server. This is non-translated and can be used to look + * up the DOI server like an id can.
This is a required property.
There is a max length + * to the name allowed. See the annotation for the length value.
+ * + * @return DOI server name + */ + @Column(nullable = false, length = 32) + public String getName() { + return name; + } + + /** + * Set the basic/default name of the DOI server. This is non-translated and can be used to look + * up the DOI server like an id can.
This is a required property.
There is a max length + * to the name allowed. See the annotation on {@link #getName()} for the length value.
+ */ + public DoiServer setName(String name) { + this.name = name; + return this; + } + + /** + * Get a description of the DOI server. + * + * @return the description. + */ + @Column(length = 255) + public String getDescription() { + return description; + } + + /** + * Set the DOI server description. + * + * @param description the description. + * @return this DOI server object. + */ + public DoiServer setDescription(String description) { + this.description = description; + return this; + } + + + /** + * Get the API URL for the DOI server. + * + * @return the DOI server API URL. + */ + @Column(nullable = false, length = 255) + public String getUrl() { + return url; + } + + /** + * Set the REST API configuration URL for the DOI server. + * + * @param url the server URL. + * @return this DOI server object. + */ + public DoiServer setUrl(String url) { + this.url = url; + return this; + } + + /** + * Get the username to use for connecting to the DOI server. + * + * @return the username. + */ + @Column(length = 128) + public String getUsername() { + return username; + } + + public DoiServer setUsername(String username) { + this.username = username; + return this; + } + + /** + * Get the password to use for connecting to the DOI server. + * + * @return the password. + */ + @Column(length = 128) + @Type(type="encryptedString") + public String getPassword() { + return password; + } + + public DoiServer setPassword(String password) { + this.password = password; + return this; + } + + /** + * Set the DOI landing page URL template. + * + * @param landingPageTemplate the landing page URL template. + * @return this DOI server object. + */ + public DoiServer setLandingPageTemplate(String landingPageTemplate) { + this.landingPageTemplate = landingPageTemplate; + return this; + } + + /** + * Get the DOI landing page URL template. + * + * @return the landing page URL template. + */ + @Column(nullable = false, length = 255) + public String getLandingPageTemplate() { + return landingPageTemplate; + } + + /** + * Set the DOI URL prefix. + * + * @param publicUrl the URL prefix. + * @return this DOI server object. + */ + public DoiServer setPublicUrl(String publicUrl) { + this.publicUrl = publicUrl; + return this; + } + + /** + * Get the DOI URL prefix. + * + * @return the URL prefix. + */ + @Column(nullable = false, length = 255) + public String getPublicUrl() { + return publicUrl; + } + + /** + * Set the DOI identifier pattern. + * + * @param pattern the identifier pattern. + * @return this DOI server object. + */ + public DoiServer setPattern(String pattern) { + this.pattern = pattern; + return this; + } + + /** + * Get the DOI identifier pattern. + * + * @return the identifier pattern. + */ + @Column(nullable = false, length = 255) + public String getPattern() { + return pattern; + } + + + /** + * Set the DOI prefix. + * + * @param prefix the DOI prefix. + * @return this DOI server object. + */ + public DoiServer setPrefix(String prefix) { + this.prefix = prefix; + return this; + } + + /** + * Get the DOI prefix. + * + * @return the DOI prefix. + */ + @Column(nullable = false, length = 15) + public String getPrefix() { + return prefix; + } + + /** + * Sets the groups which metadata should be published to the DOI server. + * + * @param publicationGroups Publication groups. + * @return + */ + public void setPublicationGroups(Set publicationGroups) { + this.publicationGroups = publicationGroups; + } + + /** + * Get the groups which metadata is published to the DOI server. + * + * @return Publication groups. 
+ */ + @ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.PERSIST) + @JoinTable( + name = "doiservers_group", + joinColumns = @JoinColumn(name = "doiserver_id"), + inverseJoinColumns = @JoinColumn(name = "group_id")) + public Set getPublicationGroups() { + return publicationGroups; + } +} diff --git a/domain/src/main/java/org/fao/geonet/domain/page/Page.java b/domain/src/main/java/org/fao/geonet/domain/page/Page.java index c778952efd3..fa5cf56045d 100644 --- a/domain/src/main/java/org/fao/geonet/domain/page/Page.java +++ b/domain/src/main/java/org/fao/geonet/domain/page/Page.java @@ -23,10 +23,13 @@ package org.fao.geonet.domain.page; import java.io.Serializable; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import javax.annotation.Nullable; import javax.persistence.Basic; +import javax.persistence.CascadeType; import javax.persistence.CollectionTable; import javax.persistence.Column; import javax.persistence.ElementCollection; @@ -35,10 +38,14 @@ import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.FetchType; +import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; import javax.persistence.Lob; +import javax.persistence.ManyToMany; import javax.persistence.Table; import org.fao.geonet.domain.GeonetEntity; +import org.fao.geonet.domain.Group; import org.hibernate.annotations.Type; /** @@ -56,6 +63,7 @@ public class Page extends GeonetEntity implements Serializable { private PageFormat format; private List sections; private PageStatus status; + private Set groups = new LinkedHashSet<>(); private String label; private String icon; @@ -64,7 +72,7 @@ public Page() { } - public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat format, List sections, PageStatus status, String label, String icon) { + public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat format, List sections, PageStatus status, String label, String icon, Set groups) { super(); this.pageIdentity = pageIdentity; this.data = data; @@ -74,10 +82,11 @@ public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat form this.status = status; this.label = label; this.icon = icon; + this.groups = groups; } public enum PageStatus { - PUBLIC, PUBLIC_ONLY, PRIVATE, HIDDEN; + PUBLIC, PUBLIC_ONLY, GROUPS, PRIVATE, HIDDEN; } public enum PageFormat { @@ -146,6 +155,28 @@ public String getIcon() { return icon; } + /** + * Get all the page's groups. + * + * @return all the page's groups. + */ + @ManyToMany(fetch = FetchType.EAGER, cascade = {CascadeType.DETACH, CascadeType.PERSIST, CascadeType.REFRESH}) + @JoinTable(name = "spg_page_group", joinColumns = {@JoinColumn(name = "language"), @JoinColumn(name = "linktext")}, + inverseJoinColumns = {@JoinColumn(name = "groupid", referencedColumnName = "id", unique = false)}) + public Set getGroups() { + return groups; + } + + /** + * Set all the page's groups. + * + * @param groups all the page's groups. 
+ * @return this group object + */ + public void setGroups(Set groups) { + this.groups = groups; + } + public void setPageIdentity(PageIdentity pageIdentity) { this.pageIdentity = pageIdentity; } diff --git a/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java b/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java new file mode 100644 index 00000000000..8d4af1bdf92 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.entitylistener; + +import org.fao.geonet.domain.DoiServer; + +import javax.persistence.*; + +public class DoiServerEntityListenerManager extends AbstractEntityListenerManager { + @PrePersist + public void prePresist(final DoiServer entity) { + handleEvent(PersistentEventType.PrePersist, entity); + } + + @PreRemove + public void preRemove(final DoiServer entity) { + handleEvent(PersistentEventType.PreRemove, entity); + } + + @PostPersist + public void postPersist(final DoiServer entity) { + handleEvent(PersistentEventType.PostPersist, entity); + } + + @PostRemove + public void postRemove(final DoiServer entity) { + handleEvent(PersistentEventType.PostRemove, entity); + } + + @PreUpdate + public void preUpdate(final DoiServer entity) { + handleEvent(PersistentEventType.PreUpdate, entity); + } + + @PostUpdate + public void postUpdate(final DoiServer entity) { + handleEvent(PersistentEventType.PostUpdate, entity); + } + + @PostLoad + public void postLoad(final DoiServer entity) { + handleEvent(PersistentEventType.PostLoad, entity); + } +} diff --git a/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java b/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java new file mode 100644 index 00000000000..25ca32429ce --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.DoiServer; +import org.springframework.data.jpa.repository.JpaSpecificationExecutor; + +import java.util.Optional; + +public interface DoiServerRepository extends + GeonetRepository, + JpaSpecificationExecutor { + + Optional findOneById(int id); +} diff --git a/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java b/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java index 280b24dc2a8..765b55b7b9c 100644 --- a/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java +++ b/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java @@ -110,6 +110,7 @@ protected static Element findAllAsXml(EntityManager ent return rootEl; } + @Transactional public T update(ID id, Updater updater) { final T entity = _entityManager.find(this._entityClass, id); diff --git a/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java b/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java index eb5cb852a9b..ef6f510df98 100644 --- a/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java +++ b/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -27,8 +27,8 @@ import org.springframework.data.jpa.repository.JpaSpecificationExecutor; import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; +import java.util.Optional; /** * Data Access object for the {@link Link} entities. @@ -39,9 +39,7 @@ public interface LinkRepository extends GeonetRepository, LinkRep * * @return one link or null. */ - @Nullable - Link findOneByUrl(@Nonnull String url); + Optional findOneByUrl(@Nonnull String url); - @Nullable List findAllByUrlIn(@Nonnull List url); } diff --git a/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java b/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java index 6b723f8eaf9..cf53f7fb36b 100644 --- a/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java +++ b/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java @@ -51,7 +51,7 @@ public interface MetadataValidationRepositoryCustom { * @param metadataId the id of the metadata. 
* @return the number of rows deleted */ - @Modifying(clearAutomatically=true) + @Modifying(flushAutomatically = true, clearAutomatically = true) @Transactional @Query(value="DELETE FROM MetadataValidation v where v.id.metadataId = ?1 AND valtype != 'inspire'") int deleteAllInternalValidationById_MetadataId(Integer metadataId); diff --git a/domain/src/main/java/org/fao/geonet/repository/page/PageRepository.java b/domain/src/main/java/org/fao/geonet/repository/page/PageRepository.java index b76094c770f..1245a752d99 100644 --- a/domain/src/main/java/org/fao/geonet/repository/page/PageRepository.java +++ b/domain/src/main/java/org/fao/geonet/repository/page/PageRepository.java @@ -29,7 +29,9 @@ import org.springframework.data.jpa.repository.JpaRepository; public interface PageRepository extends JpaRepository { - + List findByPageIdentityLanguage(String language); + List findPageByStatus(Page.PageStatus status); + } diff --git a/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java b/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java index cd77c680549..8ddfc576891 100644 --- a/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java +++ b/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -23,31 +23,15 @@ package org.fao.geonet.repository.specification; -import org.fao.geonet.domain.Link; -import org.fao.geonet.domain.Link_; -import org.fao.geonet.domain.Metadata; -import org.fao.geonet.domain.MetadataLink; -import org.fao.geonet.domain.MetadataLink_; -import org.fao.geonet.domain.MetadataSourceInfo_; -import org.fao.geonet.domain.Metadata_; -import org.fao.geonet.domain.OperationAllowed; -import org.fao.geonet.domain.OperationAllowedId_; -import org.fao.geonet.domain.OperationAllowed_; -import org.fao.geonet.domain.ReservedGroup; -import org.fao.geonet.domain.ReservedOperation; +import com.google.common.collect.Sets; +import org.fao.geonet.domain.*; import org.springframework.data.jpa.domain.Specification; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Join; -import javax.persistence.criteria.JoinType; -import javax.persistence.criteria.Path; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; -import javax.persistence.criteria.Subquery; +import javax.persistence.criteria.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Set; public class LinkSpecs { private LinkSpecs() { @@ -58,92 +42,126 @@ public static Specification filter(String urlPartToContain, List associatedRecords, Integer[] groupPublishedIds, Integer[] groupOwnerIds, + Integer[] httpStatusValueFilter, + boolean excludeHarvestedMetadataFilter, Integer[] editingGroupIds) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery query, CriteriaBuilder cb) { - List predicates = new ArrayList<>(); + return (root, query, cb) -> { + List predicates = new ArrayList<>(); - if (state != null) { - Path statePath = root.get(Link_.lastState); - predicates.add(cb.equal(statePath, state)); - } + if (state != null) { + Path statePath = root.get(Link_.lastState); + 
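This hunk also rewrites the anonymous Specification class as a lambda: Specification declares a single abstract method, toPredicate, so the two forms are interchangeable. A minimal sketch of the same refactor, using a hypothetical Book entity in place of Link:

    import javax.persistence.criteria.CriteriaBuilder;
    import javax.persistence.criteria.CriteriaQuery;
    import javax.persistence.criteria.Predicate;
    import javax.persistence.criteria.Root;
    import org.springframework.data.jpa.domain.Specification;

    class Book { String title; } // hypothetical entity standing in for Link

    class BookSpecs {
        // Before: anonymous class spelling out toPredicate.
        static Specification<Book> titledOld(String title) {
            return new Specification<Book>() {
                @Override
                public Predicate toPredicate(Root<Book> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
                    return cb.equal(root.get("title"), title);
                }
            };
        }

        // After: toPredicate is the single abstract method, so a lambda suffices.
        static Specification<Book> titled(String title) {
            return (root, query, cb) -> cb.equal(root.get("title"), title);
        }
    }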
predicates.add(cb.equal(statePath, state)); + } - if (urlPartToContain != null) { - Path urlPath = root.get(Link_.url); - predicates.add( - cb.like(urlPath, - cb.literal(String.format("%%%s%%", urlPartToContain)))); - } + if (urlPartToContain != null) { + Path urlPath = root.get(Link_.url); + predicates.add( + cb.like(urlPath, + cb.literal(String.format("%%%s%%", urlPartToContain)))); + } + + if (associatedRecords != null) { + Join metadataJoin = root.join(Link_.records, JoinType.INNER); + predicates.add(metadataJoin.get("metadataUuid").in(associatedRecords)); + } + + if (excludeHarvestedMetadataFilter) { + Join metadataJoin = root.join(Link_.records, JoinType.INNER); - if (associatedRecords != null) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - predicates.add(metadataJoin.get("metadataUuid").in(associatedRecords)); + Subquery subquery = query.subquery(Integer.class); + final Root metadataRoot = subquery.from(Metadata.class); + Path isHarvestedAttributePath = metadataRoot.get(AbstractMetadata_.harvestInfo).get(MetadataHarvestInfo_.harvested_JPAWorkaround); + Predicate equalHarvestPredicate = cb.equal(isHarvestedAttributePath, cb.literal(Constants.toYN_EnabledChar(false))); + subquery.where( + equalHarvestPredicate); + + Path metadataId = metadataRoot.get(AbstractMetadata_.id); + subquery.select(metadataId); + + predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); + query.distinct(true); + } + + if (httpStatusValueFilter != null && httpStatusValueFilter.length > 0) { + Join linkLinkStatusJoin = root.join(Link_.linkStatus, JoinType.LEFT); + + Integer[] valuesIn = Arrays.stream(httpStatusValueFilter).filter(i -> i >= 0).toArray(Integer[]::new); + Set setValuesNotIn = Sets.newHashSet(httpStatusValueFilter); + setValuesNotIn.removeAll(Arrays.asList(valuesIn)); + Integer[] valuesNotIn = setValuesNotIn.stream().map(i -> -1 * i).toArray(Integer[]::new); + + if (valuesIn.length > 0) { + predicates.add(cb.and( + cb.equal(linkLinkStatusJoin.get(LinkStatus_.checkDate), root.get(Link_.lastCheck)), + linkLinkStatusJoin.get((LinkStatus_.statusValue)).in(Arrays.asList( + Arrays.stream(valuesIn).map(String::valueOf).toArray())))); } - if (editingGroupIds != null && editingGroupIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - - Subquery subquery = query.subquery(Integer.class); - final Root opAllowRoot = subquery.from(OperationAllowed.class); - final Root metadataRoot = subquery.from(Metadata.class); - final Predicate groupOwnerPredicate = - metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(editingGroupIds); - final Predicate metadataOperations = cb.equal(metadataRoot.get(Metadata_.id), opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId)); - Predicate editableGroups = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(Arrays.asList(editingGroupIds)); - Predicate operationTypeEdit = - cb.equal( - opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), - cb.literal(ReservedOperation.editing.getId())); - subquery.where( - cb.or( - cb.and(metadataOperations, groupOwnerPredicate), - cb.and(editableGroups, operationTypeEdit))); - - Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); - subquery.select(opAllowedMetadataId); - - predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - query.distinct(true); + if (valuesNotIn.length > 0) { + predicates.add(cb.and( + 
cb.equal(linkLinkStatusJoin.get(LinkStatus_.checkDate), root.get(Link_.lastCheck)), + cb.not(linkLinkStatusJoin.get((LinkStatus_.statusValue)).in(Arrays.asList( + Arrays.stream(valuesNotIn).map(String::valueOf).toArray()))))); } + } - if (groupPublishedIds != null && groupPublishedIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); + Join metadataJoin = root.join(Link_.records, JoinType.INNER); + Subquery subquery = query.subquery(Integer.class); + final Root opAllowRoot = subquery.from(OperationAllowed.class); + final Root metadataRoot = subquery.from(Metadata.class); - Subquery subquery = query.subquery(Integer.class); - Root opAllowRoot = subquery.from(OperationAllowed.class); - Predicate publishedToIndicatedGroup = - opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(groupPublishedIds); - Predicate operationTypeView = cb.equal( + boolean editinGroupQuery = editingGroupIds != null && editingGroupIds.length > 0; + boolean groupPublishedQuery = groupPublishedIds != null && groupPublishedIds.length > 0; + boolean groupOwnerQuery = groupOwnerIds != null && groupOwnerIds.length > 0; + + List subQueryPredicates = new ArrayList<>(); + + if (editinGroupQuery) { + final Predicate groupOwnerPredicate = + metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(editingGroupIds); + final Predicate metadataOperations = cb.equal(metadataRoot.get(Metadata_.id), opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId)); + Predicate editableGroups = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(Arrays.asList(editingGroupIds)); + Predicate operationTypeEdit = + cb.equal( opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), - cb.literal(ReservedOperation.view.getId())); - subquery.where( - cb.and(publishedToIndicatedGroup, operationTypeView)); + cb.literal(ReservedOperation.editing.getId())); - Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); - subquery.select(opAllowedMetadataId); + subQueryPredicates.add(cb.or( + cb.and(metadataOperations, groupOwnerPredicate), + cb.and(editableGroups, operationTypeEdit))); + } - predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - query.distinct(true); - } + if (groupPublishedQuery) { + Predicate publishedToIndicatedGroup = + opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(groupPublishedIds); + Predicate operationTypeView = cb.equal( + opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), + cb.literal(ReservedOperation.view.getId())); - if (groupOwnerIds != null && groupOwnerIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - Subquery subquery = query.subquery(Integer.class); - final Root metadataRoot = subquery.from(Metadata.class); - final Predicate groupOwnerPredicate = - metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(groupOwnerIds); - subquery.where(groupOwnerPredicate); + subQueryPredicates.add(cb.and(publishedToIndicatedGroup, operationTypeView)); + } - Path metadataId = metadataRoot.get(Metadata_.id); - subquery.select(metadataId); + if (groupOwnerQuery) { + final Predicate groupOwnerPredicate = + metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(groupOwnerIds); - predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - query.distinct(true); - } - return cb.and(predicates.toArray(new 
Predicate[]{})); + subQueryPredicates.add(groupOwnerPredicate); } + + + if (subQueryPredicates.size() > 0) { + subquery.where(subQueryPredicates.toArray(new Predicate[]{})); + + Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); + subquery.select(opAllowedMetadataId); + + predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); + } + + query.distinct(true); + + return cb.and(predicates.toArray(new Predicate[]{})); }; } } diff --git a/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java b/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java new file mode 100644 index 00000000000..bc8daaf4bb6 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.DoiServer; +import org.fao.geonet.domain.Group; +import org.jasypt.encryption.pbe.StandardPBEStringEncryptor; +import org.jasypt.hibernate5.encryptor.HibernatePBEEncryptorRegistry; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.junit.Assert.assertEquals; + +public class DoiServerRepositoryTest extends AbstractSpringDataTest { + + @Autowired + private DoiServerRepository doiServerRepository; + + @Autowired + private GroupRepository groupRepository; + + @PersistenceContext + EntityManager entityManager; + + @BeforeClass + public static void init() { + StandardPBEStringEncryptor strongEncryptor = new StandardPBEStringEncryptor(); + strongEncryptor.setPassword("testpassword"); + + HibernatePBEEncryptorRegistry registry = + HibernatePBEEncryptorRegistry.getInstance(); + registry.registerPBEStringEncryptor("STRING_ENCRYPTOR", strongEncryptor); + } + + public static DoiServer newDoiServer(AtomicInteger nextId) { + int id = nextId.incrementAndGet(); + return new DoiServer() + .setName("Name " + id) + .setDescription("Desc " + id) + .setUrl("http://server" + id) + .setUsername("username" + id) + .setPassword("password" + id) + .setLandingPageTemplate("http://landingpage" + id) + .setPublicUrl("http://publicurl" + id) + .setPattern("pattern" + id) + .setPrefix("prefix" + id); + } + + @Test + public void test_Save_Count_FindOnly_DeleteAll() throws Exception { + assertEquals(0, doiServerRepository.count()); + DoiServer doiServer = newDoiServer(); + DoiServer savedDoiServer = doiServerRepository.save(doiServer); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + doiServer.setId(savedDoiServer.getId()); + assertEquals(1, doiServerRepository.count()); + Optional retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertEquals(true, retrievedDoiServerByIdOpt.isPresent()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + + doiServerRepository.deleteAll(); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + assertEquals(0, doiServerRepository.count()); + } + + @Test + public void testUpdate() throws Exception { + Group group1 = groupRepository.save(GroupRepositoryTest.newGroup(_inc)); + Group group2 = groupRepository.save(GroupRepositoryTest.newGroup(_inc)); + + assertEquals(0, doiServerRepository.count()); + DoiServer doiServer = newDoiServer(); + doiServer.getPublicationGroups().add(group1); + + DoiServer savedDoiServer = doiServerRepository.save(doiServer); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + doiServer.setId(savedDoiServer.getId()); + + assertEquals(1, doiServerRepository.count()); + Optional retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertEquals(true, retrievedDoiServerByIdOpt.isPresent()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + + doiServer.setName("New Name"); + doiServer.getPublicationGroups().add(group2); + DoiServer savedDoiServer2 = doiServerRepository.save(doiServer); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + assertSameContents(savedDoiServer, savedDoiServer2); 
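The flush-and-clear sequence repeated through these tests is what gives the assertions their force: it pushes pending SQL to the database and detaches every managed entity, so the next findOneById rebuilds the DoiServer from an actual SELECT instead of returning the instance cached in the persistence context. A condensed sketch of the idiom as used in this test class:

    DoiServer saved = doiServerRepository.save(doiServer); // save returns the managed instance

    doiServerRepository.flush();  // push pending INSERT/UPDATE statements
    entityManager.flush();        // flush whatever the persistence context still holds
    entityManager.clear();        // detach everything; the first-level cache is now empty

    // This read now issues a real SELECT and materialises a fresh copy.
    DoiServer reloaded = doiServerRepository.findOneById(saved.getId()).orElseThrow();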
+ + assertEquals(1, doiServerRepository.count()); + retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + } + + + private DoiServer newDoiServer() { + return newDoiServer(_inc); + } +} diff --git a/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java b/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java new file mode 100644 index 00000000000..4bcdcab74c7 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.Link; +import org.fao.geonet.domain.LinkType; +import org.junit.Assert; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + + +public class LinkRespositoryTest extends AbstractSpringDataTest { + + @Autowired + private LinkRepository repository; + + @Test + public void testFindAllByUrlIn() { + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + + repository.save(link); + + List links = new ArrayList<>(); + links.add(link.getUrl()); + List linkList = repository.findAllByUrlIn(links); + + Assert.assertNotNull(linkList); + Assert.assertEquals(1, linkList.size()); + Assert.assertEquals(link.getUrl(), linkList.get(0).getUrl()); + } + + @Test + public void testFindAllByUrlInNoResults() { + List links = new ArrayList<>(); + links.add("https://test.com/link"); + List linkList = repository.findAllByUrlIn(links); + + Assert.assertNotNull(linkList); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testFindOneByUrl() { + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + + repository.save(link); + + Optional linkToCheck = repository.findOneByUrl("https://test.com/link"); + + Assert.assertNotNull(linkToCheck); + Assert.assertTrue(linkToCheck.isPresent()); + Assert.assertEquals(link.getUrl(), linkToCheck.get().getUrl()); + } + + @Test + public void testFindOneByUrlNoResult() { + Optional link = repository.findOneByUrl("https://test.com/link"); + + Assert.assertNotNull(link); + Assert.assertTrue(link.isEmpty()); + } + +} diff --git a/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java 
b/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java new file mode 100644 index 00000000000..6f025abd585 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java @@ -0,0 +1,324 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository.specification; + +import org.fao.geonet.domain.*; +import org.fao.geonet.repository.*; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +public class LinkSpecsTest extends AbstractSpringDataTest { + @Autowired + MetadataRepository metadataRepository; + + @Autowired + MetadataLinkRepository metadataLinkRepository; + + @Autowired + LinkStatusRepository linkStatusRepository; + + @Autowired + OperationAllowedRepository operationAllowedRepository; + + @Autowired + LinkRepository linkRepository; + + AtomicInteger inc = new AtomicInteger(); + + @Before + public void createTestData() { + // Create a non harvested metadata + Metadata metadata = MetadataRepositoryTest.newMetadata(inc); + metadata.getSourceInfo().setGroupOwner(2); + metadataRepository.save(metadata); + + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + link.setLastState(1); + + ISODate checkStatusDate = new ISODate(); + + Set linkStatuses = new HashSet<>(); + LinkStatus linkStatus = new LinkStatus(); + linkStatus.setLink(link); + linkStatus.setStatusValue("200"); + linkStatus.setCheckDate(checkStatusDate); + linkStatuses.add(linkStatus); + + link.setLinkStatus(linkStatuses); + + MetadataLink metadataLink = new MetadataLink(); + metadataLink.setMetadataId(metadata.getId()); + metadataLink.setMetadataUuid(metadata.getUuid()); + metadataLink.setLink(link); + + Set recordLinks = new HashSet<>(); + recordLinks.add(metadataLink); + link.setRecords(recordLinks); + link.setLastCheck(checkStatusDate); + linkRepository.save(link); + + metadataLinkRepository.save(metadataLink); + linkStatusRepository.save(linkStatus); + + // View in group 2, edit in group 2 in implicit from metadata owner group + OperationAllowed operationAllowedViewMd1 = new OperationAllowed(); + OperationAllowedId operationAllowedIdViewMd1 = new OperationAllowedId(); + operationAllowedIdViewMd1.setMetadataId(metadata.getId()); + operationAllowedIdViewMd1.setGroupId(2); + 
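The OperationAllowed wiring being assembled here is repeated for every group/operation pair in this fixture; the inline construction continues just below. A hypothetical helper for this test class, using only the setters the method already calls, would cut the repetition:

    // Hypothetical helper, not part of the patch: wraps the composite-key
    // boilerplate used for each permission row in createTestData().
    private OperationAllowed allow(int metadataId, int groupId, ReservedOperation operation) {
        OperationAllowedId id = new OperationAllowedId();
        id.setMetadataId(metadataId);
        id.setGroupId(groupId);
        id.setOperationId(operation.getId());
        OperationAllowed allowed = new OperationAllowed();
        allowed.setId(id);
        return allowed;
    }

    // Usage: operationAllowedRepository.save(allow(metadata.getId(), 2, ReservedOperation.view));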
+        operationAllowedIdViewMd1.setOperationId(ReservedOperation.view.getId());
+        operationAllowedViewMd1.setId(operationAllowedIdViewMd1);
+        operationAllowedRepository.save(operationAllowedViewMd1);
+
+        // Edit in group 3
+        OperationAllowed operationAllowedEditMd1 = new OperationAllowed();
+        OperationAllowedId operationAllowedIdEditMd1 = new OperationAllowedId();
+        operationAllowedIdEditMd1.setMetadataId(metadata.getId());
+        operationAllowedIdEditMd1.setGroupId(3);
+        operationAllowedIdEditMd1.setOperationId(ReservedOperation.editing.getId());
+        operationAllowedEditMd1.setId(operationAllowedIdEditMd1);
+        operationAllowedRepository.save(operationAllowedEditMd1);
+
+        // Create a harvested metadata
+        Metadata metadata2 = MetadataRepositoryTest.newMetadata(inc);
+        metadata2.getSourceInfo().setGroupOwner(2);
+        MetadataHarvestInfo metadataHarvestInfo = new MetadataHarvestInfo();
+        metadataHarvestInfo.setHarvested(true);
+        metadataHarvestInfo.setUuid(UUID.randomUUID().toString());
+        metadata2.setHarvestInfo(metadataHarvestInfo);
+
+        metadataRepository.save(metadata2);
+
+        Link link2 = new Link();
+        link2.setLinkType(LinkType.HTTP);
+        link2.setUrl("https://test.com/link2");
+        link2.setLastCheck(checkStatusDate);
+        link2.setLastState(-1);
+
+        Set<LinkStatus> linkStatuses2 = new HashSet<>();
+        LinkStatus linkStatus2 = new LinkStatus();
+        linkStatus2.setLink(link2);
+        linkStatus2.setStatusValue("404");
+        linkStatus2.setCheckDate(checkStatusDate);
+        linkStatuses2.add(linkStatus2);
+
+        link2.setLinkStatus(linkStatuses2);
+
+        MetadataLink metadataLink2 = new MetadataLink();
+        metadataLink2.setMetadataId(metadata2.getId());
+        metadataLink2.setMetadataUuid(metadata2.getUuid());
+        metadataLink2.setLink(link2);
+
+        Set<MetadataLink> recordLinks2 = new HashSet<>();
+        recordLinks2.add(metadataLink2);
+        link2.setRecords(recordLinks2);
+        linkRepository.save(link2);
+
+        metadataLinkRepository.save(metadataLink2);
+        linkStatusRepository.save(linkStatus2);
+
+        // View in group 2; edit in group 2 is implicit from the metadata owner group
+        OperationAllowed operationAllowedViewMd2 = new OperationAllowed();
+        OperationAllowedId operationAllowedIdViewMd2 = new OperationAllowedId();
+        operationAllowedIdViewMd2.setMetadataId(metadata2.getId());
+        operationAllowedIdViewMd2.setGroupId(2);
+        operationAllowedIdViewMd2.setOperationId(ReservedOperation.view.getId());
+        operationAllowedViewMd2.setId(operationAllowedIdViewMd2);
+        operationAllowedRepository.save(operationAllowedViewMd2);
+    }
+
+    @Test
+    public void testLinkSpecsFilterUrlPartToContainMatch() {
+        // Query excluding harvested metadata
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter("https://test.com", null, null, null, null, null, true, null));
+        Assert.assertEquals(1, linkList.size());
+
+        // Query not excluding harvested metadata
+        List<Link> linkList2 = linkRepository.findAll(LinkSpecs.filter("https://test.com", null, null, null, null, null, false, null));
+        Assert.assertEquals(2, linkList2.size());
+
+    }
+
+    @Test
+    public void testLinkSpecsFilterUrlPartToContainNoMatch() {
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter("https://test2.com", null, null, null, null, null, false, null));
+        Assert.assertEquals(0, linkList.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterAssociatedRecordsMatch() {
+        List<String> associatedRecords = metadataRepository.findAll().stream().map(Metadata::getUuid).collect(Collectors.toList());
+
+        // Query excluding harvested metadata
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, true, null));
+        Assert.assertEquals(1, linkList.size());
+
+        // Query not excluding harvested metadata
+        List<Link> linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, false, null));
+        Assert.assertEquals(2, linkList2.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterAssociatedRecordsNoMatch() {
+        List<String> associatedRecords = new ArrayList<>();
+        associatedRecords.add("aaaa");
+
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, false, null));
+        Assert.assertEquals(0, linkList.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterHttpStatusFilterMatch() {
+        Integer[] httpStatusValueFilter = new Integer[]{200, 404};
+
+        // Query excluding harvested metadata
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, true, null));
+        Assert.assertEquals(1, linkList.size());
+
+        // Query not excluding harvested metadata
+        List<Link> linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, false, null));
+        Assert.assertEquals(2, linkList2.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterHttpStatusFilterNoMatch() {
+        Integer[] httpStatusValueFilter = new Integer[]{500};
+
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, false, null));
+        Assert.assertEquals(0, linkList.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterGroupOwnersIdsMatch() {
+        Integer[] groupOwnerIds = new Integer[]{2};
+
+        // Query excluding harvested metadata
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, true, null));
+        Assert.assertEquals(1, linkList.size());
+
+        // Query not excluding harvested metadata
+        List<Link> linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, false, null));
+        Assert.assertEquals(2, linkList2.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterGroupOwnersIdsNoMatch() {
+        Integer[] groupOwnerIds = new Integer[]{3};
+
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, false, null));
+        Assert.assertEquals(0, linkList.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterGroupPublishedIdsMatch() {
+        Integer[] groupPublishedIds = new Integer[]{2};
+
+        // Query excluding harvested metadata
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, true, null));
+        Assert.assertEquals(1, linkList.size());
+
+        // Query not excluding harvested metadata
+        List<Link> linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, false, null));
+        Assert.assertEquals(2, linkList2.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterGroupPublishedIdsNoMatch() {
+        Integer[] groupPublishedIds = new Integer[]{3};
+
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, false, null));
+        Assert.assertEquals(0, linkList.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterEditingGroupIdsMatch() {
+        Integer[] editingGroupIds1 = new Integer[]{2};
+
+        // Query excluding harvested metadata
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, true, editingGroupIds1));
+        Assert.assertEquals(1, linkList.size());
+
+        // Query not excluding harvested metadata
+        List<Link> linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null,
+            false, editingGroupIds1));
+        Assert.assertEquals(2, linkList2.size());
+
+        Integer[] editingGroupIds2 = new Integer[]{3};
+
+        // Query excluding harvested metadata
+        List<Link> linkList3 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, true, editingGroupIds2));
+        Assert.assertEquals(1, linkList3.size());
+
+        // Query not excluding harvested metadata
+        List<Link> linkList4 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, false, editingGroupIds2));
+        Assert.assertEquals(1, linkList4.size());
+    }
+
+    @Test
+    public void testLinkSpecsFilterEditingGroupIdsNoMatch() {
+        Integer[] editingGroupIds = new Integer[]{4};
+
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, false, editingGroupIds));
+        Assert.assertEquals(0, linkList.size());
+    }
+
+    @Test
+    public void testLinkSpecsStateMatch() {
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, -1, null, null, null, null, false, null));
+        Assert.assertEquals(1, linkList.size());
+
+        // Query not excluding harvested metadata
+        List<Link> linkList2 = linkRepository.findAll(LinkSpecs.filter(null, 1, null, null, null, null, false, null));
+        Assert.assertEquals(1, linkList2.size());
+    }
+
+    @Test
+    public void testLinkSpecsStateNoMatch() {
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, 0, null, null, null, null, false, null));
+        Assert.assertEquals(0, linkList.size());
+    }
+
+
+    @Test
+    public void testLinkSpecsSeveralFilters() {
+        // Find links with state 1, related to metadata published to group 2
+        Integer[] groupPublishedIds = new Integer[]{2};
+
+        List<Link> linkList = linkRepository.findAll(LinkSpecs.filter(null, 1, null, groupPublishedIds, null, null, false, null));
+        Assert.assertEquals(1, linkList.size());
+
+        // Find links with http status 200 / 404 / 500, related to metadata owned by groups 2 / 3
+        Integer[] httpStatusValueFilter = new Integer[]{200, 404, 500};
+        Integer[] groupOwnerIds = new Integer[]{2, 3};
+
+        List<Link> linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, httpStatusValueFilter, false, null));
+        Assert.assertEquals(2, linkList2.size());
+    }
+}
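LinkSpecs.filter takes eight positional arguments, which makes the calls above hard to scan. Reading each position purely from how the tests use it (the parameter names below are illustrative guesses, not the real names in LinkSpecs):

```java
// Position-by-position reading of LinkSpecs.filter as exercised in LinkSpecsTest.
// Names are illustrative only; the positions are taken from the tests above.
Integer[] groupPublishedIds = {2};
Integer[] groupOwnerIds = {2, 3};
Integer[] httpStatusValueFilter = {200, 404};
Integer[] editingGroupIds = {2};
List<String> associatedRecords = Arrays.asList("uuid-1");

List<Link> links = linkRepository.findAll(LinkSpecs.filter(
    "https://test.com",     // 1: substring the link URL must contain, or null
    1,                      // 2: last link state (1 and -1 are used above), or null
    associatedRecords,      // 3: UUIDs of records the link must belong to, or null
    groupPublishedIds,      // 4: groups the records must be published to, or null
    groupOwnerIds,          // 5: owner groups of the records, or null
    httpStatusValueFilter,  // 6: HTTP status of the last link check, or null
    true,                   // 7: exclude links coming from harvested records
    editingGroupIds));      // 8: groups with editing rights on the records, or null
```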
diff --git a/es/README.md b/es/README.md
index c27cee0b4b0..d46d0574d06 100644
--- a/es/README.md
+++ b/es/README.md
@@ -1,33 +1,48 @@
 # Install, configure and start Elasticsearch
 
+## Installation options
+
 This section describes several methods for configuring Elasticsearch for development. These configurations should not be used for a production deployment.
 
-## Manual installation
+### Docker installation (Recommended)
 
-1. Download Elasticsearch 8.x (tested with 8.11.3 for Geonetwork 4.4.x) from https://www.elastic.co/downloads/elasticsearch
-and copy to the ES module, e.g., es/elasticsearch-8.11.3
+1. Use docker pull to download the image (you can check version in the :file:`pom.xml` file):
 
-2. Disable the security
+    ```
+    docker pull docker.elastic.co/elasticsearch/elasticsearch:8.14.3
+    ```
 
-Elasticsearch 8 has security enabled by default. To disable this configuration for development, update the file `config/elasticsearch.yml` adding at the end:
+2. Use docker run, leaving 9200 available:
 
-```
-xpack.security.enabled: false
-xpack.security.enrollment.enabled: false
-```
+    ```
+    docker run -p 9200:9200 -p 9300:9300 \
+        -e "discovery.type=single-node" \
+        -e "xpack.security.enabled=false" \
+        -e "xpack.security.enrollment.enabled=false" \
+        docker.elastic.co/elasticsearch/elasticsearch:8.14.3
+    ```
+3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser
+
+### Docker compose installation
+
+1. Use docker compose with the provided [docker-compose.yml](docker-compose.yml):
 
-    ```shell script
-    ./bin/elasticsearch
+    ```
+    cd es
+    docker-compose up
     ```
 
-4. Check that elasticsearch is running by visiting http://localhost:9200 in a browser
+2. Check that it is running using your browser:
+
+    * Elasticsearch: http://localhost:9200
+    * Kibana: http://localhost:5601
+
+### Maven installation
 
-## Maven installation
+Maven installation ensures you are always using the ``es.version`` version specified in ``pom.xml``.
 
 1. Maven can take care of the installation steps:
@@ -44,42 +59,32 @@ xpack.security.enrollment.enabled: false
 ```
 3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser
 
-## Docker installation
+## Manual installation
 
-1. Use docker pull to download the image (you can check version in the :file:`pom.xml` file):
+1. Download Elasticsearch 8.14.3 from https://www.elastic.co/downloads/elasticsearch
+and copy to the ES module, e.g., ``es/elasticsearch-8.14.3``
 
-    ```
-    docker pull docker.elastic.co/elasticsearch/elasticsearch:8.11.3
-    ```
+2. Disable the security
 
-2. Use docker run, leaving 9200 available:
+    Elasticsearch 8 has security enabled by default. To disable this configuration for development, update the file `config/elasticsearch.yml` adding at the end:
 
     ```
-    docker run -p 9200:9200 -p 9300:9300 \
-    -e "discovery.type=single-node" \
-    -e "xpack.security.enabled=false" \
-    -e "xpack.security.enrollment.enabled=false" \
-    docker.elastic.co/elasticsearch/elasticsearch:8.11.3
+    xpack.security.enabled: false
+    xpack.security.enrollment.enabled: false
     ```
-3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser
-
-## Docker compose installation
-
-1. Use docker compose with the provided [docker-compose.yml](docker-compose.yml):
+3. Start ES using:
 
-    ```
-    cd es
-    docker-compose up
+    ```shell script
+    ./bin/elasticsearch
     ```
 
-3. Check that it is running using your browser:
-
-    * Elasticsearch: http://localhost:9200
-    * Kibana: http://localhost:5601
+4. Check that elasticsearch is running by visiting http://localhost:9200 in a browser
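Whichever installation option you use, the same check can be done from a terminal; a plain GET against the root endpoint returns a small JSON banner whose `version.number` should match the version used above (8.14.3 at the time of writing):

```
curl http://localhost:9200
```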
 # Configuration
 
+## Index management
+
 Optionally you can create the index manually, but it will be created by the catalogue when the Elasticsearch instance is available and the index does not exist.
@@ -122,7 +127,7 @@ Don't hesitate to propose a Pull Request with the new language.
 
 1. Configure ES to start on server startup. It is recommended to protect the `gn-records` index from Internet access.
 
-   * Note that for debian-based servers the current deb download (7.3.2) can be installed rather than installing manually and can be configured to run as a service using the instructions here: https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html
+   * Note that for debian-based servers the current deb download (8.14.3) can be installed rather than installing manually and can be configured to run as a service using the instructions here: https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html
 
 # Troubleshoot
 
@@ -163,3 +168,22 @@ field expansion for [*] matches too many fields, limit: 1024
 
 An option is to restrict `queryBase` to limit the number of fields to query on. `any:(${any}) resourceTitleObject.default:(${any})^2` is a good default.
 Using `${any}` will probably trigger the error if the number of records is high. The other option is to increase `indices.query.bool.max_clause_count`.
+
+
+## Disk space threshold
+
+Elasticsearch will refuse to write new content unless there is enough free disk space available (by default, a quarter of the disk must remain free).
+
+To turn off this check:
+
+```
+ curl -XPUT http://localhost:9200/_cluster/settings -H 'Content-Type: application/json' -d '{ "transient" : { "cluster.routing.allocation.disk.threshold_enabled" : false } }'
+```
+
+## Blocked by index read-only / allow delete
+
+To recover:
+
+```
+curl -XPUT -H "Content-Type: application/json" http://localhost:9200/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}'
+```
diff --git a/es/docker-compose.yml b/es/docker-compose.yml
index cd255c6f531..994c6089a01 100644
--- a/es/docker-compose.yml
+++ b/es/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3'
 services:
   elasticsearch:
-    image: docker.elastic.co/elasticsearch/elasticsearch:8.11.3
+    image: docker.elastic.co/elasticsearch/elasticsearch:8.14.3
     container_name: elasticsearch8
     environment:
       - cluster.name=docker-cluster
@@ -20,7 +20,7 @@ services:
     ports:
       - "9200:9200"
   kibana:
-    image: docker.elastic.co/kibana/kibana:8.11.3
+    image: docker.elastic.co/kibana/kibana:8.14.3
     container_name: kibana8
     ports:
       - "5601:5601"
diff --git a/es/es-dashboards/README.md b/es/es-dashboards/README.md
index b95aa299cb4..e5e87790e31 100644
--- a/es/es-dashboards/README.md
+++ b/es/es-dashboards/README.md
@@ -1,24 +1,20 @@
 # Install, configure and start Kibana
 
-## Manual installation
-
-Download Kibana from https://www.elastic.co/downloads/kibana. For Geonetwork 3.8.x download at least version 7.2.1
+## Installation options
 
-Set Kibana base path and index name in config/kibana.yml:
+### Docker compose installation (Recommended)
 
-```
-server.basePath: "/geonetwork/dashboards"
-server.rewriteBasePath: false
-```
+1. Use docker compose with the provided [docker-compose.yml](es/docker-compose.yml):
 
-Adapt if needed ```elasticsearch.url``` and ```server.host```.
-
-Start Kibana manually:
+    ```
+    cd es
+    docker-compose up
+    ```
 
-```
-cd kibana/bin
-./kibana
-```
+2. Check that it is running using your browser:
+
+    * Elasticsearch: http://localhost:9200
+    * Kibana: http://localhost:5601
 
 ## Maven installation
 
@@ -41,20 +37,26 @@ cd kibana/bin
 mvn exec:exec -Dkb-start
 ```
 
-## Docker compose installation
+## Manual installation
+
+1. Download Kibana 8.14.3 from https://www.elastic.co/downloads/kibana
+
+2. Set Kibana base path and index name in config/kibana.yml:
+
+    ```
+    server.basePath: "/geonetwork/dashboards"
+    server.rewriteBasePath: false
+    ```
+
+3. Adapt if needed ```elasticsearch.url``` and ```server.host```.
 
-1. Use docker compose with the provided [docker-compose.yml](docker-compose.yml):
+4. Start Kibana manually:
 
     ```
-    cd es
-    docker-compose up
+    cd kibana/bin
+    ./kibana
     ```
 
-3. Check that it is running using your browser:
-
-    * Elasticsearch: http://localhost:9200
-    * Kabana: http://localhost:5601
-
 ## Import Configuration
 
 1. Kibana should be running from:
@@ -69,16 +71,17 @@
 http://localhost:8080/geonetwork/dashboards
 ```
 
+
 ## Troubleshoot
 
 If it does not start properly, check Kibana log files (eg. it may fail if Elasticsearch version is not compatible with Kibana version).
 
-Visit Kibana in a browser using one of the above links and go to 'Saved Objects'.
Import export.ndjson from https://github.com/geonetwork/core-geonetwork/blob/4.0.x/es/es-dashboards/data/export.ndjson +Visit Kibana in a browser using one of the above links and go to 'Saved Objects'. Import export.ndjson from https://github.com/geonetwork/core-geonetwork/blob/main/es/es-dashboards/data/export.ndjson ### Production Use -Kibana can be installed from the debian files, and 7.3.2 is confirmed as working with Geonetwork 3.8.x. +Kibana can be installed from the debian files, and Kibana 8.14.3 is confirmed as working with Geonetwork 4.4.x. Set Kibana to start when the server starts up, using the instructions at https://www.elastic.co/guide/en/kibana/current/start-stop.html diff --git a/es/es-dashboards/pom.xml b/es/es-dashboards/pom.xml index cc49025ac53..19ccbe6aefe 100644 --- a/es/es-dashboards/pom.xml +++ b/es/es-dashboards/pom.xml @@ -24,12 +24,74 @@ 4.0.0 gn-es-dashboards GeoNetwork dashboard app based on Kibana - gn-es org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT + + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + check-readme + + enforce + + + + + Update README.md examples for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + esVersion = "${es.version}"; + print("Scanning README for " + esVersion); + + docker = Pattern.compile("Kibana (\\d.\\d\\d.\\d)"); + download = Pattern.compile("Download Kibana (\\d.\\d\\d.\\d)"); + + patterns = new Pattern[]{ docker, download}; + + readme = new BufferedReader(new FileReader("README.md")); + + number = 0; + while ((line = readme.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print("README.md:"+number+" FAILURE: " + line); + return false; + } + } + } + } + readme.close(); + true; + + + + + + + check-docker + + enforce + + + true + + + + + + + kb-download diff --git a/es/pom.xml b/es/pom.xml index 5e6a40550da..4397414592b 100644 --- a/es/pom.xml +++ b/es/pom.xml @@ -5,13 +5,112 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 gn-es GeoNetwork index using Elasticsearch pom + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + check-readme + + enforce + + + + + Update README.md examples for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + esVersion = "${es.version}"; + print("Scanning README for " + esVersion); + + docker = Pattern.compile("elasticsearch:(\\d.\\d\\d.\\d)"); + download = Pattern.compile("Download Elasticsearch (\\d.\\d\\d.\\d)"); + folder = Pattern.compile("es/elasticsearch-(\\d.\\d\\d.\\d)"); + + patterns = new Pattern[]{ docker, download, folder}; + + readme = new BufferedReader(new FileReader("README.md")); + + number = 0; + while ((line = readme.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print("README.md:"+number+" FAILURE: " + line); + return false; + } + } + } + } + readme.close(); + true; + + + + + + + check-docker + + enforce + + + + + Update docker-compose.yml for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + boolean scanDockerCompose(String filename){ + esVersion = "${es.version}"; + print("Scanning "+filename+" for " + esVersion); + + docker = Pattern.compile("elasticsearch:(\\d.\\d\\d.\\d)"); + kibana = Pattern.compile("kibana:(\\d.\\d\\d.\\d)"); + patterns = new Pattern[]{ docker, kibana}; + + reader = new BufferedReader(new 
FileReader("${project.basedir}"+"/"+filename)); + + number = 0; + while ((line = reader.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print(filename+":"+number+" FAILURE: " + line); + return false; + } + } + } + } + reader.close(); + return true; + } + + return scanDockerCompose("docker-compose.yml"); + + + + + + + + + + es-download diff --git a/estest/pom.xml b/estest/pom.xml index e9ba611bc41..2c447fa74dd 100644 --- a/estest/pom.xml +++ b/estest/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/events/pom.xml b/events/pom.xml index 8b8405f0b5e..f1a335196a4 100644 --- a/events/pom.xml +++ b/events/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT GeoNetwork Events diff --git a/harvesters/pom.xml b/harvesters/pom.xml index c830794b85b..7a69abe98fa 100644 --- a/harvesters/pom.xml +++ b/harvesters/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java index 09502913bfb..101f6fd78ab 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -28,6 +28,7 @@ import org.fao.geonet.domain.AbstractMetadata; import org.fao.geonet.domain.MetadataCategory; import org.fao.geonet.kernel.DataManager; +import org.fao.geonet.kernel.SchemaManager; import org.fao.geonet.kernel.datamanager.IMetadataManager; import org.fao.geonet.kernel.harvest.harvester.AbstractHarvester; import org.fao.geonet.kernel.harvest.harvester.AbstractParams; @@ -35,14 +36,20 @@ import org.fao.geonet.kernel.harvest.harvester.GroupMapper; import org.fao.geonet.kernel.harvest.harvester.Privileges; import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.kernel.setting.Settings; import org.fao.geonet.repository.MetadataCategoryRepository; import org.fao.geonet.repository.OperationAllowedRepository; +import org.fao.geonet.utils.Xml; import org.jdom.Element; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.StringUtils; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -62,7 +69,7 @@ public abstract class BaseAligner

extends AbstractAlig
     public final AtomicBoolean cancelMonitor;
 
-    public BaseAligner(AtomicBoolean cancelMonitor) {
+    protected BaseAligner(AtomicBoolean cancelMonitor) {
         this.cancelMonitor = cancelMonitor;
     }
@@ -71,7 +78,7 @@ public void addCategories(AbstractMetadata metadata, Iterable<String> categories
                               String serverCategory, boolean saveMetadata) {
         MetadataCategoryRepository metadataCategoryRepository = context.getBean(MetadataCategoryRepository.class);
 
-        Map<String, MetadataCategory> nameToCategoryMap = new HashMap<String, MetadataCategory>();
+        Map<String, MetadataCategory> nameToCategoryMap = new HashMap<>();
         for (MetadataCategory metadataCategory : metadataCategoryRepository.findAll()) {
             nameToCategoryMap.put("" + metadataCategory.getId(), metadataCategory);
         }
@@ -119,9 +126,9 @@ public void addPrivileges(String id, Iterable<Privileges> privilegesIterable, Gr
             String name = localGroups.getName(priv.getGroupId());
 
             if (name == null) {
-                LOGGER.debug("    - Skipping removed group with id:{}", priv.getGroupId());
+                LOGGER.debug("    - Skipping removed group with id: {}", priv.getGroupId());
             } else {
-                LOGGER.debug("    - Setting privileges for group : {}", name);
+                LOGGER.debug("    - Setting privileges for group: {}", name);
                 for (int opId : priv.getOperations()) {
                     name = dataManager.getAccessManager().getPrivilegeName(opId);
                     //--- all existing operation
@@ -133,4 +140,63 @@ public void addPrivileges(String id, Iterable<Privileges> privilegesIterable, Gr
             }
         }
     }
+
+    /**
+     * Applies an XSLT process (schema_folder/process/translate.xsl) that translates the metadata
+     * fields configured in the harvester into the configured languages, using the translation
+     * provider configured in the application settings.
+     *
+     * If no translation provider is configured, or if the schema doesn't have the translation xslt,
+     * the translation process is not applied to the metadata.
+     *
+     * @param context service context, used to look up the setting and schema managers.
+     * @param md      metadata to translate.
+     * @param schema  schema identifier of the metadata.
+     * @return the translated metadata, or the original metadata when translation is not applied.
+     */
+    public Element translateMetadataContent(ServiceContext context,
+                                            Element md,
+                                            String schema) {
+
+        SettingManager settingManager = context.getBean(SettingManager.class);
+
+        String translationProvider = settingManager.getValue(Settings.SYSTEM_TRANSLATION_PROVIDER);
+
+        if (!StringUtils.hasLength(translationProvider)) {
+            LOGGER.warn(" metadata content can't be translated. Translation provider not configured.");
+            return md;
+        }
+
+        if (!StringUtils.hasLength(params.getTranslateContentLangs()) ||
+            !StringUtils.hasLength(params.getTranslateContentFields())) {
+            LOGGER.warn(" metadata content can't be translated. No languages or fields provided to translate.");
+            return md;
+        }
+
+        SchemaManager schemaManager = context.getBean(SchemaManager.class);
+
+        Path filePath = schemaManager.getSchemaDir(schema).resolve("process").resolve("translate.xsl");
+
+        if (!Files.exists(filePath)) {
+            LOGGER.debug(String.format(" metadata content translation process not available for schema %s", schema));
+        } else {
+            Element processedMetadata;
+            try {
+                Map<String, Object> processParams = new HashMap<>();
+                List<String> langs = Arrays.asList(params.getTranslateContentLangs().split(","));
+                processParams.put("languages", langs);
+
+                List<String> fields = Arrays.asList(params.getTranslateContentFields().split("\\n"));
+                processParams.put("fieldsToTranslate", fields);
+
+                processedMetadata = Xml.transform(md, filePath, processParams);
+                LOGGER.debug(" metadata content translated.");
+                md = processedMetadata;
+            } catch (Exception e) {
+                LOGGER.warn(String.format(" metadata content translation error: %s", e.getMessage()));
+            }
+        }
+        return md;
+    }
+
 }
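To see how the two harvester settings feed the XSLT: translateContentLangs is split on commas into the "languages" parameter and translateContentFields on newlines into "fieldsToTranslate". A sketch with illustrative values only; the actual field expressions depend on what each schema's translate.xsl expects:

```java
// Illustrative configuration; the setters are the AbstractParams ones added in this PR,
// and the field names here are made-up examples.
params.setTranslateContent(true);
params.setTranslateContentLangs("fr,de");             // becomes the "languages" XSL parameter
params.setTranslateContentFields("title\nabstract");  // becomes "fieldsToTranslate", one entry per line

// Aligners then call, guarded by params.isTranslateContent():
Element translated = translateMetadataContent(context, md, schema);
```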
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java
index bec3d2cda1a..2398aa96c10 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java
@@ -1,5 +1,5 @@
 //=============================================================================
-//===	Copyright (C) 2001-2020 Food and Agriculture Organization of the
+//===	Copyright (C) 2001-2024 Food and Agriculture Organization of the
 //===	United Nations (FAO-UN), United Nations World Food Programme (WFP)
 //===	and United Nations Environment Programme (UNEP)
 //===
@@ -85,15 +85,7 @@
 import java.time.OffsetDateTime;
 import java.time.ZoneOffset;
 import java.time.format.DateTimeFormatter;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.UUID;
+import java.util.*;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.ReentrantLock;
@@ -140,8 +132,12 @@ public abstract class AbstractHarvester
         ownedByHarvester = Specification.where(MetadataSpecs.hasHarvesterUuid(getParams().getUuid()));
         Set<String> sources = new HashSet<>();
-        for (Integer id : metadataRepository.findAllIdsBy(ownedByHarvester)) {
-            sources.add(metadataUtils.findOne(id).getSourceInfo().getSourceId());
-            metadataManager.deleteMetadata(context, "" + id);
+        for (Integer metadataId : metadataRepository.findAllIdsBy(ownedByHarvester)) {
+            sources.add(metadataUtils.findOne(metadataId).getSourceInfo().getSourceId());
+            metadataManager.deleteMetadata(context, "" + metadataId);
         }
 
         // Remove all sources related to the harvestUuid if they are not linked to any record anymore
@@ -569,7 +565,10 @@ private void login() throws Exception {
         UserRepository repository = this.context.getBean(UserRepository.class);
         User user = null;
         if (StringUtils.isNotEmpty(ownerId)) {
-            user = repository.findById(Integer.parseInt(ownerId)).get();
+            Optional<User> userOptional = repository.findById(Integer.parseInt(ownerId));
+            if (userOptional.isPresent()) {
+                user = userOptional.get();
+            }
         }
 
         // for harvesters created before owner was added to the harvester code,
@@ -693,21 +692,21 @@ protected OperResult harvest() {
     private void
logHarvest(String logfile, Logger logger, String nodeName, String lastRun, long elapsedTime) { try { // record the results/errors for this harvest in the database - Element result = getResult(); + Element resultEl = getResult(); if (error != null) { - result = JeevesException.toElement(error); + resultEl = JeevesException.toElement(error); } - Element priorLogfile_ = result.getChild("logfile"); - if (priorLogfile_ != null) { + Element priorLogfileEl = resultEl.getChild("logfile"); + if (priorLogfileEl != null) { // removing prior logfile - logger.warning("Detected duplicate logfile: " + priorLogfile_.getText()); - result.getChildren().remove(priorLogfile_); + logger.warning("Detected duplicate logfile: " + priorLogfileEl.getText()); + resultEl.getChildren().remove(priorLogfileEl); } - Element logfile_ = new Element("logfile"); - logfile_.setText(logfile); - result.addContent(logfile_); + Element logfileEl = new Element("logfile"); + logfileEl.setText(logfile); + resultEl.addContent(logfileEl); - result.addContent(toElement(errors)); + resultEl.addContent(toElement(errors)); final HarvestHistoryRepository historyRepository = context.getBean(HarvestHistoryRepository.class); final HarvestHistory history = new HarvestHistory() .setHarvesterType(getType()) @@ -716,7 +715,7 @@ private void logHarvest(String logfile, Logger logger, String nodeName, String l .setElapsedTime((int) elapsedTime) .setHarvestDate(new ISODate(lastRun)) .setParams(getParams().getNodeElement()) - .setInfo(result); + .setInfo(resultEl); historyRepository.save(history); @@ -742,18 +741,18 @@ private void logHarvest(String logfile, Logger logger, String nodeName, String l */ private Element toElement(List errors) { Element res = new Element("errors"); - for (HarvestError error : errors) { + for (HarvestError harvestError : errors) { Element herror = new Element("error"); Element desc = new Element("description"); - desc.setText(error.getDescription()); + desc.setText(harvestError.getDescription()); herror.addContent(desc); Element hint = new Element("hint"); - hint.setText(error.getHint()); + hint.setText(harvestError.getHint()); herror.addContent(hint); - herror.addContent(JeevesException.toElement(error.getOrigin())); + herror.addContent(JeevesException.toElement(harvestError.getOrigin())); res.addContent(herror); } return res; @@ -810,8 +809,8 @@ private final String doAdd(Element node) throws BadInputEx, SQLException { //--- force the creation of a new uuid params.setUuid(UUID.randomUUID().toString()); - String id = harvesterSettingsManager.add("harvesting", "node", getType()); - storeNode(params, "id:" + id); + String nodeId = harvesterSettingsManager.add("harvesting", "node", getType()); + storeNode(params, "id:" + nodeId); Source source = new Source(params.getUuid(), params.getName(), params.getTranslations(), SourceType.harvester); final String icon = params.getIcon(); @@ -822,7 +821,7 @@ private final String doAdd(Element node) throws BadInputEx, SQLException { } context.getBean(SourceRepository.class).save(source); - return id; + return nodeId; } private void doUpdate(String id, Element node) throws BadInputEx, SQLException { @@ -915,6 +914,9 @@ private void storeNode(P params, String path) throws SQLException { harvesterSettingsManager.add(ID_PREFIX + contentId, "importxslt", params.getImportXslt()); harvesterSettingsManager.add(ID_PREFIX + contentId, "batchEdits", params.getBatchEdits()); harvesterSettingsManager.add(ID_PREFIX + contentId, "validate", params.getValidate()); + 
harvesterSettingsManager.add(ID_PREFIX + contentId, "translateContent", params.isTranslateContent());
+        harvesterSettingsManager.add(ID_PREFIX + contentId, "translateContentLangs", params.getTranslateContentLangs());
+        harvesterSettingsManager.add(ID_PREFIX + contentId, "translateContentFields", params.getTranslateContentFields());
 
         //--- setup stats node ----------------------------------------
 
@@ -948,8 +950,8 @@ private void storePrivileges(P params, String path) {
     private void storeCategories(P params, String path) {
         String categId = harvesterSettingsManager.add(path, "categories", "");
 
-        for (String id : params.getCategories()) {
-            harvesterSettingsManager.add(ID_PREFIX + categId, "category", id);
+        for (String cId : params.getCategories()) {
+            harvesterSettingsManager.add(ID_PREFIX + categId, "category", cId);
         }
     }
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java
index 0a405e2390b..3f67e5d82af 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java
@@ -1,5 +1,5 @@
 //=============================================================================
-//===	Copyright (C) 2001-2007 Food and Agriculture Organization of the
+//===	Copyright (C) 2001-2024 Food and Agriculture Organization of the
 //===	United Nations (FAO-UN), United Nations World Food Programme (WFP)
 //===	and United Nations Environment Programme (UNEP)
 //===
@@ -95,6 +95,10 @@ public enum OverrideUuid {
     private String ownerIdUser;
     private OverrideUuid overrideUuid;
 
+    private boolean translateContent;
+    private String translateContentLangs;
+    private String translateContentFields;
+
     /**
      * When more than one harvester harvests the same record, the record is usually rejected.
      * It can override the existing one, but the privileges are not preserved.
This option
@@ -200,6 +204,9 @@ public void create(Element node) throws BadInputEx {
         setImportXslt(Util.getParam(content, "importxslt", "none"));
         setBatchEdits(Util.getParam(content, "batchEdits", ""));
 
+        setTranslateContent(Util.getParam(content, "translateContent", false));
+        setTranslateContentLangs(Util.getParam(content, "translateContentLangs", ""));
+        setTranslateContentFields(Util.getParam(content, "translateContentFields", ""));
         this.setValidate(readValidateFromParams(content));
 
@@ -280,6 +287,9 @@ public void update(Element node) throws BadInputEx {
         setImportXslt(Util.getParam(content, "importxslt", "none"));
         setBatchEdits(Util.getParam(content, "batchEdits", getBatchEdits()));
 
+        setTranslateContent(Util.getParam(content, "translateContent", false));
+        setTranslateContentLangs(Util.getParam(content, "translateContentLangs", ""));
+        setTranslateContentFields(Util.getParam(content, "translateContentFields", ""));
         this.setValidate(readValidateFromParams(content));
 
         if (privil != null) {
@@ -330,7 +340,10 @@ protected void copyTo(AbstractParams copy) {
         copy.setImportXslt(getImportXslt());
         copy.setBatchEdits(getBatchEdits());
 
+        copy.setTranslateContent(isTranslateContent());
         copy.setValidate(getValidate());
+        copy.setTranslateContentLangs(getTranslateContentLangs());
+        copy.setTranslateContentFields(getTranslateContentFields());
 
         for (Privileges p : alPrivileges) {
             copy.addPrivilege(p.copy());
@@ -643,4 +655,28 @@ public String getBatchEdits() {
     public void setBatchEdits(String batchEdits) {
         this.batchEdits = batchEdits;
     }
+
+    public boolean isTranslateContent() {
+        return translateContent;
+    }
+
+    public void setTranslateContent(boolean translateContent) {
+        this.translateContent = translateContent;
+    }
+
+    public String getTranslateContentLangs() {
+        return translateContentLangs;
+    }
+
+    public void setTranslateContentLangs(String translateContentLangs) {
+        this.translateContentLangs = translateContentLangs;
+    }
+
+    public String getTranslateContentFields() {
+        return translateContentFields;
+    }
+
+    public void setTranslateContentFields(String translateContentFields) {
+        this.translateContentFields = translateContentFields;
+    }
 }
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/HarvesterUtil.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/HarvesterUtil.java
index cf30c71312c..ce411b33256 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/HarvesterUtil.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/HarvesterUtil.java
@@ -23,18 +23,19 @@
 package org.fao.geonet.kernel.harvest.harvester;
 
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+import org.fao.geonet.ApplicationContextHolder;
 import org.fao.geonet.constants.Geonet;
 import org.fao.geonet.domain.Pair;
+import org.fao.geonet.kernel.GeonetworkDataDirectory;
 import org.fao.geonet.kernel.schema.MetadataSchema;
 import org.fao.geonet.utils.Xml;
 import org.jdom.Element;
 import org.slf4j.LoggerFactory;
 
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.HashMap;
-import java.util.Map;
-
 /**
  * Created by francois on 3/7/14.
*/ @@ -74,8 +75,7 @@ public static Element processMetadata(MetadataSchema metadataSchema, Element md, String processName, Map processParams) { - - Path filePath = metadataSchema.getSchemaDir().resolve("process").resolve(processName + ".xsl"); + Path filePath = ApplicationContextHolder.get().getBean(GeonetworkDataDirectory.class).getXsltConversion(processName); if (!Files.exists(filePath)) { LOGGER.info(" processing instruction not found for {} schema. metadata not filtered.", metadataSchema.getName()); } else { diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java index d25c803cb68..b15c89ab302 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -26,6 +26,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.Optional; import org.fao.geonet.domain.AbstractMetadata; import org.fao.geonet.kernel.datamanager.IMetadataUtils; @@ -41,7 +42,7 @@ */ public class UriMapper { - private HashMap> hmUriRecords = new HashMap>(); + private HashMap> hmUriRecords = new HashMap<>(); //-------------------------------------------------------------------------- //--- @@ -49,21 +50,21 @@ public class UriMapper { //--- //-------------------------------------------------------------------------- - public UriMapper(ServiceContext context, String harvestUuid) throws Exception { + public UriMapper(ServiceContext context, String harvestUuid) { final IMetadataUtils metadataRepository = context.getBean(IMetadataUtils.class); final List metadataList = metadataRepository.findAll(MetadataSpecs.hasHarvesterUuid(harvestUuid)); - for (AbstractMetadata record : metadataList) { - String uri = record.getHarvestInfo().getUri(); + for (AbstractMetadata metadataRecord : metadataList) { + String uri = Optional.ofNullable(metadataRecord.getHarvestInfo().getUri()).orElse(""); - List records = hmUriRecords.get(uri); + List records = hmUriRecords.computeIfAbsent(uri, k -> new ArrayList<>()); if (records == null) { - records = new ArrayList(); + records = new ArrayList<>(); hmUriRecords.put(uri, records); } - records.add(new RecordInfo(record)); + records.add(new RecordInfo(metadataRecord)); } } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java index 7a224fa1345..5097d9a600c 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -232,7 +232,7 @@ private void 
insertOrUpdate(Collection records, Collection { private UUIDMapper localUuids; private String processName; private String preferredSchema; - private Map processParams = new HashMap(); + private Map processParams = new HashMap<>(); private MetadataRepository metadataRepository; - private Map> hmRemoteGroups = new HashMap>(); + private Map> hmRemoteGroups = new HashMap<>(); private SettingManager settingManager; public Aligner(AtomicBoolean cancelMonitor, Logger log, ServiceContext context, XmlRequest req, @@ -119,7 +120,7 @@ private void setupLocEntity(List list, Map> for (Element entity : list) { String name = entity.getChildText("name"); - Map hm = new HashMap(); + Map hm = new HashMap<>(); hmEntity.put(name, hm); @SuppressWarnings("unchecked") @@ -163,7 +164,7 @@ public HarvestResult align(SortedSet records, List err result.locallyRemoved++; } - } catch (Throwable t) { + } catch (Exception t) { log.error("Couldn't remove metadata with uuid " + uuid); log.error(t); result.unchangedMetadata++; @@ -197,7 +198,6 @@ public HarvestResult align(SortedSet records, List err String id = dataMan.getMetadataId(ri.uuid); // look up value of localrating/enable - SettingManager settingManager = context.getBean(SettingManager.class); String localRating = settingManager.getValue(Settings.SYSTEM_LOCALRATING_ENABLE); if (id == null) { @@ -216,7 +216,6 @@ public HarvestResult align(SortedSet records, List err params.useChangeDateForUpdate(), localUuids.getChangeDate(ri.uuid), true); log.info("Overriding record with uuid " + ri.uuid); - result.updatedMetadata++; if (params.isIfRecordExistAppendPrivileges()) { addPrivileges(id, params.getPrivileges(), localGroups, context); @@ -230,6 +229,7 @@ public HarvestResult align(SortedSet records, List err case SKIP: log.debug("Skipping record with uuid " + ri.uuid); result.uuidSkipped++; + break; default: break; } @@ -248,7 +248,7 @@ public HarvestResult align(SortedSet records, List err } } - } catch (Throwable t) { + } catch (Exception t) { log.error("Couldn't insert or update metadata with uuid " + ri.uuid); log.error(t); result.unchangedMetadata++; @@ -282,7 +282,7 @@ private Element extractValidMetadataForImport(DirectoryStream files, Eleme Log.debug(Geonet.MEF, "Multiple metadata files"); Map> mdFiles = - new HashMap>(); + new HashMap<>(); for (Path file : files) { if (Files.isRegularFile(file)) { Element metadata = Xml.loadFile(file); @@ -353,8 +353,8 @@ private Element extractValidMetadataForImport(DirectoryStream files, Eleme } private void addMetadata(final RecordInfo ri, final boolean localRating, String uuid) throws Exception { - final String id[] = {null}; - final Element md[] = {null}; + final String[] id = {null}; + final Element[] md = {null}; //--- import metadata from MEF file @@ -462,6 +462,11 @@ private String addMetadata(RecordInfo ri, Element md, Element info, boolean loca if (log.isDebugEnabled()) log.debug(" - Adding metadata with remote uuid:" + ri.uuid); + // Translate metadata + if (params.isTranslateContent()) { + md = translateMetadataContent(context, md, schema); + } + try { Integer groupIdVal = null; if (StringUtils.isNotEmpty(params.getOwnerIdGroup())) { @@ -595,13 +600,13 @@ private void addPrivilegesFromGroupPolicy(String id, Element privil) throws Exce } private Map> buildPrivileges(Element privil) { - Map> map = new HashMap>(); + Map> map = new HashMap<>(); for (Object o : privil.getChildren("group")) { Element group = (Element) o; String name = group.getAttributeValue("name"); - Set set = new HashSet(); + Set set = new 
HashSet<>(); map.put(name, set); for (Object op : group.getChildren("operation")) { @@ -662,9 +667,9 @@ private String createGroup(String name) throws Exception { */ private void updateMetadata(final RecordInfo ri, final String id, final boolean localRating, final boolean useChangeDate, String localChangeDate, Boolean force) throws Exception { - final Element md[] = {null}; - final Element publicFiles[] = {null}; - final Element privateFiles[] = {null}; + final Element[] md = {null}; + final Element[] publicFiles = {null}; + final Element[] privateFiles = {null}; if (localUuids.getID(ri.uuid) == null && !force) { if (log.isDebugEnabled()) @@ -743,6 +748,11 @@ private void updateMetadata(RecordInfo ri, String id, Element md, String date = localUuids.getChangeDate(ri.uuid); + // Translate metadata + if (params.isTranslateContent()) { + md = translateMetadataContent(context, md, ri.schema); + } + try { Integer groupIdVal = null; if (StringUtils.isNotEmpty(params.getOwnerIdGroup())) { @@ -756,7 +766,6 @@ private void updateMetadata(RecordInfo ri, String id, Element md, return; } - final IMetadataManager metadataManager = context.getBean(IMetadataManager.class); Metadata metadata; if (!force && !ri.isMoreRecentThan(date)) { if (log.isDebugEnabled()) @@ -883,12 +892,18 @@ private void saveFile(final Store store, String metadataUuid, String file, ISODate remIsoDate = new ISODate(changeDate); boolean saveFile; - final MetadataResource description = store.getResourceDescription(context, metadataUuid, visibility, file, true); - if (description == null) { - saveFile = true; - } else { - ISODate locIsoDate = new ISODate(description.getLastModification().getTime(), false); + Store.ResourceHolder resourceHolder; + try { + resourceHolder = store.getResource(context, metadataUuid, visibility, file, true); + } catch (ResourceNotFoundException ex) { + resourceHolder = null; + } + + if ((resourceHolder != null) && (resourceHolder.getMetadata() != null)) { + ISODate locIsoDate = new ISODate(resourceHolder.getMetadata().getLastModification().getTime(), false); saveFile = (remIsoDate.timeDifferenceInSeconds(locIsoDate) > 0); + } else { + saveFile = true; } if (saveFile) { diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java index 312a0285b5f..640ddbee67b 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java @@ -45,7 +45,6 @@ import org.fao.geonet.kernel.harvest.harvester.HarvestResult; import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.repository.MetadataRepository; -import org.fao.geonet.repository.OperationAllowedRepository; import org.fao.geonet.repository.specification.MetadataSpecs; import org.fao.geonet.utils.IO; import org.jdom.Element; @@ -146,6 +145,12 @@ void updateMetadata(Element xml, final String id, GroupMapper localGroups, final String language = context.getLanguage(); + // Translate metadata + if (params.isTranslateContent()) { + String schema = dataMan.getMetadataSchema(id); + xml = aligner.translateMetadataContent(context, xml, schema); + } + final AbstractMetadata metadata = metadataManager.updateMetadata(context, id, xml, false, false, language, changeDate, true, IndexingMode.none); @@ -158,8 +163,6 @@ void 
updateMetadata(Element xml, final String id, GroupMapper localGroups, final metadataManager.save(metadata); } - OperationAllowedRepository repository = context.getBean(OperationAllowedRepository.class); - repository.deleteAllByMetadataId(Integer.parseInt(id)); aligner.addPrivileges(id, params.getPrivileges(), localGroups, context); metadata.getCategories().clear(); @@ -193,6 +196,12 @@ String addMetadata(Element xml, String uuid, String schema, GroupMapper localGro if (!uuid.equals(xmlUuid)) { md = metadataUtils.setUUID(schema, uuid, md); } + + // Translate metadata + if (params.isTranslateContent()) { + md = aligner.translateMetadataContent(context, md, schema); + } + metadata.getDataInfo(). setSchemaId(schema). setRoot(xml.getQualifiedName()). diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFsHarvesterFileVisitor.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFsHarvesterFileVisitor.java index c188611e549..791f9a17cda 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFsHarvesterFileVisitor.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFsHarvesterFileVisitor.java @@ -54,8 +54,6 @@ import org.json.JSONException; import org.json.JSONObject; import org.json.XML; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import java.io.IOException; @@ -75,7 +73,6 @@ * @author Jesse on 11/6/2014. */ class LocalFsHarvesterFileVisitor extends SimpleFileVisitor { - private Logger LOGGER = LoggerFactory.getLogger(Geonet.HARVESTER); private final LocalFilesystemParams params; private final DataManager dataMan; @@ -110,9 +107,7 @@ public LocalFsHarvesterFileVisitor(AtomicBoolean cancelMonitor, ServiceContext c this.repo = context.getBean(IMetadataUtils.class); this.startTime = System.currentTimeMillis(); - String harvesterName = params.getName().replaceAll("\\W+", "_"); - LOGGER = LoggerFactory.getLogger(harvesterName); - LOGGER.debug("Start visiting files at {}.", this.startTime); + harvester.getLogger().debug(String.format("Start visiting files at %s.", this.startTime)); } @Override @@ -136,9 +131,9 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO try { result.totalMetadata++; - if (LOGGER.isDebugEnabled() && result.totalMetadata % 1000 == 0) { + if (harvester.getLogger().isDebugEnabled() && result.totalMetadata % 1000 == 0) { long elapsedTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime); - LOGGER.debug("{} records inserted in {} s ({} records/s).", new Object[] { + harvester.getLogger().debug("{} records inserted in {} s ({} records/s).", new Object[] { result.totalMetadata, elapsedTime, result.totalMetadata / elapsedTime}); @@ -152,7 +147,7 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO processXml(file); } } catch (Exception e) { - LOGGER.error("An error occurred while harvesting file {}. Error is: {}.", + harvester.getLogger().error("An error occurred while harvesting file {}. 
Error is: {}.", file.toAbsolutePath().normalize(), e.getMessage()); } return FileVisitResult.CONTINUE; @@ -168,7 +163,7 @@ private void processJson(Path file) throws Exception { ObjectMapper objectMapper = new ObjectMapper(); Element recordAsElement; try { - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug("reading file: {}", filePath); String uuid = com.google.common.io.Files.getNameWithoutExtension(file.getFileName().toString()); String recordAsJson = objectMapper.readTree(filePath.toFile()).toString(); JSONObject sanitizedJson = sanitize(new JSONObject(recordAsJson)); @@ -180,18 +175,18 @@ private void processJson(Path file) throws Exception { recordAsElement = Xml.loadString(recordAsXml, false); recordAsElement.addContent(new Element("uuid").setText(uuid)); } catch (JsonProcessingException e) { - LOGGER.error("Error processing JSON from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error processing JSON from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (JDOMException e) { - LOGGER.error("Error transforming JSON into XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error transforming JSON into XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (Exception e) { - LOGGER.error("Error retrieving JSON from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error retrieving JSON from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.unretrievable++; return; } @@ -241,16 +236,16 @@ private void processXml(Path file) throws Exception { Element xml; try { - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug(String.format("reading file: %s", filePath)); xml = Xml.loadFile(file); } catch (JDOMException e) { - LOGGER.error("Error loading XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error loading XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (Exception e) { - LOGGER.error("Error retrieving XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error retrieving XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.unretrievable++; return; } @@ -266,7 +261,7 @@ private void processXmlData(Path file, Element rawXml) throws Exception { try { xml = Xml.transform(xml, thisXslt); } catch (Exception e) { - LOGGER.error("Cannot transform XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); + harvester.getLogger().error("Cannot transform XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); result.badFormat++; return; } @@ -288,7 +283,7 @@ private void processXmlData(Path file, Element rawXml) throws Exception { params.getValidate().validate(dataMan, context, xml, groupIdVal); } catch (Exception e) { - LOGGER.error("Cannot validate XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); + harvester.getLogger().error("Cannot validate XML from file {}, ignoring. 
Error was: {}", filePath, e.getMessage()); result.doesNotValidate++; return; } @@ -315,14 +310,14 @@ private void processXmlData(Path file, Element rawXml) throws Exception { updateMetadata(file, filePath, xml, schema, id, metadata, true); break; case RANDOM: - LOGGER.debug("Generating random uuid for remote record with uuid " + metadata.getUuid()); + harvester.getLogger().debug("Generating random uuid for remote record with uuid " + metadata.getUuid()); String createDate = getCreateDate(file, xml, schema, uuid); String newUuid = UUID.randomUUID().toString(); id = addMetadata(xml, schema, newUuid, createDate); break; case SKIP: - LOGGER.debug("Skipping record with uuid " + metadata.getUuid()); + harvester.getLogger().debug("Skipping record with uuid " + metadata.getUuid()); result.uuidSkipped++; result.unchangedMetadata++; @@ -351,7 +346,7 @@ private String getCreateDate(Path file, Element xml, String schema, String uuid) try { createDate = dataMan.extractDateModified(schema, xml); } catch (Exception ex) { - LOGGER.error("LocalFilesystemHarvester - addMetadata - can't get metadata modified date for metadata uuid= {} " + + harvester.getLogger().error("LocalFilesystemHarvester - addMetadata - can't get metadata modified date for metadata uuid= {} " + "using current date for modified date", uuid); createDate = new ISODate().toString(); } @@ -376,25 +371,25 @@ private void updateMetadata(Path file, Path filePath, Element xml, String schema String changeDate = new ISODate(fileDate.getTime(), false).getDateAndTime(); - LOGGER.debug(" File date is: {} / record date is: {}", filePath, modified); + harvester.getLogger().debug(" File date is: {} / record date is: {}", filePath, modified); if (DateUtils.truncate(recordDate, Calendar.SECOND) .before(DateUtils.truncate(fileDate, Calendar.SECOND))) { - LOGGER.debug(" Db record is older than file. Updating record with id: {}", id); + harvester.getLogger().debug(String.format(" Db record is older than file. Updating record with id: %s", id)); updateMedata(xml, id, changeDate, force); } else { - LOGGER.debug(" Db record is not older than last modified date of file. No need for update."); + harvester.getLogger().debug(" Db record is not older than last modified date of file. 
No need for update."); result.unchangedMetadata++; } } else { - LOGGER.debug(" updating existing metadata, id is: " + id); + harvester.getLogger().debug(" updating existing metadata, id is: " + id); String changeDate; try { changeDate = dataMan.extractDateModified(schema, xml); } catch (Exception ex) { - LOGGER.error("LocalFilesystemHarvester - updateMetadata - can't get metadata modified date for " + + harvester.getLogger().error("LocalFilesystemHarvester - updateMetadata - can't get metadata modified date for " + "metadata id= {}, using current date for modified date", id); changeDate = new ISODate().toString(); } @@ -406,7 +401,7 @@ private void updateMetadata(Path file, Path filePath, Element xml, String schema private void processMef(Path file) { Path filePath = file.toAbsolutePath().normalize(); - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug(String.format("reading file: %s", filePath)); try { String xsl = params.getImportXslt(); MEFLib.Version version = MEFLib.getMEFVersion(file); @@ -439,7 +434,7 @@ private void processMef(Path file) { params.getValidate() != NOVALIDATION, false, context, file); for (String id : ids) { - LOGGER.debug("Metadata imported from MEF: {}", id); + harvester.getLogger().debug(String.format("Metadata imported from MEF: %s", id)); context.getBean(MetadataRepository.class).update(Integer.valueOf(id), new Updater() { @Override public void apply(@Nonnull final Metadata metadata) { @@ -454,8 +449,8 @@ public void apply(@Nonnull final Metadata metadata) { result.addedMetadata++; } } catch (Exception e) { - LOGGER.error("Error retrieving MEF from file {}, ignoring", filePath); - LOGGER.error("Error: ", e); + harvester.getLogger().error("Error retrieving MEF from file {}, ignoring", filePath); + harvester.getLogger().error("Error: ", e); result.unretrievable++; } } @@ -465,26 +460,26 @@ private String getUuidFromFile(Element xml, Path filePath, String schema) { try { uuid = dataMan.extractUUID(schema, xml); } catch (Exception e) { - LOGGER.debug("Failed to extract metadata UUID for file {}" + + harvester.getLogger().debug("Failed to extract metadata UUID for file {}" + " using XSL extract-uuid. The record is probably " + "a subtemplate. 
Will check uuid attribute on root element.", filePath); // Extract UUID from uuid attribute in subtemplates String uuidAttribute = xml.getAttributeValue("uuid"); if (uuidAttribute != null) { - LOGGER.debug("Found uuid attribute {} for file {}.", uuidAttribute, filePath); + harvester.getLogger().debug("Found uuid attribute {} for file {}.", uuidAttribute, filePath); uuid = uuidAttribute; } else { // Assigning a new UUID uuid = UUID.randomUUID().toString(); - LOGGER.debug("No UUID found, the record will be assigned a random uuid {} for file {}.", uuid, filePath); + harvester.getLogger().debug("No UUID found, the record will be assigned a random uuid {} for file {}.", uuid, filePath); } } return uuid; } private String addMetadata(Element xml, String schema, String uuid, String createDate) throws Exception { - LOGGER.debug("adding new metadata"); + harvester.getLogger().debug("adding new metadata"); String id = harvester.addMetadata(xml, uuid, schema, localGroups, localCateg, createDate, aligner, false); listOfRecordsToIndex.add(Integer.valueOf(id)); result.addedMetadata++; diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java index e22b3dc96be..79bc1fb174b 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java @@ -396,6 +396,11 @@ private void addMetadata(XmlRequest t, RecordInfo ri, String processName, Map { public HarvestResult getResult() { return result; } - private Map processParams = new HashMap(); + private Logger log; public Aligner(AtomicBoolean cancelMonitor, ServiceContext sc, SimpleUrlParams params, Logger log) throws OperationAbortedEx { @@ -142,6 +141,7 @@ private void insertOrUpdate(Map records, Collection records) throws Exception } - private void addMetadata(Map.Entry record, String overrideUuidValue) throws Exception { + private void addMetadata(Map.Entry recordInfo, String overrideUuidValue) throws Exception { if (cancelMonitor.get()) { return; } - Element xml = record.getValue(); + Element xml = recordInfo.getValue(); if (xml == null) { result.unretrievable++; return; @@ -205,20 +205,25 @@ private void addMetadata(Map.Entry record, String overrideUuidV String schema = dataMan.autodetectSchema(xml, null); if (schema == null) { - log.debug(" - Metadata skipped due to unknown schema. uuid:" + record.getKey()); + log.debug(" - Metadata skipped due to unknown schema. 
uuid:" + recordInfo.getKey()); result.unknownSchema++; return; } - String uuid = record.getKey(); + String uuid = recordInfo.getKey(); if (overrideUuidValue != null) { - log.debug(String.format(" - Overriding UUID %s by %s", record.getKey(), overrideUuidValue)); + log.debug(String.format(" - Overriding UUID %s by %s", recordInfo.getKey(), overrideUuidValue)); uuid = overrideUuidValue; - xml = dataMan.setUUID(schema, uuid, record.getValue()); + xml = dataMan.setUUID(schema, uuid, recordInfo.getValue()); } applyBatchEdits(uuid, xml, schema, params.getBatchEdits(), context, null); + // Translate metadata + if (params.isTranslateContent()) { + xml = translateMetadataContent(context, xml, schema); + } + log.debug(" - Adding metadata with uuid:" + uuid + " schema:" + schema); final String dateModified = dataMan.extractDateModified(schema, xml); @@ -274,12 +279,12 @@ boolean updateMetadata(Map.Entry ri, String id, Boolean force) final AbstractMetadata metadata = metadataManager.updateMetadata(context, id, md, validate, ufo, language, dateModified, true, IndexingMode.none); - if (force) { + if (Boolean.TRUE.equals(force)) { //change ownership of metadata to new harvester metadata.getHarvestInfo().setUuid(params.getUuid()); metadata.getSourceInfo().setSourceId(params.getUuid()); - metadataManager.save((Metadata) metadata); + metadataManager.save(metadata); } OperationAllowedRepository repository = context.getBean(OperationAllowedRepository.class); diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/Harvester.java index 2cd1100dc6d..254fac91f84 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/Harvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/Harvester.java @@ -105,6 +105,7 @@ public HarvestResult harvest(Logger log) throws Exception { String[] urlList = params.url.split("\n"); boolean error = false; Aligner aligner = new Aligner(cancelMonitor, context, params, log); + Set listOfUuids = new HashSet<>(); for (String url : urlList) { log.debug("Loading URL: " + url); @@ -151,7 +152,6 @@ public HarvestResult harvest(Logger log) throws Exception { params.numberOfRecordPath, e.getMessage())); } } - Map allUuids = new HashMap<>(); try { List listOfUrlForPages = buildListOfUrl(params, numberOfRecordsToHarvest); for (int i = 0; i < listOfUrlForPages.size(); i++) { @@ -166,7 +166,6 @@ public HarvestResult harvest(Logger log) throws Exception { if (StringUtils.isNotEmpty(params.loopElement) || type == SimpleUrlResourceType.RDFXML) { Map uuids = new HashMap<>(); - try { if (type == SimpleUrlResourceType.XML) { collectRecordsFromXml(xmlObj, uuids, aligner); @@ -176,7 +175,7 @@ public HarvestResult harvest(Logger log) throws Exception { collectRecordsFromJson(jsonObj, uuids, aligner); } aligner.align(uuids, errors); - allUuids.putAll(uuids); + listOfUuids.addAll(uuids.keySet()); } catch (Exception e) { errors.add(new HarvestError(this.context, e)); log.error(String.format("Failed to collect record in response at path %s. 
Error is: %s", @@ -184,7 +183,6 @@ public HarvestResult harvest(Logger log) throws Exception { } } } - aligner.cleanupRemovedRecords(allUuids.keySet()); } catch (Exception t) { error = true; log.error("Unknown error trying to harvest"); @@ -198,11 +196,12 @@ public HarvestResult harvest(Logger log) throws Exception { errors.add(new HarvestError(context, t)); } - log.info("Total records processed in all searches :" + allUuids.size()); + log.info("Total records processed in all searches :" + listOfUuids.size()); if (error) { log.warning("Due to previous errors the align process has not been called"); } } + aligner.cleanupRemovedRecords(listOfUuids); return aligner.getResult(); } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java index 789cbc44ba2..cf8717e5213 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -22,32 +22,21 @@ //============================================================================== package org.fao.geonet.kernel.harvest.harvester.webdav; -import java.util.LinkedList; -import java.util.List; -import java.util.UUID; +import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.lang.StringUtils; import org.fao.geonet.GeonetContext; import org.fao.geonet.Logger; import org.fao.geonet.constants.Geonet; -import org.fao.geonet.domain.AbstractMetadata; -import org.fao.geonet.domain.ISODate; -import org.fao.geonet.domain.Metadata; -import org.fao.geonet.domain.MetadataType; +import org.fao.geonet.domain.*; import org.fao.geonet.exceptions.NoSchemaMatchesException; import org.fao.geonet.kernel.DataManager; import org.fao.geonet.kernel.SchemaManager; import org.fao.geonet.kernel.UpdateDatestamp; import org.fao.geonet.kernel.datamanager.IMetadataManager; import org.fao.geonet.kernel.harvest.BaseAligner; -import org.fao.geonet.kernel.harvest.harvester.CategoryMapper; -import org.fao.geonet.kernel.harvest.harvester.GroupMapper; -import org.fao.geonet.kernel.harvest.harvester.HarvestError; -import org.fao.geonet.kernel.harvest.harvester.HarvestResult; -import org.fao.geonet.kernel.harvest.harvester.IHarvester; -import org.fao.geonet.kernel.harvest.harvester.RecordInfo; -import org.fao.geonet.kernel.harvest.harvester.UriMapper; +import org.fao.geonet.kernel.harvest.harvester.*; import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.repository.MetadataRepository; import org.fao.geonet.repository.OperationAllowedRepository; @@ -94,7 +83,9 @@ class Harvester extends BaseAligner implements IHarvester errors = new LinkedList(); + private List errors = new LinkedList<>(); + private String processName; + private Map processParams = new HashMap<>(); public Harvester(AtomicBoolean cancelMonitor, Logger log, ServiceContext context, WebDavParams params) { super(cancelMonitor); @@ -154,6 +145,10 @@ private void align(final List files) throws Exception { localGroups = new GroupMapper(context); localUris 
= new UriMapper(context, params.getUuid()); + Pair> filter = HarvesterUtil.parseXSLFilter(params.xslfilter); + processName = filter.one(); + processParams = filter.two(); + //----------------------------------------------------------------------- //--- remove old metadata for (final String uri : localUris.getUris()) { @@ -259,6 +254,7 @@ private void addMetadata(RemoteFile rf) throws Exception { case SKIP: log.info("Skipping record with uuid " + uuid); result.uuidSkipped++; + return; default: return; } @@ -287,6 +283,18 @@ private void addMetadata(RemoteFile rf) throws Exception { if (log.isDebugEnabled()) log.debug(" - Adding metadata with remote path : " + rf.getPath()); + // Translate metadata + if (params.isTranslateContent()) { + md = translateMetadataContent(context, md, schema); + } + + if (StringUtils.isNotEmpty(params.xslfilter)) { + md = HarvesterUtil.processMetadata(dataMan.getSchema(schema), + md, processName, processParams); + + schema = dataMan.autodetectSchema(md); + } + // // insert metadata // @@ -305,6 +313,11 @@ private void addMetadata(RemoteFile rf) throws Exception { date = rf.getChangeDate(); } } + + if (date == null) { + date = new ISODate(); + } + AbstractMetadata metadata = new Metadata(); metadata.setUuid(uuid); metadata.getDataInfo(). @@ -380,11 +393,11 @@ private Element retrieveMetadata(RemoteFile rf) { * harvester are applied. Also, it changes the ownership of the record so it is assigned to the * new harvester that last updated it. * @param rf - * @param record + * @param recordInfo * @param force * @throws Exception */ - private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) throws Exception { + private void updateMetadata(RemoteFile rf, RecordInfo recordInfo, boolean force) throws Exception { Element md = null; // Get the change date from the metadata content. 
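For context on the xslfilter support added above: the setting is parsed once in align() into a process name plus a parameter map, applied to each record, and the schema is re-detected afterwards because the process may emit a record in a different schema. A sketch, assuming parseXSLFilter returns a Pair of name and parameters, as its usage in this hunk suggests:

    // e.g. params.xslfilter = "anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org"
    Pair<String, Map<String, Object>> filter = HarvesterUtil.parseXSLFilter(params.xslfilter);
    String processName = filter.one();                 // "anonymizer"
    Map<String, Object> processParams = filter.two();  // {protocol=..., email=...}

    md = HarvesterUtil.processMetadata(dataMan.getSchema(schema), md, processName, processParams);
    schema = dataMan.autodetectSchema(md);             // the process may change the schema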
If not possible, get it from the file change date if available @@ -406,8 +419,8 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr //Update only if different String uuid = dataMan.extractUUID(schema, md); - if (!record.uuid.equals(uuid)) { - md = dataMan.setUUID(schema, record.uuid, md); + if (!recordInfo.uuid.equals(uuid)) { + md = dataMan.setUUID(schema, recordInfo.uuid, md); } } catch (Exception e) { log.error(" - Failed to set uuid for metadata with remote path : " + rf.getPath()); @@ -419,7 +432,7 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr date = dataMan.extractDateModified(schema, md); } catch (Exception ex) { log.error("WebDavHarvester - updateMetadata - Can't get metadata modified date for metadata id= " - + record.id + ", using current date for modified date"); + + recordInfo.id + ", using current date for modified date"); // WAF harvester, rf.getChangeDate() returns null if (rf.getChangeDate() != null) { date = rf.getChangeDate().getDateAndTime(); @@ -429,7 +442,7 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr } - if (!force && !rf.isMoreRecentThan(record.changeDate)) { + if (!force && !rf.isMoreRecentThan(recordInfo.changeDate)) { if (log.isDebugEnabled()) log.debug(" - Metadata XML not changed for path : " + rf.getPath()); result.unchangedMetadata++; @@ -449,8 +462,8 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr //Update only if different String uuid = dataMan.extractUUID(schema, md); - if (!record.uuid.equals(uuid)) { - md = dataMan.setUUID(schema, record.uuid, md); + if (!recordInfo.uuid.equals(uuid)) { + md = dataMan.setUUID(schema, recordInfo.uuid, md); } } catch (Exception e) { log.error(" - Failed to set uuid for metadata with remote path : " + rf.getPath()); @@ -462,7 +475,7 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr date = dataMan.extractDateModified(schema, md); } catch (Exception ex) { log.error("WebDavHarvester - updateMetadata - Can't get metadata modified date for metadata id= " - + record.id + ", using current date for modified date"); + + recordInfo.id + ", using current date for modified date"); // WAF harvester, rf.getChangeDate() returns null if (rf.getChangeDate() != null) { date = rf.getChangeDate().getDateAndTime(); @@ -470,7 +483,15 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr } } + // Translate metadata + if (params.isTranslateContent()) { + md = translateMetadataContent(context, md, schema); + } + if (StringUtils.isNotEmpty(params.xslfilter)) { + md = HarvesterUtil.processMetadata(dataMan.getSchema(schema), + md, processName, processParams); + } // // update metadata @@ -479,7 +500,7 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr boolean ufo = false; String language = context.getLanguage(); - final AbstractMetadata metadata = metadataManager.updateMetadata(context, record.id, md, validate, ufo, language, + final AbstractMetadata metadata = metadataManager.updateMetadata(context, recordInfo.id, md, validate, ufo, language, date, false, IndexingMode.none); if(force) { @@ -493,15 +514,15 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr //--- the administrator could change privileges and categories using the //--- web interface so we have to re-set both OperationAllowedRepository repository = context.getBean(OperationAllowedRepository.class); - 
repository.deleteAllByMetadataId(Integer.parseInt(record.id)); - addPrivileges(record.id, params.getPrivileges(), localGroups, context); + repository.deleteAllByMetadataId(Integer.parseInt(recordInfo.id)); + addPrivileges(recordInfo.id, params.getPrivileges(), localGroups, context); metadata.getCategories().clear(); addCategories(metadata, params.getCategories(), localCateg, context, null, true); dataMan.flush(); - dataMan.indexMetadata(record.id, true); + dataMan.indexMetadata(recordInfo.id, true); } } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavHarvester.java index e6cc3af1a9d..e745a5b3311 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavHarvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavHarvester.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -28,40 +28,23 @@ import java.sql.SQLException; -//============================================================================= - public class WebDavHarvester extends AbstractHarvester { - //--------------------------------------------------------------------------- - //--- - //--- Add - //--- - //--------------------------------------------------------------------------- - @Override protected WebDavParams createParams() { return new WebDavParams(dataMan); } //--------------------------------------------------------------------------- + @Override protected void storeNodeExtra(WebDavParams params, String path, String siteId, String optionsId) throws SQLException { harvesterSettingsManager.add("id:" + siteId, "url", params.url); harvesterSettingsManager.add("id:" + siteId, "icon", params.icon); harvesterSettingsManager.add("id:" + optionsId, "validate", params.getValidate()); harvesterSettingsManager.add("id:" + optionsId, "recurse", params.recurse); harvesterSettingsManager.add("id:" + optionsId, "subtype", params.subtype); + harvesterSettingsManager.add("id:" + siteId, "xslfilter", params.xslfilter); } - //--------------------------------------------------------------------------- - //--- - //--- Variables - //--- - //--------------------------------------------------------------------------- - - //--------------------------------------------------------------------------- - //--- - //--- Harvest - //--- - //--------------------------------------------------------------------------- public void doHarvest(Logger log) throws Exception { log.info("WebDav doHarvest start"); Harvester h = new Harvester(cancelMonitor, log, context, params); diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavParams.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavParams.java index d264bb908fb..c32bfd40cda 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavParams.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavParams.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and 
Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -29,61 +29,44 @@ import org.fao.geonet.kernel.harvest.harvester.AbstractParams; import org.jdom.Element; -//============================================================================= - public class WebDavParams extends AbstractParams { - //-------------------------------------------------------------------------- - //--- - //--- Constructor - //--- - //-------------------------------------------------------------------------- + /** * url of webdav folder to harvest */ public String url; - //--------------------------------------------------------------------------- - //--- - //--- Create : called when a new entry must be added. Reads values from the - //--- provided entry, providing default values - //--- - //--------------------------------------------------------------------------- /** * Icon to use for harvester */ public String icon; - //--------------------------------------------------------------------------- - //--- - //--- Update : called when an entry has changed and variables must be updated - //--- - //--------------------------------------------------------------------------- /** * If true recurse into directories. */ public boolean recurse; - //--------------------------------------------------------------------------- - //--- - //--- Other API methods - //--- - //--------------------------------------------------------------------------- /** * Flag indicating if WAFRetriever or WebDavRetriever should be used. */ public String subtype; - //--------------------------------------------------------------------------- - //--- - //--- Variables - //--- - //--------------------------------------------------------------------------- + /** + * The filter is a process (see schema/process folder) which depends on the schema. It could be + * composed of parameter which will be sent to XSL transformation using the following syntax : + *

+     * <pre>
+     * anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org&thesaurus=MYORGONLYTHESAURUS
+     * </pre>
+ */ + public String xslfilter; + public WebDavParams(DataManager dm) { super(dm); } + @Override public void create(Element node) throws BadInputEx { super.create(node); @@ -92,12 +75,14 @@ public void create(Element node) throws BadInputEx { url = Util.getParam(site, "url", ""); icon = Util.getParam(site, "icon", ""); + xslfilter = Util.getParam(site, "xslfilter", ""); recurse = Util.getParam(opt, "recurse", false); subtype = Util.getParam(opt, "subtype", ""); } + @Override public void update(Element node) throws BadInputEx { super.update(node); @@ -106,6 +91,7 @@ public void update(Element node) throws BadInputEx { url = Util.getParam(site, "url", url); icon = Util.getParam(site, "icon", icon); + xslfilter = Util.getParam(site, "xslfilter", ""); recurse = Util.getParam(opt, "recurse", recurse); subtype = Util.getParam(opt, "subtype", subtype); @@ -117,6 +103,7 @@ public WebDavParams copy() { copy.url = url; copy.icon = icon; + copy.xslfilter = xslfilter; copy.setValidate(getValidate()); copy.recurse = recurse; @@ -131,7 +118,3 @@ public String getIcon() { return icon; } } - -//============================================================================= - - diff --git a/healthmonitor/pom.xml b/healthmonitor/pom.xml index 805dae47790..6f71092a00f 100644 --- a/healthmonitor/pom.xml +++ b/healthmonitor/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java b/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java index d3afc90e4f1..3c60aa7f000 100644 --- a/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java +++ b/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -44,7 +44,8 @@ protected Result check() throws Exception { GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); EsSearchManager searchMan = gc.getBean(EsSearchManager.class); - long numDocs = searchMan.getNumDocs("+" + IndexFields.INDEXING_ERROR_FIELD + ":true"); + long numDocs = searchMan.getNumDocs("-" + IndexFields.INDEXING_ERROR_MSG + ".type:warning +" + + IndexFields.INDEXING_ERROR_FIELD + ":true"); if (numDocs > 0) { return Result.unhealthy(String.format("Found %d metadata that had errors during indexing", numDocs)); diff --git a/index/pom.xml b/index/pom.xml index 9e82d3e9130..ec7d47af0ea 100644 --- a/index/pom.xml +++ b/index/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 gn-index diff --git a/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java b/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java index 2f031f726b2..4b21f7c61a3 100644 --- a/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java +++ b/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java @@ -413,47 +413,23 @@ public Map getDocument(String index, String id) throws Exception /** * Query the index for a specific record and return values for a set of fields. 
*/ - public Map getFieldsValues(String index, String id, Set fields) throws IOException { + public Map getFieldsValues(String index, String id, Set fields, String language) throws Exception { if (!activated) { return Collections.emptyMap(); } - Map fieldValues = new HashMap<>(fields.size()); - try { - String query = String.format("_id:\"%s\"", id); - // TODO: Check maxRecords - // TODO: Use _doc API? - - - final SearchResponse searchResponse = this.query(index, query, null, fields, new HashMap<>(), 0, 1, null); - - List totalHits = searchResponse.hits().hits(); - long matches = totalHits.size(); - if (matches == 0) { - return fieldValues; - } else if (matches == 1) { - final Hit hit = totalHits.get(0); - - fields.forEach(f -> { - final Object o = hit.fields().get(f); - if (o instanceof String) { - fieldValues.put(f, (String) o); - } else if (o instanceof HashMap && f.endsWith("Object")) { - fieldValues.put(f, (String) ((HashMap) o).get("default")); - } - }); - } else { - throw new IOException(String.format( - "Your query '%s' returned more than one record, %d in fact. Can't retrieve field values for more than one record.", - query, - matches - )); + Map fieldValues = new HashMap<>(); + Map sources = getDocument(index, id); + + for (String field : fields) { + Object value = sources.get(field); + if (value instanceof String) { + fieldValues.put(field, (String) value); + } else if (value instanceof Map && field.endsWith("Object")) { + Map valueMap = (Map) value; + String languageValue = (String) valueMap.get("lang" + language); + fieldValues.put(field, languageValue != null ? languageValue : (String) valueMap.get("default")); } - - } catch (Exception e) { - throw new IOException(String.format( - "Error during fields value retrieval. Errors is '%s'.", e.getMessage() - )); } return fieldValues; } diff --git a/inspire-atom/pom.xml b/inspire-atom/pom.xml index 31849c58ccb..7d4bf0cf74f 100644 --- a/inspire-atom/pom.xml +++ b/inspire-atom/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java b/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java index a452d0733d0..622f8fe4ca3 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java +++ b/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -63,7 +63,7 @@ * @author Jose García */ public class InspireAtomUtil { - private final static String EXTRACT_DATASETS_FROM_SERVICE_XSLT = "extract-datasetinfo-from-service-feed.xsl"; + private static final String EXTRACT_DATASETS_FROM_SERVICE_XSLT = "extract-datasetinfo-from-service-feed.xsl"; /** * Xslt process to get the related datasets in service metadata. 
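The rewritten getFieldsValues above fetches the document once and resolves multilingual `*Object` fields against the requested language, falling back to the default translation. Assuming the index stores such fields as a map with a `default` entry plus `lang<code>` entries, the resolution looks like:

    // value is e.g. {"default": "Title", "langeng": "Title", "langfre": "Titre"}
    Object value = sources.get(field);
    if (value instanceof String) {
        fieldValues.put(field, (String) value);
    } else if (value instanceof Map && field.endsWith("Object")) {
        Map<?, ?> valueMap = (Map<?, ?>) value;
        String localized = (String) valueMap.get("lang" + language); // e.g. "langfre"
        fieldValues.put(field, localized != null ? localized : (String) valueMap.get("default"));
    }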
@@ -395,7 +395,15 @@ public static String retrieveDatasetUuidFromIdentifier(EsSearchManager searchMan " \"value\": \"%s\"" + " }" + " }" + + " }," + + " {" + + " \"term\": {" + + " \"isPublishedToAll\": {" + + " \"value\": \"true\"" + + " }" + + " }" + " }" + + " ]" + " }" + "}"; diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java index 97091e008e1..95871555b1d 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java @@ -24,6 +24,8 @@ import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -107,7 +109,7 @@ public class AtomDescribe { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Feeds."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java index a9133fe38a7..33d0ace6128 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -84,7 +86,7 @@ public class AtomGetData { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Get a data file related to dataset"), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java index 94eeb33e4ce..a30dcbb0331 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -61,7 +63,7 @@ public class AtomHarvester { @PreAuthorize("hasAuthority('Administrator')") @ApiResponses(value = { @ApiResponse(responseCode = "201", description = "Scan completed."), - @ApiResponse(responseCode = "204", 
description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(CREATED) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java index 0e27e9c8763..5253d3146ac 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java @@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -114,7 +115,7 @@ public class AtomSearch { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Get a list of feeds."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@io.swagger.v3.oas.annotations.media.Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) public Object feeds( diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java index 87a255411b2..6c7b99ffbc2 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -91,7 +93,7 @@ public class AtomServiceDescription { produces = MediaType.APPLICATION_XML_VALUE) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Feeds."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/jmeter/pom.xml b/jmeter/pom.xml index bf6daf9d308..f1906b9d728 100644 --- a/jmeter/pom.xml +++ b/jmeter/pom.xml @@ -29,7 +29,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/listeners/pom.xml b/listeners/pom.xml index d46c1217e63..082cf9bad29 100644 --- a/listeners/pom.xml +++ b/listeners/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT GeoNetwork Events diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java index 06299c4be91..b9e052d5e99 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java @@ -70,7 +70,7 @@ public final void 
handleEvent(AbstractHistoryEvent event) { */ public final void storeContentHistoryEvent(AbstractHistoryEvent event) { - if(settingManager.getValueAsBool(Settings.SYSTEM_METADATA_HISTORY_ENABLED)) { + if(settingManager.getValueAsBool(Settings.METADATA_HISTORY_ENABLED)) { Integer metadataid = Math.toIntExact(event.getMdId()); diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java index 823a76e2ea5..546571cec96 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java @@ -121,7 +121,7 @@ private void changeToApproved(AbstractMetadata md, MetadataStatus previousStatus status.setChangeDate(new ISODate()); status.setUserId(ServiceContext.get().getUserSession().getUserIdAsInt()); - metadataStatus.setStatusExt(status); + metadataStatus.setStatusExt(status, true); Log.trace(Geonet.DATA_MANAGER, "Metadata with id " + md.getId() + " automatically approved due to publishing."); } diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java index 1a315ca05af..a987943bf39 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java @@ -148,7 +148,7 @@ private AbstractMetadata approveWithDraft(MetadataStatusChanged event) throws Nu status.setChangeDate(new ISODate()); status.setUserId(event.getUser()); - metadataStatus.setStatusExt(status); + metadataStatus.setStatusExt(status, false); } else if (md instanceof Metadata) { draft = null; //metadataDraftRepository.findOneByUuid(md.getUuid()); diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java index 4bd04801be7..ce418b4062f 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java @@ -87,6 +87,7 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md) { * @return */ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMetadata draft) { + Log.info(Geonet.DATA_MANAGER, String.format("Replacing metadata approved record (%d) with draft record (%d)", md.getId(), draft.getId())); Log.trace(Geonet.DATA_MANAGER, "Found approved record with id " + md.getId()); Log.trace(Geonet.DATA_MANAGER, "Found draft with id " + draft.getId()); // Reassign metadata validations @@ -131,6 +132,7 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMe } // Reassign file uploads + Log.info(Geonet.DATA_MANAGER, String.format("Copying draft record '%d' resources to approved record '%d'", draft.getId(), md.getId())); draftMetadataUtils.replaceFiles(draft, md); metadataFileUploadRepository.deleteAll(MetadataFileUploadSpecs.hasMetadataId(md.getId())); @@ -146,7 +148,6 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMe Element xmlData = draft.getXmlData(false); String changeDate = draft.getDataInfo().getChangeDate().getDateAndTime(); - store.delResources(context, draft.getUuid(), false); removeDraft((MetadataDraft) draft); 
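// Order matters in replaceMetadataWithDraft as rearranged above:
//   1) draftMetadataUtils.replaceFiles(draft, md)  - copy the draft's resources
//      onto the approved record;
//   2) metadataManager.updateMetadata(...)         - overwrite the approved XML
//      with the draft contents;
//   3) store.delResources(context, draft.getId()) - only then drop the draft's
//      resources, so nothing is lost if an earlier step fails.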
// Copy contents @@ -155,8 +156,10 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMe xmlData, false, false, context.getLanguage(), changeDate, true, IndexingMode.full); - Log.info(Geonet.DATA_MANAGER, "Record updated with draft contents: " + md.getId()); + Log.info(Geonet.DATA_MANAGER, "Record '" + md.getUuid() + "(" +md.getId() +")' update with draft contents from metadata id '" + draft.getId() +"'."); + Log.info(Geonet.DATA_MANAGER, "Cleaning up draft record resources for metadata '" + draft.getUuid() + "(" +draft.getId() +")'"); + store.delResources(context, draft.getId()); } catch (Exception e) { Log.error(Geonet.DATA_MANAGER, "Error upgrading from draft record with id " + md.getId(), e); } diff --git a/messaging/pom.xml b/messaging/pom.xml index 54eb690f6d2..f9261e43932 100644 --- a/messaging/pom.xml +++ b/messaging/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/oaipmh/pom.xml b/oaipmh/pom.xml index efa7fe0facc..9ebbe0674c9 100644 --- a/oaipmh/pom.xml +++ b/oaipmh/pom.xml @@ -30,7 +30,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/pom.xml b/pom.xml index 47993745350..894126224eb 100644 --- a/pom.xml +++ b/pom.xml @@ -29,7 +29,7 @@ org.geonetwork-opensource geonetwork pom - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT GeoNetwork opensource GeoNetwork opensource is a standards based, Free and Open Source catalog application to manage spatially referenced @@ -122,7 +122,7 @@ maven-dependency-plugin - 3.1.2 + 3.7.0 maven-clean-plugin @@ -245,12 +245,36 @@ maven-toolchains-plugin 3.0.0 + + org.apache.maven.plugins + maven-enforcer-plugin + 3.4.1 + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce-maven + + enforce + + + + + 3.8.3 + + + + + + + maven-compiler-plugin 11 @@ -269,7 +293,6 @@ - org.apache.maven.plugins maven-resources-plugin UTF-8 @@ -295,7 +318,6 @@ - org.apache.maven.plugins maven-surefire-plugin org.fao.geonet.repository.AbstractSpringDataTest @@ -447,7 +469,7 @@ org.apache.jena apache-jena-libs pom - 3.17.0 + 4.10.0 @@ -532,6 +554,11 @@ commons-email 1.5 + + commons-codec + commons-codec + 1.15 + org.apache.xmlgraphics xmlgraphics-commons @@ -868,7 +895,7 @@ com.google.guava guava - 30.0-jre + 33.2.1-jre com.yammer.metrics @@ -1258,7 +1285,7 @@ org.apache.jclouds jclouds-all - 2.3.0 + 2.5.0 @@ -1385,6 +1412,7 @@ estest index datastorages + translationproviders @@ -1428,6 +1456,21 @@ darwin-x86 tar.gz +
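<!-- A sketch of the standard maven-enforcer-plugin configuration that the
     enforce-maven execution above corresponds to (element names assumed from
     the plugin's usual requireMavenVersion rule):
     <execution>
       <id>enforce-maven</id>
       <goals><goal>enforce</goal></goals>
       <configuration>
         <rules>
           <requireMavenVersion>
             <version>3.8.3</version>
           </requireMavenVersion>
         </rules>
       </configuration>
     </execution>
-->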
+ + macOS_aarch64 + + + mac + aarch64 + + + + darwin-aarch64 + kibana.sh + darwin-aarch64 + tar.gz + windows @@ -1527,7 +1570,7 @@ 8080 8090 - 8.11.3 + 8.14.3 linux-x86_64 tar.gz http @@ -1585,21 +1628,21 @@ request the list of hosts (but JPA cache db queries). --> * - 9.4.53.v20231009 + 9.4.54.v20240208 jetty-distribution-${jetty.version} https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-distribution/${jetty.version}/${jetty.file}.tar.gz - 30.0 - 1.19.0 - 42.6.0 + 32.0 + 1.20.0 + 42.7.3 - 5.3.31 - 5.8.8 + 5.3.39 + 5.8.15 2.7.18 2.7.0 - 1.5.13 + 1.8.0 5.6.15.Final 2.2.0 @@ -1609,8 +1652,8 @@ 1.10.1 true 2.7 - 2.1.1 - 2.15.3 + 2.10.0 + 2.16.2 9.1.22 2.25.1 2.17.2 diff --git a/release-build.sh b/release-build.sh new file mode 100755 index 00000000000..f9b39c11f33 --- /dev/null +++ b/release-build.sh @@ -0,0 +1,85 @@ +#!/bin/bash + +buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" ) + +for app in "${buildRequiredApps[@]}"; do : + if ! [ -x "$(command -v ${app})" ]; then + echo "Error: ${app} is not installed." >&2 + exit 1 + fi +done + +function showUsage +{ + echo -e "\nThis script is used to build a release for the current branch" + echo +} + +if [ "$1" = "-h" ] +then + showUsage + exit +fi + +projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml` +subVersion=`cut -d "-" -f 2 <<< $projectVersion` +mainVersion=`cut -d "-" -f 1 <<< $projectVersion` +mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion` +mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion` +mainVersionSub=`cut -d "." -f 3 <<< $mainVersion` + +gitBranch=`git branch --show-current` + +nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))" +previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))" + +from=origin +frombranch=origin/${gitBranch} +series=${mainVersionMajor}.${mainVersionMinor} +versionbranch=${gitBranch} +version=${projectVersion} +minorversion=0 +release=latest +newversion=${mainVersion}-$minorversion +currentversion=${projectVersion} +previousversion=${previousVersionNumber} +nextversion=${nextVersionNumber}-SNAPSHOT + +echo "Building release for version ${newversion} (from ${currentversion})." +echo "" +echo "Before you start:" +echo "1. Use web-ui/download-from-transifex.sh to update translations" +echo "2. Use release-notes.sh to update change log and release notes" +echo "" +echo "After being build you can test the release before publishing. Git branch ${gitBranch}." +read -p "Press enter to continue" + +# Update version number (in pom.xml, installer config and SQL) +./update-version.sh $currentversion $newversion + +# Then commit the new version +git add . 
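# Worked example of the version arithmetic above, assuming
# projectVersion=4.4.7-SNAPSHOT in pom.xml:
#   mainVersion=4.4.7   subVersion=SNAPSHOT   mainVersionSub=7
#   previousVersionNumber=4.4.6   nextVersionNumber=4.4.8
#   newversion=4.4.7-0  nextversion=4.4.8-SNAPSHOT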
+git commit -m "Update version to $newversion" +git tag -a $mainVersion -m "Tag for $newversion release" + +# Build the new release +mvn clean install -DskipTests -ntp -Pwar -Pwro4j-prebuild-cache + +(cd datastorages && mvn clean install -DskipTests -ntp -Drelease -DskipTests) + +# Download Jetty and create the installer +(cd release && mvn clean install -Pjetty-download && ant) + +# generate checksum for download artifacts + +if [ -f "release/target/GeoNetwork-$version/geonetwork-bundle-$newversion.zip.MD5" ]; then + rm release/target/GeoNetwork-$version/geonetwork-bundle-$newversion.zip.MD5 +fi + +if [[ ${OSTYPE:0:6} == 'darwin' ]]; then + md5 -r web/target/geonetwork.war > web/target/geonetwork.war.md5 + md5 -r release/target/GeoNetwork-$nextVersionNumber/geonetwork-bundle-$newversion.zip > release/target/GeoNetwork-$nextVersionNumber/geonetwork-bundle-$newversion.zip.md5 +else + (cd web/target && md5sum geonetwork.war > geonetwork.war.md5) + (cd release/target/GeoNetwork-$nextVersionNumber && md5sum geonetwork-bundle-$newversion.zip > geonetwork-bundle-$newversion.zip.md5) +fi diff --git a/release-notes.sh b/release-notes.sh new file mode 100755 index 00000000000..0c4e40c3d00 --- /dev/null +++ b/release-notes.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" ) + +for app in "${buildRequiredApps[@]}"; do : + if ! [ -x "$(command -v ${app})" ]; then + echo "Error: ${app} is not installed." >&2 + exit 1 + fi +done + +function showUsage +{ + echo -e "\nThis script is used to build a release for the current branch" + echo +} + +if [ "$1" = "-h" ] +then + showUsage + exit +fi + +projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml` +subVersion=`cut -d "-" -f 2 <<< $projectVersion` +mainVersion=`cut -d "-" -f 1 <<< $projectVersion` +mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion` +mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion` +mainVersionSub=`cut -d "." -f 3 <<< $mainVersion` + +gitBranch=`git branch --show-current` + +nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))" +previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))" + +from=origin +frombranch=origin/${gitBranch} +series=${mainVersionMajor}.${mainVersionMinor} +versionbranch=${gitBranch} +version=${projectVersion} +minorversion=0 +release=latest +newversion=${mainVersion}-$minorversion +currentversion=${projectVersion} +previousversion=${previousVersionNumber} +nextversion=${nextVersionNumber}-SNAPSHOT + +echo "Creating change log and release notes for version ${newversion} (from ${currentversion}). Git branch ${gitBranch}:" +echo " docs/changes/changes$newversion.txt" +echo " docs/manual/docs/overview/change-log/version-$mainVersion.md" +echo "When generated please review and update:" +echo " docs/manual/mkdocs.yml" +echo " docs/manual/docs/overview/latest/index.md" +echo " docs/manual/docs/overview/change-log/version-$mainVersion.md" +echo "" +read -p "Press enter to continue" + +# Generate list of changes +cat < docs/changes/changes$newversion.txt +================================================================================ +=== +=== GeoNetwork $version: List of changes +=== +================================================================================ +EOF +git log --pretty='format:- %s' $previousversion... 
>> docs/changes/changes$newversion.txt + +# Generate release notes + +cat < docs/manual/docs/overview/change-log/version-$mainVersion.md +# Version $mainVersion + +GeoNetwork $mainVersion is a minor release. + +## Migration notes + +### API changes + +### Installation changes + +### Index changes + +## List of changes + +Major changes: + +EOF + +git log --pretty='format:* %N' $previousversion.. | grep -v "^* $" >> docs/manual/docs/overview/change-log/version-$mainVersion.md + +cat <> docs/manual/docs/overview/change-log/version-$mainVersion.md + +and more \... see [$newversion issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A$mainVersion+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A$mainVersion+is%3Aclosed) for full details. +EOF diff --git a/release-publish.sh b/release-publish.sh new file mode 100755 index 00000000000..a62f7229ebd --- /dev/null +++ b/release-publish.sh @@ -0,0 +1,56 @@ +#!/bin/bash + +function showUsage +{ + echo -e "\nThis script is used to publish a release on sourceforge, github and maven repository" + echo + echo -e "Usage: ./`basename $0` sourceforge_username [remote]" + echo + echo -e "Example:" + echo -e "\t./`basename $0` sourceforgeusername" + echo -e "\t./`basename $0` sourceforgeusername upstream" + echo +} + +if [ "$1" = "-h" ] +then + showUsage + exit +fi + +if [[ ($# -ne 1) && ($# -ne 2) ]] +then + showUsage + exit +fi + +projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml` +version=`cut -d "-" -f 1 <<< $projectVersion` +versionbranch=`git branch --show-current` +sourceforge_username=$1 +remote=origin + +if [ $# -eq 2 ] +then + remote=$2 +fi + +# Push the branch and tag to github +git push $remote $versionbranch +git push $remote $version +# TODO: attach release notes to version + +sftp $sourceforge_username,geonetwork@frs.sourceforge.net << EOT +cd /home/frs/project/g/ge/geonetwork/GeoNetwork_opensource +mkdir v${version} +cd v${version} +put docs/changes/changes${version}-0.txt +put release/target/GeoNetwork*/geonetwork-bundle*.zip* +put web/target/geonetwork.war* +put datastorages/*/target/*.zip +bye +EOT + +# Deploy to osgeo repository (requires credentials in ~/.m2/settings.xml) +mvn deploy -DskipTests -Drelease + diff --git a/release-restore.sh b/release-restore.sh new file mode 100755 index 00000000000..2b98413ce4c --- /dev/null +++ b/release-restore.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" ) + +for app in "${buildRequiredApps[@]}"; do : + if ! [ -x "$(command -v ${app})" ]; then + echo "Error: ${app} is not installed." >&2 + exit 1 + fi +done + +projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml` +subVersion=`cut -d "-" -f 2 <<< $projectVersion` +mainVersion=`cut -d "-" -f 1 <<< $projectVersion` +mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion` +mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion` +mainVersionSub=`cut -d "." 
-f 3 <<< $mainVersion` + +gitBranch=`git branch --show-current` + +nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))" +previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))" + +from=origin +frombranch=origin/${gitBranch} +series=${mainVersionMajor}.${mainVersionMinor} +versionbranch=${gitBranch} +version=${projectVersion} +minorversion=0 +release=latest +newversion=${mainVersion}-$minorversion +currentversion=${projectVersion} +previousversion=${previousVersionNumber} +nextversion=${nextVersionNumber}-SNAPSHOT + + +echo "Update version number to ${nextversion} (from ${newversion})." +echo "" +echo "After update. Push changes to Git branch ${gitBranch}." +read -p "Press enter to continue" + + +# Set version number to SNAPSHOT +./update-version.sh $newversion $nextversion + +git add . +git commit -m "Update version to $nextversion" + + diff --git a/release-test.sh b/release-test.sh new file mode 100755 index 00000000000..98e49da4b0b --- /dev/null +++ b/release-test.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml` +version=`cut -d "-" -f 1 <<< $projectVersion` +versionbranch=`git branch --show-current` + +echo "Testing zip in release/target/GeoNetwork-$version ..." + +cd "release/target/GeoNetwork-$version" +unzip -q "geonetwork-bundle-$projectVersion.zip" -d "geonetwork-bundle-$projectVersion" +cd "geonetwork-bundle-$projectVersion/bin" +./startup.sh -f diff --git a/release/build.properties b/release/build.properties index e7183fe9410..d730f4f0d03 100644 --- a/release/build.properties +++ b/release/build.properties @@ -5,11 +5,11 @@ homepage=https://geonetwork-opensource.org supportEmail=geonetwork-users@lists.sourceforge.net # Application version properties -version=4.4.3 +version=4.4.7 subVersion=SNAPSHOT # Java runtime properties javaVersion=11 javaDisplayVersion=11 -jreUrl=https://adoptium.net/en-GB/temurin/releases/?version=4.4.3 +jreUrl=https://adoptium.net/en-GB/temurin/releases/?version=4.4.7 jreName=AdoptOpenJDK diff --git a/release/pom.xml b/release/pom.xml index 920336a32a0..b63014f0f57 100644 --- a/release/pom.xml +++ b/release/pom.xml @@ -7,7 +7,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT gn-release diff --git a/schemas-test/pom.xml b/schemas-test/pom.xml index 6df035d8937..60a6f0c668c 100644 --- a/schemas-test/pom.xml +++ b/schemas-test/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 jar diff --git a/schemas/config-editor.xsd b/schemas/config-editor.xsd index a6ecd0ed338..31455f489a3 100644 --- a/schemas/config-editor.xsd +++ b/schemas/config-editor.xsd @@ -263,12 +263,14 @@ Configure here the list of fields to display using a table. This only applies to
- - - - +
+ + + + + @@ -434,6 +436,7 @@ Table column. + @@ -903,7 +906,7 @@ Define if this tab is the default one for the view. Only one tab should be the d @@ -1001,6 +1004,15 @@ e.g. only 2 INSPIRE themes: ]]> + + + + + + @@ -1086,6 +1098,7 @@ the mandatory section with no name and then the inner elements. + @@ -1115,9 +1128,11 @@ Note: Only sections with forEach support del attribute.

Distribution

-
- - +
+ +

@@ -1154,6 +1169,119 @@ Note: Only sections with forEach support del attribute. + + + + + + + + + + + + + + + + + + + ]]> + + + + + + + + + + + + + + + + Add a hyperlink to the item + + + + + + + + An optional name to override the default one based on the field name for the + section. The name must be defined in ``{schema}/loc/{lang}/strings.xml``. + + + + + + The XPath of the element to create list items. + + + + + + + + + XPath of the element to sort the list by. Must use the full name of each node, e.g. gmd:organisationName/gco:CharacterString + + + + + + + An optional attribute to collapse the section. If not set, the section is expanded. + + + + + + An optional attribute to prevent the section from being collapsed. If not set, the section can be collapsed. + + + + + + + Local name to match if the element does not exist. @@ -1168,6 +1296,46 @@ Note: Only sections with forEach support del attribute. + + + + +