diff --git a/src/metax_api/api/rest/base/api_schemas/datacatalog.json b/src/metax_api/api/rest/base/api_schemas/datacatalog.json index 441c0fa0..d1c5eb40 100644 --- a/src/metax_api/api/rest/base/api_schemas/datacatalog.json +++ b/src/metax_api/api/rest/base/api_schemas/datacatalog.json @@ -34,6 +34,18 @@ "maxItems":1, "type":"string" }, + "catalog_record_services_edit":{ + "title":"Editor services", + "description":"Comma-separated list of services which are allowed to edit catalog records in the catalog.", + "maxItems":1, + "type":"string" + }, + "catalog_record_services_create":{ + "title":"Creator services", + "description":"Comma-separated list of services which are allowed to add new catalog records to the catalog.", + "maxItems":1, + "type":"string" + }, "date_modified":{ "title":"Date Modified", "description":"Date on which the resource was changed.", diff --git a/src/metax_api/api/rest/base/api_schemas/file.json b/src/metax_api/api/rest/base/api_schemas/file.json index 91ca00cb..9abc4951 100644 --- a/src/metax_api/api/rest/base/api_schemas/file.json +++ b/src/metax_api/api/rest/base/api_schemas/file.json @@ -17,8 +17,9 @@ }, "algorithm":{ "enum":[ - "md5", - "sha2" + "MD5", + "SHA-256", + "SHA-512" ], "type":"string" }, @@ -376,4 +377,4 @@ ] }, } -} \ No newline at end of file +} diff --git a/src/metax_api/api/rest/base/schemas/att_dataset_schema.json b/src/metax_api/api/rest/base/schemas/att_dataset_schema.json index 5e6757e8..032d3010 100644 --- a/src/metax_api/api/rest/base/schemas/att_dataset_schema.json +++ b/src/metax_api/api/rest/base/schemas/att_dataset_schema.json @@ -1,8 +1,7 @@ - { "@id":"http://uri.suomi.fi/datamodel/ns/mrd#", "title":"Metax Research Datasets", - "modified":"Fri, 17 Aug 2018 11:50:11 GMT", + "modified":"Wed, 11 Mar 2020 12:47:12 GMT", "$schema":"http://json-schema.org/draft-04/schema#", "type":"object", "allOf":[ @@ -14,9 +13,9 @@ "Activity":{ "title":"Activity", "type":"object", - "minProperties": 1, 
"@id":"http://www.w3.org/ns/prov#Activity", "description":"An activity is something that occurs over a period of time and acts upon or with entities; it may include consuming, processing, transforming, modifying, relocating, using, or generating entities.", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -29,7 +28,7 @@ "description":{ "@id":"http://purl.org/dc/terms/description", "title":"Description", - "description":"The description of the activity", + "description":"Generic description of the activity or the event", "@type":"http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", "type":"object", "$ref":"#/definitions/langString" @@ -120,16 +119,16 @@ "CatalogRecord":{ "title":"Catalog Record", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#CatalogRecord", "description":"A record in a data catalog, describing a single dataset.", + "minProperties":1, "properties":{ "data_catalog":{ "@id":"http://uri.suomi.fi/datamodel/ns/mrd#catalogidentifier", "title":"Research data catalog", "description":"Research Data Catalog identifier or embedded object to data_catalog property in embedded object.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -138,7 +137,7 @@ "title":"Date Created", "description":"Date of creation of the resource.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -147,7 +146,7 @@ "title":"Date Modified", "description":"Date on which the resource was changed.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -164,7 +163,7 @@ "title":"Contract", "description":"Contract identifier or embedded contract object", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_state":{ @@ -173,14 
+172,21 @@ "description":"state of dataset in PAS cycle: 0 = Not in PAS, 1 = Proposed for midtterm, 2 = Proposed for long term, 3, =in packaging service, 4 = in dissemination, 5 = in midterm preservation, 6 = in longterm preservation, 7 = Rejected long-term preservation, 8 = Rejected mid-term preservation", "enum":[ "0", - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8" + "10", + "20", + "30", + "40", + "50", + "60", + "70", + "75", + "80", + "90", + "100", + "110", + "120", + "130", + "140" ], "@type":"http://www.w3.org/2001/XMLSchema#string", "type":"string" @@ -192,7 +198,6 @@ "@type":"http://www.w3.org/2001/XMLSchema#string", "type":"array", "items":{ - "minLength": 1, "type":"string" } }, @@ -201,7 +206,7 @@ "title":"Preservation system description", "description":"Preservation status described for the user", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_reason_description":{ @@ -209,7 +214,7 @@ "title":"Preservation reason description", "description":"Reason for the preservation from the user", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_state_modified":{ @@ -217,7 +222,7 @@ "title":"Preservation status modified", "description":"Date when status was modified", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -228,10 +233,10 @@ "@type":"http://www.w3.org/2001/XMLSchema#boolean", "type":"boolean" }, - "alternate_record":{ + "alternate_record_set":{ "@id":"http://www.w3.org/ns/prov#alternateOf", - "title":"Alternate record", - "description":"Refers to alternate catalog record in different catalog. This reference is created if two datasets in different catalogs have the same preferred identifier", + "title":"Alternate records", + "description":"Refers to alternate catalog records in different catalogs. 
This reference is created if two datasets in different catalogs have the same preferred identifier.", "@type":"@id", "type":"array", "items":{ @@ -239,7 +244,7 @@ "$ref":"#/definitions/CatalogRecord" } }, - "next":{ + "next_dataset_version":{ "@id":"http://www.w3.org/ns/adms#next", "title":"Next version", "description":"A link to the next version of the catalog record", @@ -260,7 +265,7 @@ "title":"Identifier", "description":"Catalog record identifier", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -269,9 +274,16 @@ "title":"Preservation identifier", "description":"PAS identifier", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" + }, + "previous_dataset_version":{ + "@id":"http://uri.suomi.fi/datamodel/ns/mrd#previous_dataset_version", + "title":"Previous version", + "@type":"@id", + "type":"object", + "$ref":"#/definitions/CatalogRecord" } }, "required":[ @@ -284,9 +296,9 @@ "Checksum":{ "title":"Checksum", "type":"object", - "minProperties": 1, "@id":"http://spdx.org/rdf/terms#Checksum", "description":"", + "minProperties":1, "properties":{ "algorithm":{ "@id":"http://spdx.org/rdf/terms#algorithm", @@ -302,6 +314,7 @@ "OTHER" ], "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "checksum_value":{ @@ -309,7 +322,7 @@ "title":"Checksum value", "description":"Value of the checksum. 
xsd:hexBinary", "@type":"http://www.w3.org/2001/XMLSchema#hexBinary", - "minLength": 1, + "minLength":1, "type":"string" } }, @@ -322,16 +335,16 @@ "Concept":{ "title":"Concept", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/2004/02/skos/core#Concept", "description":"An idea or notion; a unit of thought.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"This is the IRI identifier for the concept", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -356,7 +369,7 @@ "title":"In scheme", "description":"Relates a resource (for example a concept) to a concept scheme in which it is included.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -369,16 +382,16 @@ "DistributionCharacteristics":{ "title":"File characteristics", "type":"object", - "minProperties": 1, "@id":"http://www.loc.gov/premis/rdf/v1#ObjectCharasteristics", "description":"Technical properties of files", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", "title":"File Type Name", "description":"A name of the file type", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "description":{ @@ -386,7 +399,7 @@ "title":"File Type Description", "description":"Description of the file type", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "application_name":{ @@ -394,7 +407,7 @@ "title":"Application name", "description":"A designation for the name of the software program that created the object", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string", "example":[ "MSWord" @@ -405,7 +418,7 @@ "title":"Text encoding", "description":"Used text encoding", "@type":"http://www.w3.org/2001/XMLSchema#string", - 
"minLength": 1, + "minLength":1, "type":"string" } }, @@ -417,16 +430,16 @@ "Document":{ "title":"Documented link", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Document", "description":"A documented link to a document in the Web.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"URL", "description":"URL of the homepage", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -455,9 +468,9 @@ "Editor":{ "title":"Editor", "type":"object", - "minProperties": 1, "@id":"http://uri.suomi.fi/datamodel/ns/mrd#Editor", - "description":"Software or service that is used to modify the catalog record and the dataset", + "description":"Software or service that is used to modify the catalog record and the dataset.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", @@ -467,6 +480,7 @@ "QVAIN" ], "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "owner_id":{ @@ -474,7 +488,7 @@ "title":"Owner identifier", "description":"owner of the resource", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "creator_id":{ @@ -482,7 +496,7 @@ "title":"Creator identifier", "description":"creator of the resource", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "record_id":{ @@ -490,7 +504,7 @@ "title":"Record identifier", "description":"local record identifier", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" } }, @@ -499,9 +513,9 @@ "EntityRelation":{ "title":"Entity relation", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/prov#EntityInfluence", "description":"Description of a the dataset influence upon any other kind of entity. 
In Metax RelationType is used from reference data: https://wiki.eduuni.fi/pages/viewpage.action?spaceKey=CSCMETAX&title=Reference+Data", + "minProperties":1, "properties":{ "entity":{ "@id":"http://www.w3.org/ns/prov#entity", @@ -528,9 +542,9 @@ "LinguisticSystem":{ "title":"Linguistic System", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/LinguisticSystem", "description":"Examples include written, spoken, sign, and computer languages.\n\nA system of signs, symbols, sounds, gestures, or rules used in communication.", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -545,7 +559,7 @@ "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. \n\nAn unambiguous reference to the resource within a given context.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -555,16 +569,16 @@ "Location":{ "title":"Location", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/Location", "description":"A spatial region or named place.", + "minProperties":1, "properties":{ "geographic_name":{ "@id":"http://www.w3.org/ns/locn#geographicName", "title":"Geographic name", "description":"A geographic name is a proper noun applied to a spatial object. 
Taking the example used in the relevant INSPIRE data specification (page 18), the following are all valid geographic names for the Greek capital:\n- Αθήνα (the Greek endonym written in the Greek script)\n- Athína (the standard Romanisation of the endonym)\n- Athens (the English language exonym)\nFor INSPIRE-conformant data, provide the metadata for the geographic name using a skos:Concept as a datatype.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "alt":{ @@ -572,7 +586,7 @@ "title":"Altitude", "description":"The WGS84 altitude of a SpatialThing (decimal meters \nabove the local reference ellipsoid).", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "full_address":{ @@ -580,7 +594,7 @@ "title":"Full address", "description":"The complete address written as a string, with or without formatting.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "as_wkt":{ @@ -611,9 +625,9 @@ "Organization":{ "title":"Organization", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Organization", "description":"An organization.", + "minProperties":1, "properties":{ "@type":{ "type":"string", @@ -624,7 +638,7 @@ "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -694,15 +708,15 @@ "PeriodOfTime":{ "title":"Period of Time", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/PeriodOfTime", "description":"An interval of time that is named or defined by its start and end dates.", + "minProperties":1, "properties":{ "start_date":{ "@id":"http://schema.org/startDate", "title":"Start of the pediod", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ 
-710,7 +724,7 @@ "@id":"http://schema.org/endDate", "title":"End of the period", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -719,7 +733,7 @@ "title":"Temporal coverage", "description":"Period of time expressed as ISO 8601 compliant string", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" } }, @@ -728,9 +742,9 @@ "Person":{ "title":"Person", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Person", "description":"A person.", + "minProperties":1, "properties":{ "@type":{ "type":"string", @@ -741,7 +755,7 @@ "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -753,7 +767,7 @@ "title":"Name", "description":"This property contains a name of the agent. This property can be repeated for different versions of the name (e.g. the name in different languages)", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "email":{ @@ -824,9 +838,9 @@ "Project":{ "title":"Project", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Project", "description":"A project (a collective endeavour of some kind).", + "minProperties":1, "properties":{ "name":{ "@id":"http://xmlns.com/foaf/0.1/name", @@ -841,7 +855,7 @@ "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. 
An unambiguous reference to the resource within a given context.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "has_funder_identifier":{ @@ -849,7 +863,7 @@ "title":"Project funding identifier", "description":"Unique identifier for the project that is being used by the project funder", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "homepage":{ @@ -899,9 +913,9 @@ "RelatedEntity":{ "title":"Related Entity", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/prov#Entity", "description":"Related entity", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -924,7 +938,7 @@ "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -952,16 +966,16 @@ "ResearchDataLicenseDocument":{ "title":"License Document", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/LicenseDocument", "description":"A legal document giving official permission to do something with a Resource.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"License identifier from the reference data.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -986,7 +1000,7 @@ "title":"Licence URL", "description":"A referenced license document that applies to this content", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -1002,36 +1016,34 @@ "ResearchDataset":{ "title":"Research Dataset", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Dataset", "description":"A 
collection of data, published or curated by a single source, and available for access or download in one or more formats", + "minProperties":1, "properties":{ "metadata_version_identifier":{ "@id":"http://uri.suomi.fi/datamodel/ns/mrd#datasetidentifier", "title":"Metadata version identifier", "description":"Dataset metadata version identifier in form of http://urn.fi{URN}:version:{number}", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", - "format":"uri", - "readOnly": true + "format":"uri" }, "preferred_identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Preferred identifier", "description":"Unique identifier for the dataset. URN or URI.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", - "format":"uri", - "readOnly": true + "format":"uri" }, "modified":{ "@id":"http://purl.org/dc/terms/modified", "title":"Dataset modification date", "description":"Last known time when a research dataset or metadata about the research dataset has been significantly modified. 
This field is usually modified by the user or harvested from other source.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -1040,7 +1052,7 @@ "title":"Version", "description":"This property contains a version number or other version designation of the Dataset.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "version_notes":{ @@ -1059,9 +1071,9 @@ "title":"Release date", "description":"Date of formal issuance (e.g., publication) of the resource.", "@type":"http://www.w3.org/2001/XMLSchema#date", - "minLength": 1, + "minLength":1, "type":"string", - "format": "date" + "format":"date" }, "title":{ "@id":"http://purl.org/dc/terms/title", @@ -1104,7 +1116,7 @@ "title":"Preferred Bibliographic Citation", "description":"Preferred bibliographic reference for the resource.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "other_identifier":{ @@ -1317,9 +1329,9 @@ "RightsStatement":{ "title":"Rights Statement", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/RightsStatement", "description":"A statement about the intellectual property rights (IPR) held in or over a Resource, a legal document giving official permission to do something with a resource, or a statement about access rights.", + "minProperties":1, "properties":{ "description":{ "@id":"http://purl.org/dc/terms/description", @@ -1334,9 +1346,9 @@ "title":"Date Available", "description":"Date (often a range) that the resource became or will become available.", "@type":"http://www.w3.org/2001/XMLSchema#date", - "minLength": 1, + "minLength":1, "type":"string", - "format": "date" + "format":"date" }, "access_type":{ "@id":"http://purl.org/dc/terms/type", @@ -1397,16 +1409,16 @@ "StructuredIdentifier":{ "title":"Identifier", "type":"object", - "minProperties": 1, 
"@id":"http://www.w3.org/ns/adms#Identifier", "description":"Structured identifier", + "minProperties":1, "properties":{ "notation":{ "@id":"http://www.w3.org/2004/02/skos/core#notation", "title":"Identifier value", "description":"Literal value of the identifier", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "local_identifier_type":{ @@ -1414,7 +1426,7 @@ "title":"Local identifier type", "description":"Local identifier type defines use of the identifier in given context.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "provider":{ @@ -1442,9 +1454,9 @@ "Variable":{ "title":"Variable", "type":"object", - "minProperties": 1, "@id":"http://rdf-vocabulary.ddialliance.org/discovery#Variable", "description":"", + "minProperties":1, "properties":{ "pref_label":{ "@id":"http://www.w3.org/2004/02/skos/core#prefLabel", @@ -1483,7 +1495,7 @@ "title":"Representation", "description":"Defines reference to a Concept Scheme that includes the possible values of a variable.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -1496,16 +1508,16 @@ "WebResource":{ "title":"Remote Web Resource", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Distribution", "description":"Represents single available resource in the web.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Unique identifier for the distribution from file: scheme", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -1517,7 +1529,7 @@ "title":"Title", "description":"A name given to the distribution.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "description":{ @@ -1525,7 +1537,7 @@ "title":"Description", 
"description":"Free-text account of the distribution.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "modified":{ @@ -1533,7 +1545,7 @@ "title":"Date Modified", "description":"Date on which the resource was changed.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -1637,4 +1649,4 @@ } } } -} \ No newline at end of file +} diff --git a/src/metax_api/api/rest/base/schemas/datacatalog_schema.json b/src/metax_api/api/rest/base/schemas/datacatalog_schema.json index 67dff7d2..c81ca00a 100644 --- a/src/metax_api/api/rest/base/schemas/datacatalog_schema.json +++ b/src/metax_api/api/rest/base/schemas/datacatalog_schema.json @@ -1,9 +1,8 @@ - { "description":"This is a subset of https://tietomallit.suomi.fi/model/att that defines API to describe Research Data Catalogs in METAX", "@id":"http://uri.suomi.fi/datamodel/ns/mdc#", "title":"Metax Research Data Catalogs", - "modified":"Wed, 30 May 2018 07:39:45 GMT", + "modified":"Wed, 11 Mar 2020 11:53:03 GMT", "$schema":"http://json-schema.org/draft-04/schema#", "type":"object", "allOf":[ @@ -17,12 +16,14 @@ "type":"object", "@id":"http://www.w3.org/ns/dcat#Catalog", "description":"A curated collection of metadata about datasets", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. 
\n\nAn unambiguous reference to the resource within a given context.", "@type":"@id", + "minLength":1, "type":"string", "format":"uri" }, @@ -39,6 +40,7 @@ "title":"Description", "description":"A free-text account of the catalog", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "modified":{ @@ -46,16 +48,18 @@ "title":"Date Modified", "description":"Date on which the Catalogue was last modified", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", + "minLength":1, "type":"string", - "format": "date-time" + "format":"date-time" }, "issued":{ "@id":"http://purl.org/dc/terms/issued", "title":"Date Issued", "description":"Date of formal issuance (e.g., publication, release date) of the catalogue.", "@type":"http://www.w3.org/2001/XMLSchema#date", + "minLength":1, "type":"string", - "format": "date" + "format":"date" }, "language":{ "@id":"http://purl.org/dc/terms/language", @@ -122,6 +126,7 @@ "title":"Schema name", "description":"Name of the schema used by the catalog records", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "logo":{ @@ -129,6 +134,7 @@ "title":"Logo", "description":"Small symbol that represents the organization", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", + "minLength":1, "type":"string", "format":"uri" }, @@ -155,12 +161,14 @@ "type":"object", "@id":"http://www.w3.org/2004/02/skos/core#Concept", "description":"An idea or notion; a unit of thought.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"This is the IRI identifier for the concept", "@type":"@id", + "minLength":1, "type":"string", "format":"uri" }, @@ -183,12 +191,14 @@ "type":"object", "@id":"http://xmlns.com/foaf/0.1/Document", "description":"Documented link", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"URL", "description":"URL of the homepage", 
"@type":"http://www.w3.org/2001/XMLSchema#anyURI", + "minLength":1, "type":"string", "format":"uri" }, @@ -220,6 +230,7 @@ "type":"object", "@id":"http://purl.org/dc/terms/LinguisticSystem", "description":"Examples include written, spoken, sign, and computer languages.\n\nA system of signs, symbols, sounds, gestures, or rules used in communication.", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -237,6 +248,7 @@ "title":"Identifier", "description":"Language identifier. URI from http://publications.europa.eu/mdr/authority/language/", "@type":"@id", + "minLength":1, "type":"string", "format":"uri" } @@ -251,17 +263,16 @@ "type":"object", "@id":"http://xmlns.com/foaf/0.1/Organization", "description":"Organization that distributes or publishes datasets", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", + "minLength":1, "type":"string", - "format":"uri", - "example":[ - "" - ] + "format":"uri" }, "name":{ "@id":"http://xmlns.com/foaf/0.1/name", @@ -276,6 +287,7 @@ "title":"Email", "description":"Email address.", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string", "format":"email" }, @@ -318,12 +330,14 @@ "type":"object", "@id":"http://purl.org/dc/terms/LicenseDocument", "description":"A legal document giving official permission to do something with a Resource.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", + "minLength":1, "type":"string", "format":"uri" }, @@ -348,6 +362,7 @@ "title":"Licence URL", "description":"A referenced license document that applies to this content", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", + "minLength":1, "type":"string", 
"format":"uri" } @@ -362,6 +377,7 @@ "type":"object", "@id":"http://purl.org/dc/terms/RightsStatement", "description":"A statement about the intellectual property rights (IPR) held in or over a Resource, a legal document giving official permission to do something with a resource, or a statement about access rights.", + "minProperties":1, "properties":{ "description":{ "@id":"http://purl.org/dc/terms/description", @@ -376,8 +392,9 @@ "title":"Date Available", "description":"Date (often a range) that the resource became or will become available.", "@type":"http://www.w3.org/2001/XMLSchema#date", + "minLength":1, "type":"string", - "format": "date" + "format":"date" }, "access_type":{ "@id":"http://purl.org/dc/terms/type", @@ -429,4 +446,4 @@ } } } -} \ No newline at end of file +} diff --git a/src/metax_api/api/rest/base/schemas/file_schema.json b/src/metax_api/api/rest/base/schemas/file_schema.json index 576046ef..c24f398e 100644 --- a/src/metax_api/api/rest/base/schemas/file_schema.json +++ b/src/metax_api/api/rest/base/schemas/file_schema.json @@ -1,8 +1,7 @@ - { "@id":"http://uri.suomi.fi/datamodel/ns/mfs#", "title":"Metax Data Storage Metadata", - "modified":"Thu, 08 Mar 2018 11:51:24 GMT", + "modified":"Wed, 11 Mar 2020 12:01:59 GMT", "$schema":"http://json-schema.org/draft-04/schema#", "type":"object", "anyOf":[ @@ -17,12 +16,14 @@ "type":"object", "@id":"http://spdx.org/rdf/terms#Checksum", "description":"Checksum of the file", + "minProperties":1, "properties":{ "value":{ "@id":"http://spdx.org/rdf/terms#checksumValue", "title":"Checksum value", "description":"Value of the checksum. 
xsd:hexBinary", "@type":"http://www.w3.org/2001/XMLSchema#hexBinary", + "minLength":1, "type":"string" }, "algorithm":{ @@ -34,6 +35,7 @@ "sha2" ], "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "checked":{ @@ -41,6 +43,7 @@ "title":"Checksum checked", "description":"Last time the file checksum is validated", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", + "minLength":1, "type":"string", "format":"date-time" } @@ -57,22 +60,22 @@ "type":"object", "@id":"http://uri.suomi.fi/datamodel/ns/mfs#Directory", "description":"Container for folders", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Local directory identifier", - "@type":"http://www.w3.org/2001/XMLSchema#string", - "type":"string", - "example":[ - "" - ] + "@type":"@id", + "minLength":1, + "type":"string" }, "directory_name":{ "@id":"http://uri.suomi.fi/datamodel/ns/mfs#fileName", "title":"Directory name", "description":"Directory name", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "directory_path":{ @@ -80,16 +83,9 @@ "title":"Directory Path", "description":"Path to the directory", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, - "directory_uploaded":{ - "@id":"http://uri.suomi.fi/datamodel/ns/mfs#dateUploaded", - "title":"Date uploaded", - "description":"date when data was uploaded", - "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "type":"string", - "format":"date-time" - }, "directory_modified":{ "@id":"http://purl.org/dc/terms/modified", "title":"Date modified", @@ -98,22 +94,6 @@ "type":"string", "format":"date-time" }, - "directory_frozen":{ - "@id":"http://uri.suomi.fi/datamodel/ns/mfs#datefrozen", - "title":"Date frozen", - "description":"date when the file is frozen", - "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "type":"string", - "format":"date-time" - }, - 
"directory_deleted":{ - "@id":"http://uri.suomi.fi/datamodel/ns/mfs#datedeleted", - "title":"Date deleted", - "description":"date when the frozen file was deleted", - "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "type":"string", - "format":"date-time" - }, "byte_size":{ "@id":"http://www.w3.org/ns/dcat#byteSize", "title":"Directory byte size", @@ -126,6 +106,7 @@ "title":"Project identifier", "description":"Group that is used to restrict the access to the files. In IDA this is the same as Project identifier.", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "contains":{ @@ -155,23 +136,21 @@ "type":"object", "$ref":"#/definitions/FileStorage" }, - "number_of_files":{ + "file_count":{ "@id":"http://uri.suomi.fi/datamodel/ns/mfs#numberOfFiles", - "title":"Number of files", + "title":"File count", "description":"Number of files in adirectory", "@type":"http://www.w3.org/2001/XMLSchema#integer", "type":"integer" } }, "required":[ + "file_count", "identifier", "byte_size", "directory_path", - "directory_uploaded", "directory_modified", "file_storage", - "number_of_files", - "directory_frozen", "directory_name", "project_identifier" ], @@ -182,12 +161,14 @@ "type":"object", "@id":"http://www.w3.org/2000/01/rdf-schema#Resource", "description":"The class of File", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"A persistent identifier (PID) in urn: scheme", - "@type":"http://www.w3.org/2001/XMLSchema#anyURI", + "@type":"@id", + "minLength":1, "type":"string", "format":"uri" }, @@ -196,6 +177,7 @@ "title":"File name", "description":"File name including the extension", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "file_path":{ @@ -203,6 +185,7 @@ "title":"File Path", "description":"Path to the item as file: URI", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, 
"file_uploaded":{ @@ -210,6 +193,7 @@ "title":"Date uploaded", "description":"date when data was uploaded", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", + "minLength":1, "type":"string", "format":"date-time" }, @@ -218,6 +202,7 @@ "title":"Date modified", "description":"Date on which the resource was changed.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", + "minLength":1, "type":"string", "format":"date-time" }, @@ -226,6 +211,7 @@ "title":"Date frozen", "description":"date when the file is frozen", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", + "minLength":1, "type":"string", "format":"date-time" }, @@ -234,6 +220,7 @@ "title":"Date deleted", "description":"date when the frozen file was deleted", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", + "minLength":1, "type":"string", "format":"date-time" }, @@ -248,8 +235,9 @@ "file_format":{ "@id":"http://schema.org/fileFormat", "title":"File format", - "description":"File format, must usually IANA mediatype", + "description":"File format, usually IANA mediatype/mimetype automatically detected from the file.", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "byte_size":{ @@ -272,6 +260,7 @@ "title":"Project identifier", "description":"Group that is used to restrict the access to the files. 
In IDA this is the same as Project identifier.", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "checksum":{ @@ -317,12 +306,14 @@ "type":"object", "@id":"http://www.loc.gov/premis/rdf/v1#ObjectCharacteristics", "description":"This class is used as extension point to additional file charasteristics", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", "title":"Title", "description":"A name given to the file", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "description":{ @@ -330,6 +321,7 @@ "title":"Description", "description":"Free-text account of the distribution.", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "encoding":{ @@ -337,19 +329,21 @@ "title":"Text encoding", "description":"Used text encoding", "enum":[ - "ISO-8859-15", "UTF-8", "UTF-16", - "UTF-32" + "UTF-32", + "ISO-8859-15" ], "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "file_format":{ "@id":"http://schema.org/fileFormat", "title":"File format", - "description":"File format, must usually IANA mediatype", + "description":"Format of the file. 
Usually IANA mediatype but can be custom type defined by the user.", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "format_version":{ @@ -364,6 +358,7 @@ "title":"Application name", "description":"A designation for the name of the software program that created the object", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string", "example":[ "MSWord" @@ -374,6 +369,7 @@ "title":"File Created", "description":"Original creation date of the file", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", + "minLength":1, "type":"string", "format":"date-time" }, @@ -382,6 +378,7 @@ "title":"Metadata Modified", "description":"Date on which the File Charasteristics have been last updated", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", + "minLength":1, "type":"string", "format":"date-time" }, @@ -397,18 +394,21 @@ "title":"CSV delimiter", "description":"", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "csv_record_separator":{ "@id":"http://uri.suomi.fi/datamodel/ns/mfs#csv_record_separator", "title":"CSV record separator", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "csv_quoting_char":{ "@id":"http://uri.suomi.fi/datamodel/ns/mfs#csv_quoting_char", "title":"CSV quoting char", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "csv_has_header":{ @@ -420,17 +420,26 @@ }, "additionalProperties": false }, + "FileCharacteristicsExtension":{ + "title":"File characteristics extension", + "type":"object", + "@id":"http://uri.suomi.fi/datamodel/ns/mfs#FileCharacteristicsExtension", + "description":"Arbitary extension to file characteristics", + "minProperties":1 + }, "FileStorage":{ "title":"File storage", "type":"object", "@id":"http://uri.suomi.fi/datamodel/ns/mfs#FileStorage", "description":"File storage system", + "minProperties":1, "properties":{ "identifier":{ 
"@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Persistent identifier for the storage system", - "@type":"http://www.w3.org/2001/XMLSchema#anyURI", + "@type":"@id", + "minLength":1, "type":"string", "format":"uri" }, @@ -439,6 +448,7 @@ "title":"Title", "description":"A name given to the resource.", "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "url":{ @@ -446,6 +456,7 @@ "title":"Access url", "description":"URL of the storage system", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", + "minLength":1, "type":"string", "format":"uri" } @@ -465,4 +476,4 @@ } } } -} \ No newline at end of file +} diff --git a/src/metax_api/api/rest/base/schemas/harvester_dataset_schema.json b/src/metax_api/api/rest/base/schemas/harvester_dataset_schema.json index af961e66..d3b2b9a3 100644 --- a/src/metax_api/api/rest/base/schemas/harvester_dataset_schema.json +++ b/src/metax_api/api/rest/base/schemas/harvester_dataset_schema.json @@ -1,8 +1,7 @@ - { "@id":"http://uri.suomi.fi/datamodel/ns/mrd#", "title":"Metax Research Datasets", - "modified":"Fri, 17 Aug 2018 11:50:11 GMT", + "modified":"Wed, 11 Mar 2020 12:47:12 GMT", "$schema":"http://json-schema.org/draft-04/schema#", "type":"object", "allOf":[ @@ -13,10 +12,10 @@ "definitions":{ "Activity":{ "title":"Activity", - "minProperties": 1, "type":"object", "@id":"http://www.w3.org/ns/prov#Activity", "description":"An activity is something that occurs over a period of time and acts upon or with entities; it may include consuming, processing, transforming, modifying, relocating, using, or generating entities.", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -29,7 +28,7 @@ "description":{ "@id":"http://purl.org/dc/terms/description", "title":"Description", - "description":"The description of the activity", + "description":"Generic description of the activity or the event", 
"@type":"http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", "type":"object", "$ref":"#/definitions/langString" @@ -120,16 +119,16 @@ "CatalogRecord":{ "title":"Catalog Record", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#CatalogRecord", "description":"A record in a data catalog, describing a single dataset.", + "minProperties":1, "properties":{ "data_catalog":{ "@id":"http://uri.suomi.fi/datamodel/ns/mrd#catalogidentifier", "title":"Research data catalog", "description":"Research Data Catalog identifier or embedded object to data_catalog property in embedded object.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -138,7 +137,7 @@ "title":"Date Created", "description":"Date of creation of the resource.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -147,7 +146,7 @@ "title":"Date Modified", "description":"Date on which the resource was changed.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -164,7 +163,7 @@ "title":"Contract", "description":"Contract identifier or embedded contract object", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_state":{ @@ -173,14 +172,21 @@ "description":"state of dataset in PAS cycle: 0 = Not in PAS, 1 = Proposed for midtterm, 2 = Proposed for long term, 3, =in packaging service, 4 = in dissemination, 5 = in midterm preservation, 6 = in longterm preservation, 7 = Rejected long-term preservation, 8 = Rejected mid-term preservation", "enum":[ "0", - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8" + "10", + "20", + "30", + "40", + "50", + "60", + "70", + "75", + "80", + "90", + "100", + "110", + "120", + "130", + "140" ], "@type":"http://www.w3.org/2001/XMLSchema#string", "type":"string" 
@@ -201,7 +207,7 @@ "title":"Preservation system description", "description":"Preservation status described for the user", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_reason_description":{ @@ -209,7 +215,7 @@ "title":"Preservation reason description", "description":"Reason for the preservation from the user", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_state_modified":{ @@ -217,7 +223,7 @@ "title":"Preservation status modified", "description":"Date when status was modified", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -228,10 +234,10 @@ "@type":"http://www.w3.org/2001/XMLSchema#boolean", "type":"boolean" }, - "alternate_record":{ + "alternate_record_set":{ "@id":"http://www.w3.org/ns/prov#alternateOf", - "title":"Alternate record", - "description":"Refers to alternate catalog record in different catalog. This reference is created if two datasets in different catalogs have the same preferred identifier", + "title":"Alternate records", + "description":"Refers to alternate catalog records in different catalogs. 
This reference is created if two datasets in different catalogs have the same preferred identifier.", "@type":"@id", "type":"array", "items":{ @@ -239,7 +245,7 @@ "$ref":"#/definitions/CatalogRecord" } }, - "next":{ + "next_dataset_version":{ "@id":"http://www.w3.org/ns/adms#next", "title":"Next version", "description":"A link to the next version of the catalog record", @@ -260,7 +266,7 @@ "title":"Identifier", "description":"Catalog record identifier", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -269,9 +275,16 @@ "title":"Preservation identifier", "description":"PAS identifier", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" + }, + "previous_dataset_version":{ + "@id":"http://uri.suomi.fi/datamodel/ns/mrd#previous_dataset_version", + "title":"Previous version", + "@type":"@id", + "type":"object", + "$ref":"#/definitions/CatalogRecord" } }, "required":[ @@ -284,9 +297,9 @@ "Checksum":{ "title":"Checksum", "type":"object", - "minProperties": 1, "@id":"http://spdx.org/rdf/terms#Checksum", "description":"", + "minProperties":1, "properties":{ "algorithm":{ "@id":"http://spdx.org/rdf/terms#algorithm", @@ -302,6 +315,7 @@ "OTHER" ], "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "checksum_value":{ @@ -309,7 +323,7 @@ "title":"Checksum value", "description":"Value of the checksum. 
xsd:hexBinary", "@type":"http://www.w3.org/2001/XMLSchema#hexBinary", - "minLength": 1, + "minLength":1, "type":"string" } }, @@ -322,16 +336,16 @@ "Concept":{ "title":"Concept", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/2004/02/skos/core#Concept", "description":"An idea or notion; a unit of thought.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"This is the IRI identifier for the concept", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -356,7 +370,7 @@ "title":"In scheme", "description":"Relates a resource (for example a concept) to a concept scheme in which it is included.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -367,30 +381,27 @@ "additionalProperties": false }, "Directory":{ - "title":"Directory in IDA", + "title":"Directory in file storage", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Distribution", "description":"Directory that links to its contained resources.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Unique identifier for the distribution", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", - "format":"uri", - "example":[ - "" - ] + "format":"uri" }, "title":{ "@id":"http://purl.org/dc/terms/title", "title":"Title", "description":"A name given to the file.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "description":{ @@ -398,7 +409,7 @@ "title":"Description", "description":"Free-text account of the file.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "use_category":{ @@ -420,16 +431,16 @@ "DistributionCharacteristics":{ "title":"File characteristics", "type":"object", - 
"minProperties": 1, "@id":"http://www.loc.gov/premis/rdf/v1#ObjectCharasteristics", "description":"Technical properties of files", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", "title":"File Type Name", "description":"A name of the file type", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "description":{ @@ -437,7 +448,7 @@ "title":"File Type Description", "description":"Description of the file type", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "application_name":{ @@ -445,7 +456,7 @@ "title":"Application name", "description":"A designation for the name of the software program that created the object", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string", "example":[ "MSWord" @@ -456,7 +467,7 @@ "title":"Text encoding", "description":"Used text encoding", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" } }, @@ -468,16 +479,16 @@ "Document":{ "title":"Documented link", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Document", "description":"A documented link to a document in the Web.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"URL", "description":"URL of the homepage", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -506,9 +517,9 @@ "Editor":{ "title":"Editor", "type":"object", - "minProperties": 1, "@id":"http://uri.suomi.fi/datamodel/ns/mrd#Editor", - "description":"Software or service that is used to modify the catalog record and the dataset", + "description":"Software or service that is used to modify the catalog record and the dataset.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", @@ -518,6 
+529,7 @@ "QVAIN" ], "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "owner_id":{ @@ -525,7 +537,7 @@ "title":"Owner identifier", "description":"owner of the resource", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "creator_id":{ @@ -533,7 +545,7 @@ "title":"Creator identifier", "description":"creator of the resource", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "record_id":{ @@ -541,7 +553,7 @@ "title":"Record identifier", "description":"local record identifier", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" } }, @@ -550,9 +562,9 @@ "EntityRelation":{ "title":"Entity relation", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/prov#EntityInfluence", "description":"Description of a the dataset influence upon any other kind of entity. In Metax RelationType is used from reference data: https://wiki.eduuni.fi/pages/viewpage.action?spaceKey=CSCMETAX&title=Reference+Data", + "minProperties":1, "properties":{ "entity":{ "@id":"http://www.w3.org/ns/prov#entity", @@ -577,30 +589,27 @@ "additionalProperties": false }, "File":{ - "title":"File in IDA", + "title":"File in file storage", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Distribution", - "description":"File in IDA", + "description":"File in file storage", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Unique identifier for the distribution", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", - "format":"uri", - "example":[ - "" - ] + "format":"uri" }, "title":{ "@id":"http://purl.org/dc/terms/title", "title":"Title", "description":"A name given to the file.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, 
"description":{ @@ -608,7 +617,7 @@ "title":"Description", "description":"Free-text account of the file.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "file_type":{ @@ -638,9 +647,9 @@ "LinguisticSystem":{ "title":"Linguistic System", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/LinguisticSystem", "description":"Examples include written, spoken, sign, and computer languages.\n\nA system of signs, symbols, sounds, gestures, or rules used in communication.", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -655,7 +664,7 @@ "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. \n\nAn unambiguous reference to the resource within a given context.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -665,16 +674,16 @@ "Location":{ "title":"Location", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/Location", "description":"A spatial region or named place.", + "minProperties":1, "properties":{ "geographic_name":{ "@id":"http://www.w3.org/ns/locn#geographicName", "title":"Geographic name", "description":"A geographic name is a proper noun applied to a spatial object. 
Taking the example used in the relevant INSPIRE data specification (page 18), the following are all valid geographic names for the Greek capital:\n- Αθήνα (the Greek endonym written in the Greek script)\n- Athína (the standard Romanisation of the endonym)\n- Athens (the English language exonym)\nFor INSPIRE-conformant data, provide the metadata for the geographic name using a skos:Concept as a datatype.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "alt":{ @@ -682,7 +691,7 @@ "title":"Altitude", "description":"The WGS84 altitude of a SpatialThing (decimal meters \nabove the local reference ellipsoid).", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "full_address":{ @@ -690,7 +699,7 @@ "title":"Full address", "description":"The complete address written as a string, with or without formatting.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "as_wkt":{ @@ -703,7 +712,6 @@ "POLYGON((-122.358 47.653, -122.348 47.649, -122.348 47.658, -122.358 47.658, -122.358 47.653))" ], "items":{ - "minLength": 1, "type":"string" } }, @@ -721,9 +729,9 @@ "Organization":{ "title":"Organization", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Organization", "description":"An organization.", + "minProperties":1, "properties":{ "@type":{ "type":"string", @@ -734,7 +742,7 @@ "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -805,15 +813,15 @@ "PeriodOfTime":{ "title":"Period of Time", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/PeriodOfTime", "description":"An interval of time that is named or defined by its start and end dates.", + "minProperties":1, "properties":{ "start_date":{ 
"@id":"http://schema.org/startDate", "title":"Start of the pediod", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -821,7 +829,7 @@ "@id":"http://schema.org/endDate", "title":"End of the period", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -830,7 +838,7 @@ "title":"Temporal coverage", "description":"Period of time expressed as ISO 8601 compliant string", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" } }, @@ -839,9 +847,9 @@ "Person":{ "title":"Person", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Person", "description":"A person.", + "minProperties":1, "properties":{ "@type":{ "type":"string", @@ -852,7 +860,7 @@ "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -864,7 +872,7 @@ "title":"Name", "description":"This property contains a name of the agent. This property can be repeated for different versions of the name (e.g. the name in different languages)", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "email":{ @@ -934,9 +942,9 @@ "Project":{ "title":"Project", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Project", "description":"A project (a collective endeavour of some kind).", + "minProperties":1, "properties":{ "name":{ "@id":"http://xmlns.com/foaf/0.1/name", @@ -951,7 +959,7 @@ "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. 
An unambiguous reference to the resource within a given context.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "has_funder_identifier":{ @@ -959,7 +967,7 @@ "title":"Project funding identifier", "description":"Unique identifier for the project that is being used by the project funder", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "homepage":{ @@ -1009,9 +1017,9 @@ "RelatedEntity":{ "title":"Related Entity", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/prov#Entity", "description":"Related entity", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -1034,7 +1042,7 @@ "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -1076,7 +1084,7 @@ "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -1088,7 +1096,7 @@ "title":"Name", "description":"This property contains a name of the agent. This property can be repeated for different versions of the name (e.g. 
the name in different languages)", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "email":{ @@ -1150,16 +1158,16 @@ "ResearchDataLicenseDocument":{ "title":"License Document", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/LicenseDocument", "description":"A legal document giving official permission to do something with a Resource.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"License identifier from the reference data.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -1184,7 +1192,7 @@ "title":"Licence URL", "description":"A referenced license document that applies to this content", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -1197,16 +1205,16 @@ "ResearchDataset":{ "title":"Research Dataset", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Dataset", "description":"A collection of data, published or curated by a single source, and available for access or download in one or more formats", + "minProperties":1, "properties":{ "metadata_version_identifier":{ "@id":"http://uri.suomi.fi/datamodel/ns/mrd#datasetidentifier", "title":"Metadata version identifier", "description":"Dataset metadata version identifier in form of http://urn.fi{URN}:version:{number}", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "readOnly": true @@ -1216,7 +1224,7 @@ "title":"Preferred identifier", "description":"Unique identifier for the dataset. 
URN or URI.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -1225,7 +1233,7 @@ "title":"Dataset modification date", "description":"Last known time when a research dataset or metadata about the research dataset has been significantly modified. This field is usually modified by the user or harvested from other source.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -1234,7 +1242,7 @@ "title":"Version", "description":"This property contains a version number or other version designation of the Dataset.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "version_notes":{ @@ -1253,9 +1261,9 @@ "title":"Release date", "description":"Date of formal issuance (e.g., publication) of the resource.", "@type":"http://www.w3.org/2001/XMLSchema#date", - "minLength": 1, + "minLength":1, "type":"string", - "format": "date" + "format":"date" }, "title":{ "@id":"http://purl.org/dc/terms/title", @@ -1298,7 +1306,7 @@ "title":"Preferred Bibliographic Citation", "description":"Preferred bibliographic reference for the resource.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "other_identifier":{ @@ -1514,8 +1522,8 @@ } }, "total_files_byte_size":{ - "@id":"http://uri.suomi.fi/datamodel/ns/mrd#totalidabytesize", - "title":"Total ida byte size", + "@id":"http://uri.suomi.fi/datamodel/ns/mrd#totalfilesbytesize", + "title":"Total files byte size", "description":"Byte size sum of all Fairdata managed files. 
Read only field.", "@type":"http://www.w3.org/2001/XMLSchema#integer", "type":"integer", @@ -1540,9 +1548,9 @@ "RightsStatement":{ "title":"Rights Statement", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/RightsStatement", "description":"A statement about the intellectual property rights (IPR) held in or over a Resource, a legal document giving official permission to do something with a resource, or a statement about access rights.", + "minProperties":1, "properties":{ "description":{ "@id":"http://purl.org/dc/terms/description", @@ -1557,9 +1565,9 @@ "title":"Date Available", "description":"Date (often a range) that the resource became or will become available.", "@type":"http://www.w3.org/2001/XMLSchema#date", - "minLength": 1, + "minLength":1, "type":"string", - "format": "date" + "format":"date" }, "access_type":{ "@id":"http://purl.org/dc/terms/type", @@ -1620,16 +1628,16 @@ "StructuredIdentifier":{ "title":"Identifier", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/adms#Identifier", "description":"Structured identifier", + "minProperties":1, "properties":{ "notation":{ "@id":"http://www.w3.org/2004/02/skos/core#notation", "title":"Identifier value", "description":"Literal value of the identifier", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "local_identifier_type":{ @@ -1637,7 +1645,7 @@ "title":"Local identifier type", "description":"Local identifier type defines use of the identifier in given context.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "provider":{ @@ -1665,9 +1673,9 @@ "Variable":{ "title":"Variable", "type":"object", - "minProperties": 1, "@id":"http://rdf-vocabulary.ddialliance.org/discovery#Variable", "description":"", + "minProperties":1, "properties":{ "pref_label":{ "@id":"http://www.w3.org/2004/02/skos/core#prefLabel", @@ -1706,7 +1714,7 @@ "title":"Representation", 
"description":"Defines reference to a Concept Scheme that includes the possible values of a variable.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -1719,16 +1727,16 @@ "WebResource":{ "title":"Remote Web Resource", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Distribution", "description":"Represents single available resource in the web.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Unique identifier for the distribution from file: scheme", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -1740,7 +1748,7 @@ "title":"Title", "description":"A name given to the distribution.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "description":{ @@ -1748,7 +1756,7 @@ "title":"Description", "description":"Free-text account of the distribution.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "modified":{ @@ -1756,7 +1764,7 @@ "title":"Date Modified", "description":"Date on which the resource was changed.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, diff --git a/src/metax_api/api/rest/base/schemas/ida_dataset_schema.json b/src/metax_api/api/rest/base/schemas/ida_dataset_schema.json index 649ee31a..c67d344e 100644 --- a/src/metax_api/api/rest/base/schemas/ida_dataset_schema.json +++ b/src/metax_api/api/rest/base/schemas/ida_dataset_schema.json @@ -1,8 +1,7 @@ - { "@id":"http://uri.suomi.fi/datamodel/ns/mrd#", "title":"Metax Research Datasets", - "modified":"Fri, 17 Aug 2018 11:50:11 GMT", + "modified":"Wed, 11 Mar 2020 12:36:38 GMT", "$schema":"http://json-schema.org/draft-04/schema#", "type":"object", "allOf":[ @@ -14,9 +13,9 @@ 
"Activity":{ "title":"Activity", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/prov#Activity", "description":"An activity is something that occurs over a period of time and acts upon or with entities; it may include consuming, processing, transforming, modifying, relocating, using, or generating entities.", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -29,7 +28,7 @@ "description":{ "@id":"http://purl.org/dc/terms/description", "title":"Description", - "description":"The description of the activity", + "description":"Generic description of the activity or the event", "@type":"http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", "type":"object", "$ref":"#/definitions/langString" @@ -122,13 +121,14 @@ "type":"object", "@id":"http://www.w3.org/ns/dcat#CatalogRecord", "description":"A record in a data catalog, describing a single dataset.", + "minProperties":1, "properties":{ "data_catalog":{ "@id":"http://uri.suomi.fi/datamodel/ns/mrd#catalogidentifier", "title":"Research data catalog", "description":"Research Data Catalog identifier or embedded object to data_catalog property in embedded object.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -137,7 +137,7 @@ "title":"Date Created", "description":"Date of creation of the resource.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -146,7 +146,7 @@ "title":"Date Modified", "description":"Date on which the resource was changed.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -163,7 +163,7 @@ "title":"Contract", "description":"Contract identifier or embedded contract object", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_state":{ @@ -172,14 
+172,21 @@ "description":"state of dataset in PAS cycle: 0 = Not in PAS, 1 = Proposed for midtterm, 2 = Proposed for long term, 3, =in packaging service, 4 = in dissemination, 5 = in midterm preservation, 6 = in longterm preservation, 7 = Rejected long-term preservation, 8 = Rejected mid-term preservation", "enum":[ "0", - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8" + "10", + "20", + "30", + "40", + "50", + "60", + "70", + "75", + "80", + "90", + "100", + "110", + "120", + "130", + "140" ], "@type":"http://www.w3.org/2001/XMLSchema#string", "type":"string" @@ -191,9 +198,7 @@ "@type":"http://www.w3.org/2001/XMLSchema#string", "type":"array", "items":{ - "minLength": 1, - "type":"string", - "minLength": 1 + "type":"string" } }, "preservation_description":{ @@ -201,7 +206,7 @@ "title":"Preservation system description", "description":"Preservation status described for the user", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_reason_description":{ @@ -209,7 +214,7 @@ "title":"Preservation reason description", "description":"Reason for the preservation from the user", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "preservation_state_modified":{ @@ -217,7 +222,7 @@ "title":"Preservation status modified", "description":"Date when status was modified", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -228,10 +233,10 @@ "@type":"http://www.w3.org/2001/XMLSchema#boolean", "type":"boolean" }, - "alternate_record":{ + "alternate_record_set":{ "@id":"http://www.w3.org/ns/prov#alternateOf", - "title":"Alternate record", - "description":"Refers to alternate catalog record in different catalog. 
This reference is created if two datasets in different catalogs have the same preferred identifier", + "title":"Alternate records", + "description":"Refers to alternate catalog records in different catalogs. This reference is created if two datasets in different catalogs have the same preferred identifier.", "@type":"@id", "type":"array", "items":{ @@ -239,7 +244,7 @@ "$ref":"#/definitions/CatalogRecord" } }, - "next":{ + "next_dataset_version":{ "@id":"http://www.w3.org/ns/adms#next", "title":"Next version", "description":"A link to the next version of the catalog record", @@ -260,7 +265,7 @@ "title":"Identifier", "description":"Catalog record identifier", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -269,9 +274,16 @@ "title":"Preservation identifier", "description":"PAS identifier", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" + }, + "previous_dataset_version":{ + "@id":"http://uri.suomi.fi/datamodel/ns/mrd#previous_dataset_version", + "title":"Previous version", + "@type":"@id", + "type":"object", + "$ref":"#/definitions/CatalogRecord" } }, "required":[ @@ -284,16 +296,16 @@ "Concept":{ "title":"Concept", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/2004/02/skos/core#Concept", "description":"An idea or notion; a unit of thought.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"This is the IRI identifier for the concept", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -318,7 +330,7 @@ "title":"In scheme", "description":"Relates a resource (for example a concept) to a concept scheme in which it is included.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -329,23 +341,20 @@ "additionalProperties": 
false }, "Directory":{ - "title":"Directory in IDA", + "title":"Directory in file storage", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Distribution", "description":"Directory that links to its contained resources.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Unique identifier for the distribution", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", - "format":"uri", - "example":[ - "" - ] + "format":"uri" }, "title":{ "@id":"http://purl.org/dc/terms/title", @@ -360,7 +369,7 @@ "title":"Description", "description":"Free-text account of the file.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "use_category":{ @@ -382,16 +391,16 @@ "Document":{ "title":"Documented link", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Document", "description":"A documented link to a document in the Web.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"URL", "description":"URL of the homepage", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -414,15 +423,14 @@ }, "required":[ "identifier" - ], - "additionalProperties": false + ] }, "Editor":{ "title":"Editor", "type":"object", - "minProperties": 1, "@id":"http://uri.suomi.fi/datamodel/ns/mrd#Editor", - "description":"Software or service that is used to modify the catalog record and the dataset", + "description":"Software or service that is used to modify the catalog record and the dataset.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", @@ -432,6 +440,7 @@ "QVAIN" ], "@type":"http://www.w3.org/2001/XMLSchema#string", + "minLength":1, "type":"string" }, "owner_id":{ @@ -439,7 +448,7 @@ "title":"Owner identifier", "description":"owner of the 
resource", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "creator_id":{ @@ -447,7 +456,7 @@ "title":"Creator identifier", "description":"creator of the resource", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "record_id":{ @@ -455,7 +464,7 @@ "title":"Record identifier", "description":"local record identifier", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" } }, @@ -464,9 +473,9 @@ "EntityRelation":{ "title":"Entity relation", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/prov#EntityInfluence", "description":"Description of a the dataset influence upon any other kind of entity. In Metax RelationType is used from reference data: https://wiki.eduuni.fi/pages/viewpage.action?spaceKey=CSCMETAX&title=Reference+Data", + "minProperties":1, "properties":{ "entity":{ "@id":"http://www.w3.org/ns/prov#entity", @@ -491,30 +500,27 @@ "additionalProperties": false }, "File":{ - "title":"File in IDA", + "title":"File in file storage", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Distribution", - "description":"File in IDA", + "description":"File in file storage", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"Unique identifier for the distribution", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", - "format":"uri", - "example":[ - "" - ] + "format":"uri" }, "title":{ "@id":"http://purl.org/dc/terms/title", "title":"Title", "description":"A name given to the file.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "description":{ @@ -522,7 +528,7 @@ "title":"Description", "description":"Free-text account of the file.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, 
"type":"string" }, "file_type":{ @@ -552,9 +558,9 @@ "LinguisticSystem":{ "title":"Linguistic System", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/LinguisticSystem", "description":"Examples include written, spoken, sign, and computer languages.\n\nA system of signs, symbols, sounds, gestures, or rules used in communication.", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -579,16 +585,16 @@ "Location":{ "title":"Location", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/Location", "description":"A spatial region or named place.", + "minProperties":1, "properties":{ "geographic_name":{ "@id":"http://www.w3.org/ns/locn#geographicName", "title":"Geographic name", "description":"A geographic name is a proper noun applied to a spatial object. Taking the example used in the relevant INSPIRE data specification (page 18), the following are all valid geographic names for the Greek capital:\n- Αθήνα (the Greek endonym written in the Greek script)\n- Athína (the standard Romanisation of the endonym)\n- Athens (the English language exonym)\nFor INSPIRE-conformant data, provide the metadata for the geographic name using a skos:Concept as a datatype.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "alt":{ @@ -596,7 +602,7 @@ "title":"Altitude", "description":"The WGS84 altitude of a SpatialThing (decimal meters \nabove the local reference ellipsoid).", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "full_address":{ @@ -604,7 +610,7 @@ "title":"Full address", "description":"The complete address written as a string, with or without formatting.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "as_wkt":{ @@ -635,9 +641,9 @@ "Organization":{ "title":"Organization", "type":"object", - "minProperties": 1, 
"@id":"http://xmlns.com/foaf/0.1/Organization", "description":"An organization.", + "minProperties":1, "properties":{ "@type":{ "type":"string", @@ -648,7 +654,7 @@ "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -718,15 +724,15 @@ "PeriodOfTime":{ "title":"Period of Time", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/PeriodOfTime", "description":"An interval of time that is named or defined by its start and end dates.", + "minProperties":1, "properties":{ "start_date":{ "@id":"http://schema.org/startDate", "title":"Start of the pediod", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -734,7 +740,7 @@ "@id":"http://schema.org/endDate", "title":"End of the period", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -743,18 +749,17 @@ "title":"Temporal coverage", "description":"Period of time expressed as ISO 8601 compliant string", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" } - }, - "additionalProperties": false + } }, "Person":{ "title":"Person", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Person", "description":"A person.", + "minProperties":1, "properties":{ "@type":{ "type":"string", @@ -765,7 +770,7 @@ "title":"Identifier", "description":"An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "example":[ @@ -777,7 +782,7 @@ "title":"Name", "description":"This property contains a name of the agent. This property can be repeated for different versions of the name (e.g. 
the name in different languages)", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "email":{ @@ -785,9 +790,8 @@ "title":"Email", "description":"Email address.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, - "type":"string", - "format":"email" + "minLength":1, + "type":"string" }, "telephone":{ "@id":"http://schema.org/telephone", @@ -796,7 +800,6 @@ "@type":"http://www.w3.org/2001/XMLSchema#string", "type":"array", "items":{ - "minLength": 1, "type":"string" } }, @@ -848,9 +851,9 @@ "Project":{ "title":"Project", "type":"object", - "minProperties": 1, "@id":"http://xmlns.com/foaf/0.1/Project", "description":"A project (a collective endeavour of some kind).", + "minProperties":1, "properties":{ "name":{ "@id":"http://xmlns.com/foaf/0.1/name", @@ -865,7 +868,7 @@ "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. An unambiguous reference to the resource within a given context.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "has_funder_identifier":{ @@ -873,7 +876,7 @@ "title":"Project funding identifier", "description":"Unique identifier for the project that is being used by the project funder", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "homepage":{ @@ -923,9 +926,9 @@ "RelatedEntity":{ "title":"Related Entity", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/prov#Entity", "description":"Related entity", + "minProperties":1, "properties":{ "title":{ "@id":"http://purl.org/dc/terms/title", @@ -948,7 +951,7 @@ "title":"Identifier", "description":"Recommended best practice is to identify the resource by means of a string conforming to a formal identification system. 
An unambiguous reference to the resource within a given context.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -976,16 +979,16 @@ "ResearchDataLicenseDocument":{ "title":"License Document", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/LicenseDocument", "description":"A legal document giving official permission to do something with a Resource.", + "minProperties":1, "properties":{ "identifier":{ "@id":"http://purl.org/dc/terms/identifier", "title":"Identifier", "description":"License identifier from the reference data.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" }, @@ -1010,7 +1013,7 @@ "title":"Licence URL", "description":"A referenced license document that applies to this content", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } @@ -1026,16 +1029,16 @@ "ResearchDataset":{ "title":"Research Dataset", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/dcat#Dataset", "description":"A collection of data, published or curated by a single source, and available for access or download in one or more formats", + "minProperties":1, "properties":{ "metadata_version_identifier":{ "@id":"http://uri.suomi.fi/datamodel/ns/mrd#datasetidentifier", "title":"Metadata version identifier", "description":"Dataset metadata version identifier in form of http://urn.fi{URN}:version:{number}", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri", "readOnly": true @@ -1045,17 +1048,16 @@ "title":"Preferred identifier", "description":"Unique identifier for the dataset. 
URN or URI.", "@type":"@id", - "minLength": 1, + "minLength":1, "type":"string", - "format":"uri", - "readOnly": true + "format":"uri" }, "modified":{ "@id":"http://purl.org/dc/terms/modified", "title":"Dataset modification date", "description":"Last known time when a research dataset or metadata about the research dataset has been significantly modified. This field is usually modified by the user or harvested from other source.", "@type":"http://www.w3.org/2001/XMLSchema#dateTime", - "minLength": 1, + "minLength":1, "type":"string", "format":"date-time" }, @@ -1064,7 +1066,7 @@ "title":"Version", "description":"This property contains a version number or other version designation of the Dataset.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "version_notes":{ @@ -1083,9 +1085,9 @@ "title":"Release date", "description":"Date of formal issuance (e.g., publication) of the resource.", "@type":"http://www.w3.org/2001/XMLSchema#date", - "minLength": 1, + "minLength":1, "type":"string", - "format": "date" + "format":"date" }, "title":{ "@id":"http://purl.org/dc/terms/title", @@ -1128,7 +1130,7 @@ "title":"Preferred Bibliographic Citation", "description":"Preferred bibliographic reference for the resource.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "other_identifier":{ @@ -1333,8 +1335,8 @@ } }, "total_files_byte_size":{ - "@id":"http://uri.suomi.fi/datamodel/ns/mrd#totalidabytesize", - "title":"Total ida byte size", + "@id":"http://uri.suomi.fi/datamodel/ns/mrd#totalfilesbytesize", + "title":"Total files byte size", "description":"Byte size sum of all Fairdata managed files. 
Read only field.", "@type":"http://www.w3.org/2001/XMLSchema#integer", "type":"integer", @@ -1352,9 +1354,9 @@ "RightsStatement":{ "title":"Rights Statement", "type":"object", - "minProperties": 1, "@id":"http://purl.org/dc/terms/RightsStatement", "description":"A statement about the intellectual property rights (IPR) held in or over a Resource, a legal document giving official permission to do something with a resource, or a statement about access rights.", + "minProperties":1, "properties":{ "description":{ "@id":"http://purl.org/dc/terms/description", @@ -1369,9 +1371,9 @@ "title":"Date Available", "description":"Date (often a range) that the resource became or will become available.", "@type":"http://www.w3.org/2001/XMLSchema#date", - "minLength": 1, + "minLength":1, "type":"string", - "format": "date" + "format":"date" }, "access_type":{ "@id":"http://purl.org/dc/terms/type", @@ -1432,16 +1434,16 @@ "StructuredIdentifier":{ "title":"Identifier", "type":"object", - "minProperties": 1, "@id":"http://www.w3.org/ns/adms#Identifier", "description":"Structured identifier", + "minProperties":1, "properties":{ "notation":{ "@id":"http://www.w3.org/2004/02/skos/core#notation", "title":"Identifier value", "description":"Literal value of the identifier", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "local_identifier_type":{ @@ -1449,7 +1451,7 @@ "title":"Local identifier type", "description":"Local identifier type defines use of the identifier in given context.", "@type":"http://www.w3.org/2001/XMLSchema#string", - "minLength": 1, + "minLength":1, "type":"string" }, "provider":{ @@ -1477,9 +1479,9 @@ "Variable":{ "title":"Variable", "type":"object", - "minProperties": 1, "@id":"http://rdf-vocabulary.ddialliance.org/discovery#Variable", "description":"", + "minProperties":1, "properties":{ "pref_label":{ "@id":"http://www.w3.org/2004/02/skos/core#prefLabel", @@ -1518,7 +1520,7 @@ "title":"Representation", 
"description":"Defines reference to a Concept Scheme that includes the possible values of a variable.", "@type":"http://www.w3.org/2001/XMLSchema#anyURI", - "minLength": 1, + "minLength":1, "type":"string", "format":"uri" } diff --git a/src/metax_api/api/rest/base/serializers/catalog_record_serializer.py b/src/metax_api/api/rest/base/serializers/catalog_record_serializer.py index 5886c805..ae5f379d 100644 --- a/src/metax_api/api/rest/base/serializers/catalog_record_serializer.py +++ b/src/metax_api/api/rest/base/serializers/catalog_record_serializer.py @@ -13,7 +13,7 @@ from metax_api.exceptions import Http403 from metax_api.models import CatalogRecord, DataCatalog, Directory, Contract, Common, File -from metax_api.services import CatalogRecordService as CRS, CommonService +from metax_api.services import CatalogRecordService as CRS, CommonService, DataCatalogService from .common_serializer import CommonSerializer from .contract_serializer import ContractSerializer from .data_catalog_serializer import DataCatalogSerializer @@ -81,7 +81,8 @@ class Meta: 'cumulative_state', 'date_cumulation_started', 'date_cumulation_ended', - 'date_last_cumulative_addition' + 'date_last_cumulative_addition', + 'rems_identifier' ) + CommonSerializer.Meta.fields extra_kwargs = { @@ -122,6 +123,7 @@ def is_valid(self, raise_exception=False): self.initial_data.pop('preservation_identifier', None) self.initial_data.pop('preservation_dataset_version', None) self.initial_data.pop('preservation_dataset_origin_version', None) + self.initial_data.pop('rems_identifier', None) if self._data_catalog_is_changed(): # updating data catalog, but not necessarily research_dataset. 
@@ -150,7 +152,7 @@ def update(self, instance, validated_data): def create(self, validated_data): if self._migration_override_requested(): - # any custom stuff before create that my be necessary for migration purposes + # any custom stuff before create that might be necessary for migration purposes pid = '' if validated_data['research_dataset'].get('preferred_identifier', False): # store pid, since it will be overwritten during create otherwise @@ -161,7 +163,6 @@ def create(self, validated_data): if self._migration_override_requested(): # any custom stuff after create that my be necessary for migration purposes - if pid: # save original pid provided by the requestor res.research_dataset['preferred_identifier'] = pid @@ -299,6 +300,9 @@ def _check_and_strip_sensitive_fields(self, instance, res): if not instance.user_is_privileged(self.context['request']): res['research_dataset'] = CRS.check_and_remove_metadata_based_on_access_type( CRS.remove_contact_info_metadata(res['research_dataset'])) + + res.pop('rems_identifier', None) + return res def _populate_dir_titles(self, ds): @@ -373,6 +377,10 @@ def _validate_json_schema(self, value): if self._operation_is_create: if not value.get('preferred_identifier', None): + if DataCatalogService.is_harvested(self.initial_data['data_catalog']): + raise ValidationError({ 'preferred_identifier': + ['harvested catalog record must have preferred identifier']}) + # normally not present, but may be set by harvesters. if missing, # use temporary value and remove after schema validation. 
value['preferred_identifier'] = 'temp' @@ -557,7 +565,7 @@ def _data_catalog_is_changed(self): elif isinstance(dc, dict): return dc['identifier'] != self.instance.catalog_json['identifier'] else: # pragma: no cover - raise ValidationError({ 'detail': ['cant figure out the type of data_catalog'] }) + raise ValidationError({ 'detail': ['can not figure out the type of data_catalog'] }) def _preferred_identifier_is_changed(self): """ diff --git a/src/metax_api/api/rest/base/serializers/data_catalog_serializer.py b/src/metax_api/api/rest/base/serializers/data_catalog_serializer.py index 660000aa..20a07088 100644 --- a/src/metax_api/api/rest/base/serializers/data_catalog_serializer.py +++ b/src/metax_api/api/rest/base/serializers/data_catalog_serializer.py @@ -24,6 +24,8 @@ class Meta: 'catalog_json', 'catalog_record_group_edit', 'catalog_record_group_create', + 'catalog_record_services_edit', + 'catalog_record_services_create', ) + CommonSerializer.Meta.fields extra_kwargs = CommonSerializer.Meta.extra_kwargs diff --git a/src/metax_api/api/rest/base/serializers/file_serializer.py b/src/metax_api/api/rest/base/serializers/file_serializer.py index d866e933..669498ee 100644 --- a/src/metax_api/api/rest/base/serializers/file_serializer.py +++ b/src/metax_api/api/rest/base/serializers/file_serializer.py @@ -10,6 +10,7 @@ from rest_framework import serializers from rest_framework.serializers import ValidationError from rest_framework.validators import UniqueValidator +from django.conf import settings from metax_api.models import Directory, File, FileStorage from metax_api.services import FileService as FS @@ -31,7 +32,7 @@ 'service_modified', '__request' ] - +CHECKSUM_ALGORITHMS = settings.CHECKSUM_ALGORITHMS class FileSerializer(CommonSerializer): @@ -160,6 +161,17 @@ def validate_file_path(self, value): return value + def validate_checksum_algorithm(self, value): + """ + Ensure that used checksum_algorithm is one of the configured in settings file. 
+ """ + + if value not in CHECKSUM_ALGORITHMS: + raise ValidationError('file checksum_algorithm should be one of {}, now {}' + .format(CHECKSUM_ALGORITHMS, value)) + + return value + def _end_user_update_validations(self, validated_data): """ Enforce some rules related to end users when updating files. diff --git a/src/metax_api/api/rest/base/views/common_view.py b/src/metax_api/api/rest/base/views/common_view.py index 0801573e..fcdabfa6 100644 --- a/src/metax_api/api/rest/base/views/common_view.py +++ b/src/metax_api/api/rest/base/views/common_view.py @@ -129,12 +129,6 @@ def paginate_queryset(self, queryset): if CS.get_boolean_query_param(self.request, 'no_pagination'): return None - if self.request.query_params.get('ordering'): - # for some reason ordering is not taken into account when using pagination. - # ensure queryset is ordered. - ordering = self.request.query_params.get('ordering').split(',') - queryset.order_by(*ordering) - return super(CommonViewSet, self).paginate_queryset(queryset) def get_queryset(self): diff --git a/src/metax_api/api/rest/base/views/file_view.py b/src/metax_api/api/rest/base/views/file_view.py index 2ff43937..9ff2c130 100644 --- a/src/metax_api/api/rest/base/views/file_view.py +++ b/src/metax_api/api/rest/base/views/file_view.py @@ -149,6 +149,10 @@ def datasets(self, request): parameters in GET. Also, some clients forcibly shove parameters in body in GET requests to query parameters, so using POST instead is more guaranteed to work. 
""" + + if CommonService.get_boolean_query_param(request, 'detailed'): + return FileService.get_detailed_datasets_where_file_belongs_to(request.data) + return FileService.get_datasets_where_file_belongs_to(request.data) @list_route(methods=['post'], url_path="restore") diff --git a/src/metax_api/middleware/identifyapicaller.py b/src/metax_api/middleware/identifyapicaller.py index 1116e74b..abc04ba5 100644 --- a/src/metax_api/middleware/identifyapicaller.py +++ b/src/metax_api/middleware/identifyapicaller.py @@ -103,6 +103,7 @@ def _identify_api_caller(self, request): Valid service users and authentication methods are listed in app_config. """ + http_auth_header = request.META.get('HTTP_AUTHORIZATION', None) if not http_auth_header: @@ -184,11 +185,7 @@ def _auth_bearer(self, request, auth_b64): _logger.exception('Failed to extract token from id_token string') raise Http403 - # todo temporary. eventually only CSCUsername will be supported - # (or make field configurable, but for now support both) - if token.get('sub', '').endswith('@fairdataid'): - request.user.username = token['sub'] - elif len(token.get('CSCUserName', '')) > 0: + if len(token.get('CSCUserName', '')) > 0: request.user.username = token['CSCUserName'] else: _logger.warning('id_token does not contain valid user id: fairdataid or cscusername') diff --git a/src/metax_api/migrations/0015_auto_20200218_1607.py b/src/metax_api/migrations/0015_auto_20200218_1607.py new file mode 100644 index 00000000..fdeda0d5 --- /dev/null +++ b/src/metax_api/migrations/0015_auto_20200218_1607.py @@ -0,0 +1,23 @@ +# Generated by Django 2.2.9 on 2020-02-18 14:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('metax_api', '0014_catalogrecord_state'), + ] + + operations = [ + migrations.AddField( + model_name='datacatalog', + name='catalog_record_services_create', + field=models.CharField(blank=True, help_text='Services which are allowed to edit catalog records 
in the catalog.', max_length=200, null=True), + ), + migrations.AddField( + model_name='datacatalog', + name='catalog_record_services_edit', + field=models.CharField(blank=True, help_text='Services which are allowed to edit catalog records in the catalog.', max_length=200, null=True), + ), + ] diff --git a/src/metax_api/migrations/0016_auto_20200310_1341.py b/src/metax_api/migrations/0016_auto_20200310_1341.py new file mode 100644 index 00000000..a3449f62 --- /dev/null +++ b/src/metax_api/migrations/0016_auto_20200310_1341.py @@ -0,0 +1,33 @@ +# Generated by Django 2.2.9 on 2020-03-10 11:41 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('metax_api', '0015_auto_20200218_1607'), + ] + + operations = [ + migrations.AlterField( + model_name='datacatalog', + name='catalog_record_group_create', + field=models.CharField(help_text='Group which is allowed to add new catalog records to the catalog.', max_length=200, null=True), + ), + migrations.AlterField( + model_name='datacatalog', + name='catalog_record_group_edit', + field=models.CharField(help_text='Group which is allowed to edit catalog records in the catalog.', max_length=200, null=True), + ), + migrations.AlterField( + model_name='datacatalog', + name='catalog_record_services_create', + field=models.CharField(help_text='Services which are allowed to edit catalog records in the catalog.', max_length=200, null=True), + ), + migrations.AlterField( + model_name='datacatalog', + name='catalog_record_services_edit', + field=models.CharField(help_text='Services which are allowed to edit catalog records in the catalog.', max_length=200, null=True), + ), + ] diff --git a/src/metax_api/migrations/0017_catalogrecord_rems_identifier.py b/src/metax_api/migrations/0017_catalogrecord_rems_identifier.py new file mode 100644 index 00000000..bccc4d04 --- /dev/null +++ b/src/metax_api/migrations/0017_catalogrecord_rems_identifier.py @@ -0,0 +1,18 @@ +# Generated by 
Django 2.2.10 on 2020-02-24 12:30 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('metax_api', '0016_auto_20200310_1341'), + ] + + operations = [ + migrations.AddField( + model_name='catalogrecord', + name='rems_identifier', + field=models.CharField(default=None, help_text='Defines corresponding catalog item in REMS service', max_length=200, null=True), + ), + ] diff --git a/src/metax_api/models/catalog_record.py b/src/metax_api/models/catalog_record.py index f9564c53..bc3bf3b9 100644 --- a/src/metax_api/models/catalog_record.py +++ b/src/metax_api/models/catalog_record.py @@ -313,6 +313,9 @@ class CatalogRecord(Common): _access_granter = JSONField(null=True, default=None, help_text='Stores data of REMS user who is currently granting access to this dataset') + rems_identifier = models.CharField(max_length=200, null=True, default=None, + help_text='Defines corresponding catalog item in REMS service') + # END OF MODEL FIELD DEFINITIONS # """ @@ -369,15 +372,34 @@ def user_has_access(self, request): In the future, will probably be more involved checking... 
""" if request.user.is_service: + if request.method == 'GET': + return True + if not self._check_catalog_permissions(self.data_catalog.catalog_record_group_edit, + self.data_catalog.catalog_record_services_edit, request): + return False return True elif request.method in READ_METHODS: - return True + if request.user.username is None: # unauthenticated user + if self.state == self.STATE_PUBLISHED: + return True + else: + raise Http404 + else: # enduser + if self.state == self.STATE_PUBLISHED: + return True + elif self.state == self.STATE_DRAFT and self.metadata_provider_user == request.user.username: + return True + else: + raise Http404 # write operation return self.user_is_owner(request) def user_is_owner(self, request): + if self.state == self.STATE_DRAFT and self.metadata_provider_user != request.user.username: + raise Http404 + if self.editor and 'owner_id' in self.editor: return request.user.username == self.editor['owner_id'] elif self.metadata_provider_user: @@ -404,7 +426,7 @@ def user_is_privileged(self, request): # unknown user return False - def _check_catalog_permissions(self, catalog_groups): + def _check_catalog_permissions(self, catalog_groups, catalog_services, request=None): """ Some data catalogs can only allow writing datasets from a specific group of users. Check if user has group/project which permits creating or editing datasets in @@ -413,21 +435,34 @@ def _check_catalog_permissions(self, catalog_groups): Note that there is also parameter END_USER_ALLOWED_DATA_CATALOGS in settings.py which dictates which catalogs are open for end users. 
""" + # populates self.request if not existing; happens with DELETE-request when self.request object is empty + if request: + self.request = request + if not self.request: # pragma: no cover # should only only happen when setting up test cases assert executing_test_case(), 'only permitted when setting up testing conditions' return True - if not catalog_groups: - return True - if self.request.user.is_service: - return True + if catalog_services: + allowed_services = [i.lower() for i in catalog_services.split(',')] + from metax_api.services import AuthService + return AuthService.check_services_against_allowed_services(self.request, allowed_services) + return False - allowed_groups = catalog_groups.split(',') + elif not self.request.user.is_service: + if catalog_groups: + allowed_groups = catalog_groups.split(',') - from metax_api.services import AuthService - return AuthService.check_user_groups_against_groups(self.request, allowed_groups) + from metax_api.services import AuthService + return AuthService.check_user_groups_against_groups(self.request, allowed_groups) + return True + + _logger.info( + 'Catalog {} is not belonging to any service or group '.format(self.data_catalog.catalog_json['identifier']) + ) + return False def _access_type_is_open(self): from metax_api.services import CatalogRecordService as CRS @@ -861,6 +896,11 @@ def _path_included_in_previous_metadata_version(self, project, path): ) def delete(self, *args, **kwargs): + if self.state == self.STATE_DRAFT: + # delete permanently instead of only marking as 'removed' + super(Common, self).delete() + return + if self.has_alternate_records(): self._remove_from_alternate_record_set() if get_identifier_type(self.preferred_identifier) == IdentifierType.DOI: @@ -868,7 +908,11 @@ def delete(self, *args, **kwargs): 'delete')) if self._dataset_has_rems_managed_access() and settings.REMS['ENABLED']: - self.add_post_request_callable(REMSUpdate(self, 'close', reason='deletion')) + 
self.add_post_request_callable( + REMSUpdate(self, 'close', rems_id=self.rems_identifier, reason='dataset deletion') + ) + self.rems_identifier = None + super().save(update_fields=['rems_identifier']) self.add_post_request_callable(RabbitMQPublishRecord(self, 'delete')) @@ -896,7 +940,11 @@ def deprecate(self, timestamp=None): self.date_deprecated = self.date_modified = timestamp or get_tz_aware_now_without_micros() if self._dataset_has_rems_managed_access() and settings.REMS['ENABLED']: - self.add_post_request_callable(REMSUpdate(self, 'close', reason='deprecation')) + self.add_post_request_callable( + REMSUpdate(self, 'close', rems_id=self.rems_identifier, reason='dataset deprecation') + ) + self.rems_identifier = None + super().save(update_fields=['rems_identifier']) super().save(update_fields=['deprecated', 'date_deprecated', 'date_modified']) self.add_post_request_callable(DelayedLog( @@ -959,6 +1007,10 @@ def catalog_is_pas(self): def has_alternate_records(self): return bool(self.alternate_record_set) + def _save_as_draft(self, request): + from metax_api.services import CommonService + return CommonService.get_boolean_query_param(self.request, 'draft') and settings.DRAFT_ENABLED + def get_metadata_version_listing(self): entries = [] for entry in self.research_dataset_versions.all(): @@ -1007,9 +1059,13 @@ def _validate_for_rems(self): def _pre_create_operations(self, pid_type=None): - if not self._check_catalog_permissions(self.data_catalog.catalog_record_group_create): + if not self._check_catalog_permissions(self.data_catalog.catalog_record_group_create, + self.data_catalog.catalog_record_services_create): raise Http403({ 'detail': [ 'You are not permitted to create datasets in this data catalog.' 
]}) + self.research_dataset['metadata_version_identifier'] = generate_uuid_identifier() + self.identifier = generate_uuid_identifier() + if self.catalog_is_pas(): # todo: default identifier type could probably be a parameter of the data catalog pref_id_type = IdentifierType.DOI @@ -1020,6 +1076,7 @@ def _pre_create_operations(self, pid_type=None): # in harvested catalogs, the harvester is allowed to set the preferred_identifier. # do not overwrite. pass + elif self.catalog_is_legacy(): if 'preferred_identifier' not in self.research_dataset: raise ValidationError({ @@ -1035,6 +1092,9 @@ def _pre_create_operations(self, pid_type=None): 'Catalog %s is a legacy catalog - not generating pid' % self.data_catalog.catalog_json['identifier'] ) + elif self._save_as_draft(self.request): + self.state = self.STATE_DRAFT + self.research_dataset['preferred_identifier'] = self.identifier else: if pref_id_type == IdentifierType.URN: self.research_dataset['preferred_identifier'] = generate_uuid_identifier(urn_prefix=True) @@ -1050,9 +1110,6 @@ def _pre_create_operations(self, pid_type=None): _logger.debug("Identifier type not specified in the request. Using URN identifier for pref id") self.research_dataset['preferred_identifier'] = generate_uuid_identifier(urn_prefix=True) - self.research_dataset['metadata_version_identifier'] = generate_uuid_identifier() - self.identifier = generate_uuid_identifier() - if not self.metadata_owner_org: # field metadata_owner_org is optional, but must be set. in case it is omitted, # derive from metadata_provider_org. 
@@ -1071,10 +1128,6 @@ def _pre_create_operations(self, pid_type=None): self.date_cumulation_started = self.date_created def _post_create_operations(self): - if self.catalog_versions_datasets(): - dvs = DatasetVersionSet() - dvs.save() - dvs.records.add(self) if 'files' in self.research_dataset or 'directories' in self.research_dataset: # files must be added after the record itself has been created, to be able @@ -1091,18 +1144,25 @@ def _post_create_operations(self): if other_record: self._create_or_update_alternate_record_set(other_record) - if get_identifier_type(self.preferred_identifier) == IdentifierType.DOI: - self._validate_cr_against_datacite_schema() - self.add_post_request_callable(DataciteDOIUpdate(self, self.research_dataset['preferred_identifier'], - 'create')) + if self._save_as_draft(self.request): + # do nothing + pass + else: + if self.catalog_versions_datasets(): + dvs = DatasetVersionSet() + dvs.save() + dvs.records.add(self) - if self._dataset_has_rems_managed_access() and settings.REMS['ENABLED']: - self._validate_for_rems() - user_info = self._get_user_info_for_rems() - self._access_granter = user_info - self.add_post_request_callable(REMSUpdate(self, 'create', user_info=user_info)) + if get_identifier_type(self.preferred_identifier) == IdentifierType.DOI: + self._validate_cr_against_datacite_schema() + self.add_post_request_callable(DataciteDOIUpdate(self, self.research_dataset['preferred_identifier'], + 'create')) - self.add_post_request_callable(RabbitMQPublishRecord(self, 'create')) + if self._dataset_has_rems_managed_access() and settings.REMS['ENABLED']: + self._handle_rems_managed_access() + super().save(update_fields=['rems_identifier']) + + self.add_post_request_callable(RabbitMQPublishRecord(self, 'create')) _logger.info( 'Created a new >'preferred_identifier' as preferred_identifier - from metax_api_catalogrecord cr - inner join metax_api_catalogrecord_files cr_f on catalogrecord_id = cr.id - where cr_f.file_id in %s and cr.removed 
= false and cr.active = true - group by preferred_identifier + SELECT research_dataset->>'preferred_identifier' AS preferred_identifier + FROM metax_api_catalogrecord cr + INNER JOIN metax_api_catalogrecord_files cr_f + ON catalogrecord_id = cr.id + WHERE cr_f.file_id IN %s + AND cr.removed = false AND cr.active = true + GROUP BY preferred_identifier """ with connection.cursor() as cr: @@ -243,6 +242,44 @@ def get_datasets_where_file_belongs_to(cls, file_identifiers): return Response(preferred_identifiers, status=status.HTTP_200_OK) + @classmethod + def get_detailed_datasets_where_file_belongs_to(cls, file_identifiers): + """ + Find out which (non-deprecated) datasets a list of files belongs to, and return + their preferred_identifiers per file as a list in json format. + + Parameter file_identifiers can be a list of pk's (integers), or file identifiers (strings). + """ + _logger.info('Retrieving detailed list of datasets where files belong to') + + file_ids = cls._file_identifiers_to_ids(file_identifiers) + + _logger.info('Looking datasets for the following files (printing first 10):\n%s' + % '\n'.join(str(id) for id in file_identifiers[:10])) + + sql_select_related_records = """ + SELECT f.identifier, json_agg(cr.research_dataset->>'preferred_identifier') + FROM metax_api_file f + JOIN metax_api_catalogrecord_files cr_f + ON f.id=cr_f.file_id + JOIN metax_api_catalogrecord cr + ON cr.id=cr_f.catalogrecord_id + WHERE f.id IN %s + AND cr.removed = false AND cr.active = true + GROUP BY f.id + ORDER BY f.id ASC; + """ + + with connection.cursor() as cr: + cr.execute(sql_select_related_records, [tuple(file_ids)]) + if cr.rowcount == 0: + preferred_identifiers = [] + _logger.info('No datasets found for files') + else: + preferred_identifiers = cr.fetchall() + _logger.info('Found following datasets:\n%s' % preferred_identifiers) + return Response(dict(preferred_identifiers), status=status.HTTP_200_OK) + @classmethod def destroy_single(cls, file): """ diff --git 
a/src/metax_api/services/rems_service.py b/src/metax_api/services/rems_service.py index 60213108..49652d7f 100644 --- a/src/metax_api/services/rems_service.py +++ b/src/metax_api/services/rems_service.py @@ -71,30 +71,51 @@ def create_rems_entity(self, cr, user_info): self._create_catalogue_item(res_id, wf_id) - def close_rems_entity(self, cr, reason): + def close_rems_entity(self, old_rems_id, reason): """ Closes all applications and archives and disables all related entities """ - pref_id = cr.research_dataset['preferred_identifier'] - title = cr.research_dataset['title'].get('en') or cr.research_dataset['title'].get('fi') + rems_ci = self._get_catalogue_item(old_rems_id) - rems_ci = self._get_rems( - 'catalogue-item', - f'resource={pref_id}&archived=true&disabled=true' - ) + self._close_applications(old_rems_id, reason) + + self._close_entity('catalogue-item', rems_ci[0]['id']) + self._close_entity('workflow', rems_ci[0]['wfid']) + self._close_entity('resource', rems_ci[0]['resource-id']) + + def update_rems_entity(self, cr, old_rems_id, reason): + """ + Archives and disables related catalogue_item and resource, closes all applications and + creates new resource and catalogue_item with correct license. cr contains new rems_identifier + and old identifier is given as parameter because dataset changed have been saved at this + point already. 
+ """ + self.cr = cr - if len(rems_ci) < 1: - # this should not happen but do not block the metax dataset removal - _logger.error(f'Could not find catalogue-item for {cr.identifier} in REMS.') - return + rems_ci = self._get_catalogue_item(old_rems_id) - self._close_applications(title, pref_id, reason) + self._close_applications(old_rems_id, reason) self._close_entity('catalogue-item', rems_ci[0]['id']) - self._close_entity('workflow', rems_ci[0]['wfid']) self._close_entity('resource', rems_ci[0]['resource-id']) - def _close_applications(self, title, pref_id, reason): + license_id = self._create_license() + res_id = self._create_resource(license_id) + self._create_catalogue_item(res_id, rems_ci[0]['wfid']) + + def _get_catalogue_item(self, rems_id): + rems_ci = self._get_rems( + 'catalogue-item', + f'resource={rems_id}&archived=true&disabled=true' + ) + + if len(rems_ci) < 1: # pragma: no cover + # this should not happen + raise REMSException(f'Could not find catalogue-item for {rems_id} in REMS.') + + return rems_ci + + def _close_applications(self, rems_id, reason): """ Get all applications that are related to dataset and close them. Application state determines which user (applicant or handler) can close the application. 
@@ -103,7 +124,7 @@ def _close_applications(self, title, pref_id, reason): # REMS only allows reporter_user to get all applications self.headers['x-rems-user-id'] = self.reporter_user - applications = self._get_rems('application', f'query=resource:\"{pref_id}\"') + applications = self._get_rems('application', f'query=resource:\"{rems_id}\"') for application in applications: if application['application/state'] in HANDLER_CLOSEABLE_APPLICATIONS: @@ -172,7 +193,7 @@ def _create_license(self): def _create_resource(self, license_id): body = { - "resid": self.cr.research_dataset['preferred_identifier'], + "resid": self.cr.rems_identifier, "organization": self.cr.metadata_owner_org, "licenses": [license_id] } diff --git a/src/metax_api/settings.py b/src/metax_api/settings.py index beca2a99..2837041c 100755 --- a/src/metax_api/settings.py +++ b/src/metax_api/settings.py @@ -65,10 +65,16 @@ 'password': 'password' } + API_EXT_USER = { + 'username': 'external', + 'password': 'externalpassword' + } + API_TEST_USERS = [ API_TEST_USER, API_METAX_USER, API_AUTH_TEST_USER, + API_EXT_USER ] API_ACCESS = { @@ -93,9 +99,9 @@ }, "datasets": { "read": ["all"], - "create": ["testuser", "metax", "api_auth_user", "endusers"], - "update": ["testuser", "metax", "api_auth_user", "endusers"], - "delete": ["testuser", "metax", "api_auth_user", "endusers"] + "create": ["testuser", "metax", "api_auth_user", "endusers", "external"], + "update": ["testuser", "metax", "api_auth_user", "endusers", "external"], + "delete": ["testuser", "metax", "api_auth_user", "endusers", "external"] }, "directories": { "read": ["testuser", "metax", "endusers"], @@ -170,6 +176,8 @@ ATT_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-att" PAS_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-pas" LEGACY_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-legacy" + EXT_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-ext" + else: IDA_DATA_CATALOG_IDENTIFIER = 
app_config_dict['IDA_DATACATALOG_IDENTIFIER'] ATT_DATA_CATALOG_IDENTIFIER = app_config_dict['ATT_DATACATALOG_IDENTIFIER'] @@ -199,6 +207,11 @@ # endpoint in localhost where bearer tokens should be sent for validation VALIDATE_TOKEN_URL = 'https://127.0.0.1/secure/validate_token' +if executing_in_test_case or executing_in_travis: + CHECKSUM_ALGORITHMS = ['SHA-256', 'MD5', 'SHA-512'] +else: + CHECKSUM_ALGORITHMS = app_config_dict['CHECKSUM_ALGORITHMS'] + if executing_in_test_case or executing_in_travis: ERROR_FILES_PATH = '/tmp/metax-api-tests/errors' else: @@ -594,7 +607,7 @@ else: REMS = { 'ENABLED': app_config_dict.get('REMS', {}).get('ENABLED'), - 'API_KEY': app_config_dict.get('REMS', {}).get('API_KEY'), + 'API_KEY': str(app_config_dict.get('REMS', {}).get('API_KEY')), 'BASE_URL': app_config_dict.get('REMS', {}).get('BASE_URL'), 'ETSIN_URL_TEMPLATE': app_config_dict.get('REMS', {}).get('ETSIN_URL_TEMPLATE'), 'METAX_USER': app_config_dict.get('REMS', {}).get('METAX_USER'), @@ -602,3 +615,8 @@ 'AUTO_APPROVER': app_config_dict.get('REMS', {}).get('AUTO_APPROVER'), 'FORM_ID': int(app_config_dict.get('REMS', {}).get('FORM_ID')), } + +if executing_in_test_case or executing_in_travis: + DRAFT_ENABLED = True +else: + DRAFT_ENABLED = app_config_dict.get('DRAFT_ENABLED', False) diff --git a/src/metax_api/templates/secure/auth_success.html b/src/metax_api/templates/secure/auth_success.html index dcc1f2c1..4c309deb 100644 --- a/src/metax_api/templates/secure/auth_success.html +++ b/src/metax_api/templates/secure/auth_success.html @@ -15,30 +15,35 @@

Where have I arrived?

You are logged in as:

{{ email }}

- {% if linked_accounts %} -

Linked accounts

-

The following accounts have been linked with your Fairdata identity:

- - {% endif %} + {% if idm_account_exists and home_org_exists %} +

Your token is:

+

{{ token_string }}

- {% if csc_account_linked %} +

Valid until:

+

{{ token_valid_until }}

+

Terms of Use

+

By using Metax End User API the user agrees that he or she has asked consent from all persons whose personal information the user will add to the descriptive data and informed them of how they can get their personal data removed. By using the API the user agrees to the Terms of Usage.

+ {% elif not idm_account_exists %} +

How to get an API authentication token?

- Your CSC-account is linked to your Fairdata identity, so you should be able to access your IDA projects using Metax, and other services. If you linked the account only a short while ago, it takes about an hour for your project information to become available for Metax. + You currently do not have a CSC account. In order to get an authentication token and interact directly with the Metax API, register a CSC account and come back to this page.

- {% else %} + {% elif not home_org_exists %} +

How to get an API authentication token?

- Your Fairdata identity has not been linked with a CSC-account. If you are going to interact with IDA file metadata, or publish datasets with IDA files, this is a good opportunity to link your CSC-account to your Fairdata-account. You can do that here. Once your accounts have been linked, it takes about an hour for your project information to become available for Metax. + You currently do not have a home organization set. In order to get an authentication token and interact directly with the Metax API, contact CSC helpdesk to get your home organization sorted out and come back to this page.

{% endif %} -

Your token (id_token) is:

-

{{ token_string }}

+

+ Logout +

+ + {% if haka_exists %} +

+ IF LOGGED IN USING HAKA, CLOSE BROWSER IN ORDER TO COMPLETELY CLOSE CURRENT SESSION! +

+ {% endif %} -

Valid until:

-

{{ token_valid_until }}

diff --git a/src/metax_api/templates/secure/auth_success_new.html b/src/metax_api/templates/secure/auth_success_new.html deleted file mode 100644 index 4c309deb..00000000 --- a/src/metax_api/templates/secure/auth_success_new.html +++ /dev/null @@ -1,49 +0,0 @@ - - - - - Metax End User Authentication - - -

Metax API Authentication Successful

- -

Where have I arrived?

-

- This page gives you an authentication token that you can use to interact directly with Metax API. For more information, visit Metax documentation about End User Access. -

- -

You are logged in as:

-

{{ email }}

- - {% if idm_account_exists and home_org_exists %} -

Your token is:

-

{{ token_string }}

- -

Valid until:

-

{{ token_valid_until }}

-

Terms of Use

-

By using Metax End User API the user agrees that he or she has asked consent from all persons whose personal information the user will add to the descriptive data and informed them of how they can get their personal data removed. By using the API the user agrees to the Terms of Usage.

- {% elif not idm_account_exists %} -

How to get an API authentication token?

-

- You currently do not have a CSC account. In order to get an authentication token and interact directly with the Metax API, register a CSC account and come back to this page. -

- {% elif not home_org_exists %} -

How to get an API authentication token?

-

- You currently do not have a home organization set. In order to get an authentication token and interact directly with the Metax API, contact CSC helpdesk to get your home organization sorted out and come back to this page. -

- {% endif %} - -

- Logout -

- - {% if haka_exists %} -

- IF LOGGED IN USING HAKA, CLOSE BROWSER IN ORDER TO COMPLETELY CLOSE CURRENT SESSION! -

- {% endif %} - - - diff --git a/src/metax_api/tests/api/rest/base/views/common/write.py b/src/metax_api/tests/api/rest/base/views/common/write.py index 151ac3d1..21011330 100644 --- a/src/metax_api/tests/api/rest/base/views/common/write.py +++ b/src/metax_api/tests/api/rest/base/views/common/write.py @@ -238,12 +238,11 @@ def test_atomic_create(self): record_count_before = CatalogRecord.objects.all().count() response = self.client.post('/rest/datasets?atomic=true', [cr, cr2, cr3], format="json") - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.content) - self.assertEqual(len(response.data['success']) == 0, True) - self.assertEqual(len(response.data['failed']) == 1, True) - self.assertEqual('detail' in response.data, True) - self.assertEqual('atomic' in response.data['detail'][0], True) + self.assertEqual(len(response.data['success']) == 0, True, response.data) + self.assertEqual(len(response.data['failed']) == 1, True, response.data) + self.assertEqual('detail' in response.data, True, response.data) + self.assertEqual('atomic' in response.data['detail'][0], True, response.data) self.assertEqual(record_count_before, CatalogRecord.objects.all().count(), 'shouldnt create new records') def test_atomic_update(self): diff --git a/src/metax_api/tests/api/rest/base/views/datasets/read.py b/src/metax_api/tests/api/rest/base/views/datasets/read.py index 97ad8545..89eb7db2 100644 --- a/src/metax_api/tests/api/rest/base/views/datasets/read.py +++ b/src/metax_api/tests/api/rest/base/views/datasets/read.py @@ -337,38 +337,45 @@ def test_read_catalog_record_search_by_preservation_state(self): ''' Various simple filtering requests ''' - response = self.client.get('/rest/datasets?state=0') - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']) > 2, True, - 'There should have been multiple results for state=0 request') + for queryparam in ('preservation_state', 'state'): + response = 
self.client.get('/rest/datasets?{}=0'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']) > 2, True, + 'There should have been multiple results for state=0 request') - response = self.client.get('/rest/datasets?state=10') - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']), 2) + response = self.client.get('/rest/datasets?{}=10'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']), 2) - response = self.client.get('/rest/datasets?state=40') - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']), 1) + response = self.client.get('/rest/datasets?{}=40'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']), 1) def test_read_catalog_record_search_by_preservation_state_666(self): - response = self.client.get('/rest/datasets?state=666') - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']), 0, 'should return empty list') + for queryparam in ('preservation_state', 'state'): + response = self.client.get('/rest/datasets?{}=666'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']), 0, 'should return empty list') def test_read_catalog_record_search_by_preservation_state_many(self): - response = self.client.get('/rest/datasets?state=10,40') - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']), 3) - self.assertEqual(response.data['results'][0]['preservation_state'], 10) - self.assertEqual(response.data['results'][1]['preservation_state'], 10) - self.assertEqual(response.data['results'][2]['preservation_state'], 40) + for queryparam in ('preservation_state', 
'state'): + response = self.client.get('/rest/datasets?{}=10,40'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']), 3) + self.assertEqual(response.data['results'][0]['preservation_state'], 10) + self.assertEqual(response.data['results'][1]['preservation_state'], 10) + self.assertEqual(response.data['results'][2]['preservation_state'], 40) def test_read_catalog_record_search_by_preservation_state_invalid_value(self): response = self.client.get('/rest/datasets?state=1,a') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual('is not an integer' in response.data['state'][0], True, - 'Error should say letter a is not an integer') + 'Error should say letter a is not an integer') + response = self.client.get('/rest/datasets?preservation_state=1,a') + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual('is not an integer' in response.data['preservation_state'][0], True, + 'Error should say letter a is not an integer') class CatalogRecordApiReadActorFilter(CatalogRecordApiReadCommon): @@ -448,9 +455,10 @@ def test_agents_and_actors(self): response = self.client.get('/rest/datasets?pas_filter=janus&creator_organization=sad organization') self.assertEqual(len(response.data['results']), 1) - response = self.client.get('/rest/datasets?state=10&pas_filter=kaisa&creator_organization=notfound') - self.assertEqual(len(response.data['results']), 0) - + for queryparam in ('preservation_state', 'state'): + response = self.client.get('/rest/datasets?{}=10&pas_filter=kaisa&' + 'creator_organization=notfound'.format(queryparam)) + self.assertEqual(len(response.data['results']), 0) class CatalogRecordApiReadPASFilter(CatalogRecordApiReadCommon): @@ -481,35 +489,37 @@ def test_pas_filter(self): # beging testing - response = self.client.get('/rest/datasets?state=10&pas_filter=if you') - self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(len(response.data['results']), 1) + for queryparam in ('preservation_state', 'state'): + response = self.client.get('/rest/datasets?{}=10&pas_filter=if you'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']), 1) - response = self.client.get('/rest/datasets?state=10&pas_filter=kiinni jos') - self.assertEqual(len(response.data['results']), 1) + response = self.client.get('/rest/datasets?{}=10&pas_filter=kiinni jos'.format(queryparam)) + self.assertEqual(len(response.data['results']), 1) - response = self.client.get('/rest/datasets?state=10&pas_filter=niemine') - self.assertEqual(len(response.data['results']), 1) + response = self.client.get('/rest/datasets?{}=10&pas_filter=niemine'.format(queryparam)) + self.assertEqual(len(response.data['results']), 1) - # more than 3 curators, requires typing exact case-sensitive name... see comments in related code - response = self.client.get('/rest/datasets?state=10&pas_filter=jokunen') - self.assertEqual(len(response.data['results']), 0) - response = self.client.get('/rest/datasets?state=10&pas_filter=Jaska Jokunen') - self.assertEqual(len(response.data['results']), 1) + # more than 3 curators, requires typing exact case-sensitive name... 
see comments in related code + response = self.client.get('/rest/datasets?{}=10&pas_filter=jokunen'.format(queryparam)) + self.assertEqual(len(response.data['results']), 0) + response = self.client.get('/rest/datasets?{}=10&pas_filter=Jaska Jokunen'.format(queryparam)) + self.assertEqual(len(response.data['results']), 1) - # contract_id 1 has several other associated test datasets - response = self.client.get('/rest/datasets?state=10&pas_filter=agreement') - self.assertEqual(len(response.data['results']), 3) + # contract_id 1 has several other associated test datasets + response = self.client.get('/rest/datasets?{}=10&pas_filter=agreement'.format(queryparam)) + self.assertEqual(len(response.data['results']), 3) - response = self.client.get('/rest/datasets?state=10&pas_filter=does not exist') - self.assertEqual(len(response.data['results']), 0) + response = self.client.get('/rest/datasets?{}=10&pas_filter=does not exist'.format(queryparam)) + self.assertEqual(len(response.data['results']), 0) def test_pas_filter_is_restricted(self): """ Query param is permitted to users metax and tpas. 
""" - response = self.client.get('/rest/datasets?state=10&pas_filter=hmmm') - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + for queryparam in ('preservation_state', 'state'): + response = self.client.get('/rest/datasets?{}=10&pas_filter=hmmm'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) class CatalogRecordApiReadQueryParamsTests(CatalogRecordApiReadCommon): @@ -547,27 +557,30 @@ def test_read_catalog_record_search_by_curator_not_found_case_sensitivity(self): self.assertEqual(len(response.data['results']), 0) def test_read_catalog_record_search_by_curator_and_state_1(self): - response = self.client.get('/rest/datasets?curator=id:of:curator:rahikainen&state=10') - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']), 2) - self.assertEqual(response.data['results'][0]['id'], 2) - self.assertEqual(response.data['results'][0]['preservation_state'], 10) - self.assertEqual(response.data['results'][0]['research_dataset']['curator'][0]['name'], 'Rahikainen', - 'Curator name is not matching') + for queryparam in ('preservation_state', 'state'): + response = self.client.get('/rest/datasets?curator=id:of:curator:rahikainen&{}=10'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']), 2) + self.assertEqual(response.data['results'][0]['id'], 2) + self.assertEqual(response.data['results'][0]['preservation_state'], 10) + self.assertEqual(response.data['results'][0]['research_dataset']['curator'][0]['name'], 'Rahikainen', + 'Curator name is not matching') def test_read_catalog_record_search_by_curator_and_state_2(self): - response = self.client.get('/rest/datasets?curator=id:of:curator:rahikainen&state=40') - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']), 1) - self.assertEqual(response.data['results'][0]['id'], 4) - 
self.assertEqual(response.data['results'][0]['preservation_state'], 40) - self.assertEqual(response.data['results'][0]['research_dataset']['curator'][0]['name'], 'Rahikainen', - 'Curator name is not matching') + for queryparam in ('preservation_state', 'state'): + response = self.client.get('/rest/datasets?curator=id:of:curator:rahikainen&{}=40'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']), 1) + self.assertEqual(response.data['results'][0]['id'], 4) + self.assertEqual(response.data['results'][0]['preservation_state'], 40) + self.assertEqual(response.data['results'][0]['research_dataset']['curator'][0]['name'], 'Rahikainen', + 'Curator name is not matching') def test_read_catalog_record_search_by_curator_and_state_not_found(self): - response = self.client.get('/rest/datasets?curator=id:of:curator:rahikainen&state=55') - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']), 0) + for queryparam in ('preservation_state', 'state'): + response = self.client.get('/rest/datasets?curator=id:of:curator:rahikainen&{}=55'.format(queryparam)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data['results']), 0) def test_read_catalog_record_search_by_owner_id(self): cr = CatalogRecord.objects.get(pk=1) diff --git a/src/metax_api/tests/api/rest/base/views/datasets/write.py b/src/metax_api/tests/api/rest/base/views/datasets/write.py index 5ab9c78a..3edc19d5 100644 --- a/src/metax_api/tests/api/rest/base/views/datasets/write.py +++ b/src/metax_api/tests/api/rest/base/views/datasets/write.py @@ -29,6 +29,7 @@ END_USER_ALLOWED_DATA_CATALOGS = django_settings.END_USER_ALLOWED_DATA_CATALOGS LEGACY_CATALOGS = django_settings.LEGACY_CATALOGS IDA_CATALOG = django_settings.IDA_DATA_CATALOG_IDENTIFIER +EXT_CATALOG = django_settings.EXT_DATA_CATALOG_IDENTIFIER class CatalogRecordApiWriteCommon(APITestCase, 
TestClassUtils): @@ -156,10 +157,45 @@ def _get_new_full_test_cr_data(self, cr_from_test_data, dc_from_test_data): class CatalogRecordDraftTests(CatalogRecordApiWriteCommon): """ - Tests related to draft dataset creation: - - when requesting data through API, field 'state' is returned - - the value of field 'state' can't be modified through API + Tests related to draft datasets """ + def setUp(self): + super().setUp() + + # create catalogs with end user access permitted + dc = DataCatalog.objects.get(pk=1) + catalog_json = dc.catalog_json + for identifier in END_USER_ALLOWED_DATA_CATALOGS: + catalog_json['identifier'] = identifier + dc = DataCatalog.objects.create( + catalog_json=catalog_json, + date_created=get_tz_aware_now_without_micros(), + catalog_record_services_create='testuser,api_auth_user,metax', + catalog_record_services_edit='testuser,api_auth_user,metax' + ) + + self.token = get_test_oidc_token(new_proxy=True) + self._mock_token_validation_succeeds() + # Create published record with owner: testuser and pk 1 + # Create draft records with owner: testuser, pk: 2 and owner: 'some owner who is not you', pk 3 + self._set_cr_owner_and_state(1, 'published', self.token['CSCUserName']) # Published dataset + self.assertEqual(CatalogRecord.objects.get(pk=1).metadata_provider_user, 'testuser') + + self._set_cr_owner_and_state(2, 'draft', self.token['CSCUserName']) # testusers' draft + self.assertEqual(CatalogRecord.objects.get(pk=2).metadata_provider_user, 'testuser') + + self._set_cr_owner_and_state(3, 'draft', '#### Some owner who is not you ####') # Draft dataset for some user + self.assertNotEqual(CatalogRecord.objects.get(pk=3).metadata_provider_user, 'testuser') + + def _set_cr_owner_and_state(self, cr_id, state, owner): + ''' helper method for testing user accessibility for draft datasets ''' + cr = CatalogRecord.objects.get(pk=cr_id) + cr.state = state + cr.user_created = owner + cr.metadata_provider_user = owner + cr.editor = None # pretend the record was 
created by user directly + cr.data_catalog_id = DataCatalog.objects.get(catalog_json__identifier=END_USER_ALLOWED_DATA_CATALOGS[0]).id + cr.force_save() def test_field_exists(self): """Try fetching any dataset, field 'state' should be returned'""" @@ -178,6 +214,191 @@ def test_change_state_field_through_API(self): self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) self.assertFalse(response.data['state'] == 'changed value') + ### + # Tests for different user roles access to drafts + ### + + @responses.activate + def test_endusers_access_to_draft_datasets(self): + ''' End user should get published data and his/her drafts ''' + # Test access as end user + self._use_http_authorization(method='bearer', token=self.token) + + # Test access for owner of dataset + response = self.client.get('/rest/datasets/1') + self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) + response = self.client.get('/rest/datasets/2') + self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) + response = self.client.get('/rest/datasets/3') + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.status_code) + # Test for multiple datasets + response = self.client.get('/rest/datasets', format="json") + # Returned list of datasets should not have owner "#### Some owner who is not you ####" + owners = [cr['metadata_provider_user'] for cr in response.data['results']] + self.assertEqual('#### Some owner who is not you ####' not in owners, True, response.data) + + def test_service_users_access_to_draft_datasets(self): + ''' Service users should get all data ''' + # test access as a service-user + self._use_http_authorization(method='basic', username='metax') + + response = self.client.get('/rest/datasets/1') + self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) + response = self.client.get('/rest/datasets/2') + self.assertEqual(response.status_code, status.HTTP_200_OK, 
response.status_code) + response = self.client.get('/rest/datasets/3') + self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) + # test for multiple datasets + response = self.client.get('/rest/datasets', format="json") + # Returned list of datasets should have owner "#### Some owner who is not you ####" + owners = [cr['metadata_provider_user'] for cr in response.data['results']] + self.assertEqual('#### Some owner who is not you ####' in owners, True, response.data) + + def test_anonymous_users_access_to_draft_datasets(self): + ''' Unauthenticated user should get only published datasets ''' + # Test access as unauthenticated user + self.client._credentials = {} + + response = self.client.get('/rest/datasets/1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + response = self.client.get('/rest/datasets/2') + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.data) + response = self.client.get('/rest/datasets/3') + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.data) + # test for multiple datasets + response = self.client.get('/rest/datasets', format="json") + # Returned list of datasets should not have drafts + states = [cr['state'] for cr in response.data['results']] + self.assertEqual('draft' not in states, True, response.data) + + ### + # Tests for different user roles access to update drafts + ### + + @responses.activate + def test_endusers_can_update_draft_datasets(self): + ''' End user should be able to update only his/her drafts ''' + # Set end user + self._use_http_authorization(method='bearer', token=self.token) + + for http_verb in ['put', 'patch']: + update_request = getattr(self.client, http_verb) + data1 = self.client.get('/rest/datasets/1').data # published + response = update_request('/rest/datasets/1', data1, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + + data2 = self.client.get('/rest/datasets/2').data # end 
users own draft + response = update_request('/rest/datasets/2', data2, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + + data3 = self.client.get('/rest/datasets/3').data # someone elses draft + response = update_request('/rest/datasets/3', data3, format="json") + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.data) + + # test for multiple datasets + response = update_request('/rest/datasets', [data1, data2, data3], format="json") + owners = [cr['object']['metadata_provider_user'] for cr in response.data['success']] + self.assertEqual('#### Some owner who is not you ####' not in owners, True, response.data) + + def test_service_users_can_update_draft_datasets(self): + ''' Dataset drafts should be able to be updated by service users (service is responsible that + their current user in e.g. Qvain is allowed to access the dataset)''' + # Set service-user + self._use_http_authorization(method='basic', username='metax') + + for http_verb in ['put', 'patch']: + update_request = getattr(self.client, http_verb) + data1 = self.client.get('/rest/datasets/1').data # published + response = update_request('/rest/datasets/1', data1, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + + data2 = self.client.get('/rest/datasets/2').data # draft + response = update_request('/rest/datasets/2', data2, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + + data3 = self.client.get('/rest/datasets/3').data # draft + response = update_request('/rest/datasets/3', data3, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + + # test for multiple datasets + response = update_request('/rest/datasets', [data1, data2, data3], format="json") + self.assertEqual(len(response.data['success']), 3, 'response.data should contain 3 changed objects') + owners = [cr['object']['metadata_provider_user'] for cr in 
response.data['success']] + self.assertEqual('#### Some owner who is not you ####' in owners, True, response.data) + + def test_anonymous_user_cannot_update_draft_datasets(self): + ''' Unauthenticated user should not be able to know drafts exists in the first place''' + # Set unauthenticated user + self.client._credentials = {} + + # Fetches a published dataset since unauthenticated user can't get drafts + response = self.client.get('/rest/datasets/1') + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + data = response.data + + for http_verb in ['put', 'patch']: + update_request = getattr(self.client, http_verb) + response = update_request('/rest/datasets/1', data, format="json") # published + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.status_code) + response = update_request('/rest/datasets/2', data, format="json") # draft + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.status_code) + response = update_request('/rest/datasets/3', data, format="json") # draft + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.status_code) + + # test for multiple datasets + response = update_request('/rest/datasets', data, format="json") + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.status_code) + + ### + # Tests for deleting drafts + ### + + def test_draft_is_permanently_deleted_by_service_user(self): + '''Draft datasets should be permanently deleted from the database. 
+ Only the dataset owner is able to delete draft datasets.''' + # Set service-user + self._use_http_authorization(method='basic', username='metax') + + for cr_id in (2, 3): + response = self.client.delete('/rest/datasets/%d' % cr_id) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT, response.data) + self.assertFalse(CatalogRecord.objects_unfiltered.filter(pk=cr_id).exists()) + + @responses.activate + def test_draft_is_permanently_deleted_by_enduser(self): + # Set end user + self._use_http_authorization(method='bearer', token=self.token) + + response = self.client.delete('/rest/datasets/2') + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT, response.status_code) + self.assertFalse(CatalogRecord.objects_unfiltered.filter(pk=2).exists()) + response = self.client.delete('/rest/datasets/3') + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.status_code) + + ### + # Tests for saving drafts + ### + + def test_service_users_can_save_draft_datasets(self): + ''' Drafts should be saved without preferred identifier ''' + # test access as a service-user + self._use_http_authorization(method='basic', username='metax') + + response = self.client.post('/rest/datasets?draft', self.cr_test_data, format="json") + + pid = response.data['research_dataset']['preferred_identifier'] + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + self.assertTrue(pid == response.data['identifier'], response.data) + self.assertTrue('urn' not in pid, response.data) + self.assertTrue('doi' not in pid, response.data) + self.assertTrue(response.data['state'] == 'draft', response.data) + + for queryparam in ('', '?draft=false'): + response = self.client.post('/rest/datasets{}'.format(queryparam), self.cr_test_data, format="json") + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + self.assertTrue(response.data['state'] == 'published', response.data) + + class 
CatalogRecordApiWriteCreateTests(CatalogRecordApiWriteCommon): # # @@ -1006,7 +1227,12 @@ def setUp(self): catalog_json = dc.catalog_json catalog_json['identifier'] = django_settings.PAS_DATA_CATALOG_IDENTIFIER catalog_json['dataset_versioning'] = False - dc = DataCatalog.objects.create(catalog_json=catalog_json, date_created=get_tz_aware_now_without_micros()) + dc = DataCatalog.objects.create( + catalog_json=catalog_json, + date_created=get_tz_aware_now_without_micros(), + catalog_record_services_create='testuser,api_auth_user,metax', + catalog_record_services_edit='testuser,api_auth_user,metax' + ) def test_update_catalog_record_pas_state_allowed_value(self): cr = self.client.get('/rest/datasets/1').data @@ -1885,6 +2111,7 @@ def _set_and_ensure_initial_conditions(self): self.assertEqual(records[1].alternate_record_set.id, ars_id) self.assertEqual(records[2].alternate_record_set.id, ars_id) + class CatalogRecordApiWriteDatasetVersioning(CatalogRecordApiWriteCommon): """ @@ -2042,6 +2269,7 @@ def test_new_dataset_version_pref_id_type_stays_same_as_previous_dataset_version cr_v1 = self.client.post('/rest/datasets?pid_type=urn', self.cr_test_data, format="json").data cr_v1['research_dataset']['files'].pop(0) cr_v2 = self.client.put('/rest/datasets/{0}?pid_type=doi'.format(cr_v1['identifier']), cr_v1, format="json") + self.assertEqual(cr_v2.status_code, status.HTTP_200_OK, cr_v2.data) self.assertEqual('new_version_created' in cr_v2.data, True) self.assertTrue( get_identifier_type(cr_v2.data['new_version_created']['preferred_identifier']) == IdentifierType.URN) @@ -2159,7 +2387,7 @@ def _get_file_from_test_data(self): from_test_data.update({ "checksum": { "value": "checksumvalue", - "algorithm": "sha2", + "algorithm": "SHA-256", "checked": "2017-05-23T10:07:22.559656Z", }, "file_name": "must_replace", @@ -2480,6 +2708,7 @@ def assert_file_count(self, cr, expected_file_count): def assert_total_files_byte_size(self, cr, expected_size): 
self.assertEqual(cr['research_dataset']['total_files_byte_size'], expected_size) + class CatalogRecordApiWriteCumulativeDatasets(CatalogRecordApiWriteAssignFilesCommon): """ @@ -3376,7 +3605,12 @@ def setUp(self): catalog_json = dc.catalog_json for identifier in END_USER_ALLOWED_DATA_CATALOGS: catalog_json['identifier'] = identifier - dc = DataCatalog.objects.create(catalog_json=catalog_json, date_created=get_tz_aware_now_without_micros()) + dc = DataCatalog.objects.create( + catalog_json=catalog_json, + date_created=get_tz_aware_now_without_micros(), + catalog_record_services_create='testuser,api_auth_user,metax', + catalog_record_services_edit='testuser,api_auth_user,metax' + ) self.token = get_test_oidc_token() @@ -3390,8 +3624,8 @@ def setUp(self): def _set_cr_owner_to_token_user(self, cr_id): cr = CatalogRecord.objects.get(pk=cr_id) - cr.user_created = self.token['sub'] - cr.metadata_provider_user = self.token['sub'] + cr.user_created = self.token['CSCUserName'] + cr.metadata_provider_user = self.token['CSCUserName'] cr.editor = None # pretend the record was created by user directly cr.force_save() @@ -3407,15 +3641,12 @@ def test_user_can_create_dataset(self): automatically placed and the user is only able to affect allowed fields ''' - user_created = self.token['sub'] - metadata_provider_user = self.token['sub'] + user_created = self.token['CSCUserName'] + metadata_provider_user = self.token['CSCUserName'] metadata_provider_org = self.token['schacHomeOrganization'] metadata_owner_org = self.token['schacHomeOrganization'] - self.cr_test_data['data_catalog'] = { - "id": 1, - "identifier": END_USER_ALLOWED_DATA_CATALOGS[0] # ida - } + self.cr_test_data['data_catalog'] = END_USER_ALLOWED_DATA_CATALOGS[0] # ida self.cr_test_data['contract'] = 1 self.cr_test_data['editor'] = { 'nope': 'discarded by metax' } self.cr_test_data['preservation_description'] = 'discarded by metax' @@ -3495,7 +3726,7 @@ def test_owner_can_edit_dataset(self): response = 
self.client.put('/rest/datasets/%d' % modified_data['id'], modified_data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) self.assertEqual(response.data['research_dataset']['value'], 112233) # value we set - self.assertEqual(response.data['user_modified'], self.token['sub']) # set by metax + self.assertEqual(response.data['user_modified'], self.token['CSCUserName']) # set by metax # none of these should have been affected self.assertEqual('contract' in response.data, False) @@ -3513,8 +3744,8 @@ def test_owner_can_edit_datasets_only_in_permitted_catalogs(self): # create test record self.cr_test_data['data_catalog'] = 1 - self.cr_test_data['user_created'] = self.token['sub'] - self.cr_test_data['metadata_provider_user'] = self.token['sub'] + self.cr_test_data['user_created'] = self.token['CSCUserName'] + self.cr_test_data['metadata_provider_user'] = self.token['CSCUserName'] self.cr_test_data.pop('editor', None) self._use_http_authorization() # create cr as a service-user @@ -3536,15 +3767,18 @@ def test_owner_can_edit_dataset_check_perms_from_editor_field(self): ''' self.cr_test_data['data_catalog'] = END_USER_ALLOWED_DATA_CATALOGS[0] self.cr_test_data['user_created'] = 'editor field is checked before this field, so should be ok' - self.cr_test_data['editor'] = { 'owner_id': self.token['sub'] } + self.cr_test_data['editor'] = { 'owner_id': self.token['CSCUserName'] } self._use_http_authorization() # create cr as a service-user to ensure editor-field is set + response = self.client.post('/rest/datasets', self.cr_test_data, format="json") self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + self._use_http_authorization(method='bearer', token=self.token) response = self.client.get('/rest/datasets/%d' % response.data['id'], format="json") modified_data = response.data modified_data['research_dataset']['value'] = 112233 + response = self.client.put('/rest/datasets/%d' % response.data['id'], 
modified_data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -3587,7 +3821,7 @@ def test_user_file_permissions_are_checked_during_dataset_create(self): # add project membership to user's token and try again file_identifier = self.cr_test_data['research_dataset']['files'][0]['identifier'] project_identifier = File.objects.get(identifier=file_identifier).project_identifier - self.token['group_names'].append('fairdata:IDA01:%s' % project_identifier) + self.token['group_names'].append('IDA01:%s' % project_identifier) self._use_http_authorization(method='bearer', token=self.token) response = self.client.post('/rest/datasets', self.cr_test_data, format="json") @@ -3621,7 +3855,7 @@ def test_user_file_permissions_are_checked_during_dataset_update(self): # add project membership to user's token and try again project_identifier = File.objects.get(identifier=new_files[0]['identifier']).project_identifier - self.token['group_names'].append('fairdata:IDA01:%s' % project_identifier) + self.token['group_names'].append('IDA01:%s' % project_identifier) self._use_http_authorization(method='bearer', token=self.token) response = self.client.put('/rest/datasets/1', modified_data, format="json") @@ -3654,249 +3888,97 @@ def _check_fields(obj): _check_fields(response.data) -class CatalogRecordApiEndUserAccessV2(CatalogRecordApiEndUserAccess): +class CatalogRecordServicesAccess(CatalogRecordApiWriteCommon): """ - End User Access -related permission testing for new auth proxy. - - These will eventually be removed. These are basically a bunch of - dull copypasted tests. + Testing access of services to external catalogs with harvested flag and vice versa. """ def setUp(self): + """ + Create a test-datacatalog that plays the role as a external catalog. 
+ """ super().setUp() - self.token = get_test_oidc_token(new_proxy=True) - self._use_http_authorization(method='bearer', token=self.token) - def _set_cr_owner_to_token_user(self, cr_id): - cr = CatalogRecord.objects.get(pk=cr_id) - cr.user_created = self.token['CSCUserName'] - cr.metadata_provider_user = self.token['CSCUserName'] - cr.editor = None # pretend the record was created by user directly - cr.force_save() + self.dc = DataCatalog.objects.filter(catalog_json__research_dataset_schema='att').first() + self.dc.catalog_json['identifier'] = EXT_CATALOG + self.dc.catalog_json['harvested'] = True + self.dc.catalog_record_services_create = 'external' + self.dc.catalog_record_services_edit = 'external' + self.dc.force_save() - @responses.activate - def test_user_can_create_dataset(self): - ''' - Ensure end user can create a new dataset, and required fields are - automatically placed and the user is only able to affect allowed - fields - ''' - user_created = self.token['CSCUserName'] - metadata_provider_user = self.token['CSCUserName'] - metadata_provider_org = self.token['schacHomeOrganization'] - metadata_owner_org = self.token['schacHomeOrganization'] - - self.cr_test_data['data_catalog'] = END_USER_ALLOWED_DATA_CATALOGS[0] # ida - self.cr_test_data['contract'] = 1 - self.cr_test_data['editor'] = { 'nope': 'discarded by metax' } - self.cr_test_data['preservation_description'] = 'discarded by metax' - self.cr_test_data['preservation_reason_description'] = 'discarded by metax' - self.cr_test_data['preservation_state'] = 10 - self.cr_test_data.pop('metadata_provider_user', None) - self.cr_test_data.pop('metadata_provider_org', None) - self.cr_test_data.pop('metadata_owner_org', None) + self.cr_test_data['data_catalog'] = self.dc.catalog_json['identifier'] + del self.cr_test_data['research_dataset']['files'] + del self.cr_test_data['research_dataset']['total_files_byte_size'] - # test file permission checking in another test - 
self.cr_test_data['research_dataset'].pop('files', None) - self.cr_test_data['research_dataset'].pop('directories', None) + self._use_http_authorization(username=django_settings.API_EXT_USER['username'], + password=django_settings.API_EXT_USER['password']) + def test_external_service_can_add_catalog_record_to_own_catalog(self): + self.cr_test_data['research_dataset']['preferred_identifier'] = '123456' response = self.client.post('/rest/datasets', self.cr_test_data, format="json") - self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content) - self.assertEqual(response.data['user_created'], user_created) - self.assertEqual(response.data['metadata_provider_user'], metadata_provider_user) - self.assertEqual(response.data['metadata_provider_org'], metadata_provider_org) - self.assertEqual(response.data['metadata_owner_org'], metadata_owner_org) - self.assertEqual('contract' in response.data, False) - self.assertEqual('editor' in response.data, False) - self.assertEqual('preservation_description' in response.data, False) - self.assertEqual('preservation_reason_description' in response.data, False) - self.assertEqual(response.data['preservation_state'], 0) - - @responses.activate - def test_owner_can_edit_dataset(self): - ''' - Ensure end users are able to edit datasets owned by them. - Ensure end users can only edit permitted fields. - Note: File project permissions should not be checked, since files are not changed. 
- ''' - - # create test record - self.cr_test_data['data_catalog'] = END_USER_ALLOWED_DATA_CATALOGS[0] - self.cr_test_data['research_dataset'].pop('files', None) # test file permission checking in another test - self.cr_test_data['research_dataset'].pop('directories', None) - response = self.client.post('/rest/datasets', self.cr_test_data, format="json") self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + self.assertEqual(response.data['research_dataset']['preferred_identifier'], '123456') - modified_data = response.data - # research_dataset is the only permitted field to edit - modified_data['research_dataset']['value'] = 112233 - modified_data['contract'] = 1 - modified_data['editor'] = { 'nope': 'discarded by metax' } - modified_data['preservation_description'] = 'discarded by metax' - modified_data['preservation_reason_description'] = 'discarded by metax' - modified_data['preservation_state'] = 10 - - response = self.client.put('/rest/datasets/%d' % modified_data['id'], modified_data, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - self.assertEqual(response.data['research_dataset']['value'], 112233) # value we set - self.assertEqual(response.data['user_modified'], self.token['CSCUserName']) # set by metax - - # none of these should have been affected - self.assertEqual('contract' in response.data, False) - self.assertEqual('editor' in response.data, False) - self.assertEqual('preservation_description' in response.data, False) - self.assertEqual('preservation_reason_description' in response.data, False) - self.assertEqual(response.data['preservation_state'], 0) - - @responses.activate - def test_owner_can_edit_datasets_only_in_permitted_catalogs(self): - ''' - Ensure end users are able to edit datasets only in permitted catalogs, even if they - own the record (catalog may be disabled from end user editing for reason or another). 
- ''' - - # create test record - self.cr_test_data['data_catalog'] = 1 - self.cr_test_data['user_created'] = self.token['CSCUserName'] - self.cr_test_data['metadata_provider_user'] = self.token['CSCUserName'] - self.cr_test_data.pop('editor', None) - - self._use_http_authorization() # create cr as a service-user + def test_external_service_can_update_catalog_record_in_own_catalog(self): + self.cr_test_data['research_dataset']['preferred_identifier'] = '123456' response = self.client.post('/rest/datasets', self.cr_test_data, format="json") - self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) - modified_data = response.data - modified_data['research_dataset']['value'] = 112233 - - self._use_http_authorization(method='bearer', token=self.token) - response = self.client.put('/rest/datasets/%d' % modified_data['id'], modified_data, format="json") - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.data) - - @responses.activate - def test_owner_can_edit_dataset_check_perms_from_editor_field(self): - ''' - Ensure end user perms are also checked from the field 'editor', which may be - set by .e.g. qvain. 
- ''' - self.cr_test_data['data_catalog'] = END_USER_ALLOWED_DATA_CATALOGS[0] - self.cr_test_data['user_created'] = 'editor field is checked before this field, so should be ok' - self.cr_test_data['editor'] = { 'owner_id': self.token['CSCUserName'] } - - self._use_http_authorization() # create cr as a service-user to ensure editor-field is set - response = self.client.post('/rest/datasets', self.cr_test_data, format="json") self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + self.assertEqual(response.data['research_dataset']['preferred_identifier'], '123456') - response = self.client.get('/rest/datasets/%d' % response.data['id'], format="json") - modified_data = response.data - modified_data['research_dataset']['value'] = 112233 - response = self.client.put('/rest/datasets/%d' % response.data['id'], modified_data, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK) + cr_id = response.data['id'] + self.cr_test_data['research_dataset']['preferred_identifier'] = '654321' + response = self.client.put('/rest/datasets/{}'.format(cr_id), self.cr_test_data, format="json") - @responses.activate - def test_user_file_permissions_are_checked_during_dataset_create(self): - ''' - Ensure user's association with a project is checked during dataset create when - attaching files or directories to a dataset. 
- ''' + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + self.assertEqual(response.data['research_dataset']['preferred_identifier'], '654321') - # try creating without proper permisisons - self.cr_test_data['data_catalog'] = END_USER_ALLOWED_DATA_CATALOGS[0] # ida + def test_external_service_can_delete_catalog_record_from_own_catalog(self): + self.cr_test_data['research_dataset']['preferred_identifier'] = '123456' response = self.client.post('/rest/datasets', self.cr_test_data, format="json") - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.content) - # add project membership to user's token and try again - file_identifier = self.cr_test_data['research_dataset']['files'][0]['identifier'] - project_identifier = File.objects.get(identifier=file_identifier).project_identifier - self.token['group_names'].append('IDA01:%s' % project_identifier) - self._use_http_authorization(method='bearer', token=self.token) + cr_id = response.data['id'] + response = self.client.delete('/rest/datasets/{}'.format(cr_id)) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT, response.data) + response = self.client.get('/rest/datasets/{}'.format(cr_id), format="json") + self.assertEqual('not found' in response.json()['detail'].lower(), True) + + def test_external_service_can_not_add_catalog_record_to_other_catalog(self): + dc = self._get_object_from_test_data('datacatalog', requested_index=1) + self.cr_test_data['data_catalog'] = dc['catalog_json']['identifier'] + self.cr_test_data['research_dataset']['preferred_identifier'] = 'temp-pid' response = self.client.post('/rest/datasets', self.cr_test_data, format="json") - self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content) - @responses.activate - def test_user_file_permissions_are_checked_during_dataset_update(self): - ''' - Ensure user's association with a project is checked during dataset update when - attaching files or directories to a 
dataset. The permissions should be checked - only for changed files (newly added, or removed). - ''' - # get some files to add to another dataset - response = self.client.get('/rest/datasets/2', format="json") - new_files = response.data['research_dataset']['files'] + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.data) - # this is the dataset we'll modify - self._set_cr_owner_to_token_user(1) - self._set_cr_to_permitted_catalog(1) - response = self.client.get('/rest/datasets/1', format="json") - # ensure the files really are new - for f in new_files: - for existing_f in response.data['research_dataset']['files']: - assert f['identifier'] != existing_f['identifier'], 'test preparation failure, files should differ' - modified_data = response.data - modified_data['research_dataset']['files'].extend(new_files) + def test_external_service_can_not_update_catalog_record_in_other_catalog(self): + response = self.client.put('/rest/datasets/1', {}, format="json") - # should fail, since user's token has no permission for the newly added files - response = self.client.put('/rest/datasets/1', modified_data, format="json") - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.content) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.data) - # add project membership to user's token and try again - project_identifier = File.objects.get(identifier=new_files[0]['identifier']).project_identifier - self.token['group_names'].append('IDA01:%s' % project_identifier) - self._use_http_authorization(method='bearer', token=self.token) + def test_external_service_can_not_delete_catalog_record_from_other_catalog(self): + response = self.client.delete('/rest/datasets/1') - response = self.client.put('/rest/datasets/1', modified_data, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK, response.content) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.data) - 
@responses.activate - def test_user_projects_are_checked_when_writing_into_protected_data_catalog(self): - ''' - If a data catalog has field catalog_record_group_create defined, ensure - user's projects are checked when user tries to create or edit dataset in that catalog. - ''' - permitted_create_group = 'some_permitted_create_group' - permitted_edit_group = 'some_permitted_edit_group' - - # add limiting groups to catalog creators and editors - dc_identifier = END_USER_ALLOWED_DATA_CATALOGS[0] # ida - dc = DataCatalog.objects.get(catalog_json__identifier=dc_identifier) - dc.catalog_record_group_create = permitted_create_group - dc.catalog_record_group_edit = permitted_edit_group - dc.save() + def test_harvested_catalogs_must_have_preferred_identifier_create(self): + # create without preferred identifier - # try creating without proper permisisons - self.cr_test_data['data_catalog'] = dc_identifier response = self.client.post('/rest/datasets', self.cr_test_data, format="json") - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.content) - # add project membership to user's token and try again - # - file project, so that files may be added... 
- self.token['group_names'].append( - 'IDA01:%s' % File.objects.get( - identifier=self.cr_test_data['research_dataset']['files'][0]['identifier'] - ).project_identifier - ) - # - data catalog group - self.token['group_names'].append(permitted_create_group) - self._use_http_authorization(method='bearer', token=self.token) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.data) + self.assertEqual('must have preferred identifier' in + response.data['research_dataset']['preferred_identifier'][0], True) + self.cr_test_data['research_dataset']['preferred_identifier'] = '' response = self.client.post('/rest/datasets', self.cr_test_data, format="json") - self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content) - # attempt to modify record - should fail, since user does not have group - # that allows editing. - cr_data = response.data - response = self.client.put('/rest/datasets/%d' % cr_data['id'], cr_data, format="json") - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.content) - - # add approriate editing group to token - self.token['group_names'].append(permitted_edit_group) - self._use_http_authorization(method='bearer', token=self.token) - - # try editing again - should be ok - response = self.client.put('/rest/datasets/%d' % cr_data['id'], cr_data, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK, response.content) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.data) + self.assertEqual('must have preferred identifier' in + response.data['research_dataset']['preferred_identifier'][0], True) @unittest.skipIf(django_settings.REMS['ENABLED'] is not True, 'Only run if REMS is enabled') @@ -3930,6 +4012,9 @@ class CatalogRecordApiWriteREMS(CatalogRecordApiWriteCommon): } } + # any other than what is included in permit_rights is sufficient + other_license = rf['reference_data']['license'][1] + def setUp(self): super().setUp() # Create 
ida data catalog @@ -4201,6 +4286,7 @@ def test_creating_permit_dataset_creates_catalogue_item_service_succeeds(self): """ response = self._create_new_rems_dataset() self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + self.assertTrue(response.data.get('rems_identifier') is not None, 'rems_identifier should be present') @responses.activate def test_creating_permit_dataset_creates_catalogue_item_service_fails_1(self): @@ -4255,6 +4341,7 @@ def test_changing_dataset_to_permit_creates_new_catalogue_item_succeeds(self): response = self.client.put(f'/rest/datasets/{cr["id"]}?access_granter={granter}', cr, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + self.assertTrue(response.data.get('rems_identifier') is not None, 'rems_identifier should be present') @responses.activate def test_changing_dataset_to_permit_creates_new_catalogue_item_fails(self): @@ -4303,6 +4390,49 @@ def test_changing_access_type_to_other_closes_rems_entities_fails(self): response = self.client.put(f'/rest/datasets/{cr["id"]}', cr, format="json") self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE, response.data) + @responses.activate + def test_changing_dataset_license_updates_rems(self): + """ + Create REMS dataset and change it's license. Ensure that + request is successful and that dataset's rems_identifier is changed. 
+ """ + response = self._create_new_rems_dataset() + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + + cr_before = response.data + + rems_id_before = cr_before['rems_identifier'] + cr_before['research_dataset']['access_rights']['license'] = [ + { + "title": self.other_license['label'], + "identifier": self.other_license['uri'] + } + ] + + response = self.client.put(f'/rest/datasets/{cr_before["id"]}', cr_before, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + + cr_after = response.data + self.assertNotEqual(rems_id_before, cr_after['rems_identifier'], 'REMS identifier should have been changed') + + @responses.activate + def test_deleting_license_updates_rems(self): + """ + Create REMS dataset and delete it's license. Ensure that rems_identifier is removed and no failures occur. + """ + response = self._create_new_rems_dataset() + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + + cr_before = response.data + + cr_before['research_dataset']['access_rights'].pop('license') + + response = self.client.put(f'/rest/datasets/{cr_before["id"]}', cr_before, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + + cr_after = response.data + self.assertTrue(cr_after.get('rems_identifier') is None, 'REMS identifier should have been deleted') + @responses.activate def test_creating_permit_dataset_creates_catalogue_item_end_user(self): """ @@ -4330,10 +4460,15 @@ def test_deleting_permit_dataset_removes_catalogue_item_succeeds(self): response = self._create_new_rems_dataset() self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + cr_id = response.data['id'] + # delete dataset - response = self.client.delete(f'/rest/datasets/{response.data["id"]}') + response = self.client.delete(f'/rest/datasets/{cr_id}') self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT, response.data) + cr = 
self.client.get(f'/rest/datasets/{cr_id}?removed').data + self.assertTrue(cr.get('rems_identifier') is None, 'rems_identifier should not be present') + @responses.activate def test_deleting_permit_dataset_removes_catalogue_item_fails(self): response = self._create_new_rems_dataset() @@ -4350,10 +4485,14 @@ def test_deprecating_permit_dataset_removes_catalogue_item_succeeds(self): response = self._create_new_rems_dataset() self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + cr_before = response.data # deprecate dataset - response = self.client.delete(f"/rest/files/{response.data['research_dataset']['files'][0]['identifier']}") + response = self.client.delete(f"/rest/files/{cr_before['research_dataset']['files'][0]['identifier']}") self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + cr_after = self.client.get(f'/rest/datasets/{cr_before["id"]}').data + self.assertTrue(cr_after.get('rems_identifier') is None, 'rems_identifier should not be present') + @responses.activate def test_deprecating_permit_dataset_removes_catalogue_item_fails(self): response = self._create_new_rems_dataset() @@ -4411,7 +4550,7 @@ def test_missing_license_in_dataset(self): """ License is required when dataset is REMS managed """ - self.cr_test_data['research_dataset']['access_rights'] = self.permit_rights + self.cr_test_data['research_dataset']['access_rights'] = deepcopy(self.permit_rights) del self.cr_test_data['research_dataset']['access_rights']['license'] self.cr_test_data['data_catalog'] = IDA_CATALOG @@ -4422,3 +4561,29 @@ def test_missing_license_in_dataset(self): ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.data) self.assertTrue('must define license' in response.data['detail'], response.data) + + @responses.activate + def test_only_return_rems_identifier_to_privileged(self): + self._set_http_authorization('service') + + response = self._create_new_rems_dataset() + 
self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + self.assertTrue(response.data.get('rems_identifier') is not None, 'rems_identifier should be returned to owner') + + self._set_http_authorization('no') + response = self.client.get(f'/rest/datasets/{response.data["id"]}') + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + self.assertTrue(response.data.get('rems_identifier') is None, 'rems_identifier should not be returned to Anon') + + @responses.activate + def test_rems_identifier_cannot_be_changed(self): + response = self._create_new_rems_dataset() + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + + cr = response.data + + cr['rems_identifier'] = 'some:new:identifier' + + response = self.client.put(f'/rest/datasets/{cr["id"]}', cr, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + self.assertNotEqual(response.data['rems_identifier'], 'some:new:identifier', 'rems_id should not be changed') diff --git a/src/metax_api/tests/api/rest/base/views/directories/read.py b/src/metax_api/tests/api/rest/base/views/directories/read.py index 27abce57..e0760492 100644 --- a/src/metax_api/tests/api/rest/base/views/directories/read.py +++ b/src/metax_api/tests/api/rest/base/views/directories/read.py @@ -551,7 +551,7 @@ def setUp(self): def _update_token_with_project_of_directory(self, dir_id): proj = Directory.objects.get(pk=dir_id).project_identifier - self.token['group_names'].append('fairdata:IDA01:%s' % proj) + self.token['group_names'].append('IDA01:%s' % proj) self._use_http_authorization(method='bearer', token=self.token) @responses.activate diff --git a/src/metax_api/tests/api/rest/base/views/files/read.py b/src/metax_api/tests/api/rest/base/views/files/read.py index ad05ae87..1222bab6 100644 --- a/src/metax_api/tests/api/rest/base/views/files/read.py +++ b/src/metax_api/tests/api/rest/base/views/files/read.py @@ -107,6 +107,38 @@ def 
test_get_related_datasets_ok_2(self):         self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)         self._assert_results_length(response, 5) +    def test_get_detailed_related_datasets_ok_1(self): +        """ +        File identifier listed below should belong to 3 datasets +        """ +        response = self.client.post('/rest/files/datasets?detailed=true', [1], format='json') +        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) +        self._assert_results_length(response, 1) +        self.assertEqual(len(list(response.data.values())[0]), 3, response.data) + +    def test_get_detailed_related_datasets_ok_2(self): +        """ +        File identifiers listed below should belong to 5 datasets +        """ +        file_identifiers = [1, 2, 3, 4, 5] + +        response = self.client.post('/rest/files/datasets?detailed=true', file_identifiers, format='json') +        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) +        self._assert_results_length(response, 5) + +        # set of all returned datasets +        self.assertEqual(len(set(sum(response.data.values(), []))), 5, response.data) + +        # check if identifiers work +        file_identifiers = ['pid:urn:1', 'pid:urn:2', 'pid:urn:3', 'pid:urn:4', 'pid:urn:5'] + +        response = self.client.post('/rest/files/datasets?detailed=true', file_identifiers, format='json') +        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) +        self._assert_results_length(response, 5) + +        # set of all returned datasets +        self.assertEqual(len(set(sum(response.data.values(), []))), 5, response.data) +     def test_get_related_datasets_files_not_found(self):         """         When the files themselves are not found, 404 should be returned @@ -114,6 +146,9 @@ def test_get_related_datasets_files_not_found(self):         response = self.client.post('/rest/files/datasets', ['doesnotexist'], format='json')         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.data) +        response = self.client.post('/rest/files/datasets?detailed=true', ['doesnotexist'], format='json') + 
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.data) + def test_get_related_datasets_records_not_found(self): """ When files are found, but no records for them, an empty list should be returned @@ -126,8 +161,12 @@ def test_get_related_datasets_records_not_found(self): self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) self._assert_results_length(response, 0) + response = self.client.post('/rest/files/datasets?detailed=true', [1], format='json') + self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) + self._assert_results_length(response, 0) + def _assert_results_length(self, response, length): - self.assertEqual(isinstance(response.data, list), True, response.data) + self.assertTrue(isinstance(response.data, dict) or isinstance(response.data, list), response.data) self.assertEqual(len(response.data), length) @@ -157,7 +196,7 @@ def test_user_can_read_owned_files(self): self.assertEqual(len(response.data), 0, 'should return 200 OK, but user projects has no files') # set user to same project as previous files and try again. 
should now succeed - self.token['group_names'].append('fairdata:IDA01:%s' % proj) + self.token['group_names'].append('IDA01:%s' % proj) self._use_http_authorization(method='bearer', token=self.token) response = self.client.get('/rest/files') diff --git a/src/metax_api/tests/api/rest/base/views/files/write.py b/src/metax_api/tests/api/rest/base/views/files/write.py index 5bd988c9..e6b71b87 100644 --- a/src/metax_api/tests/api/rest/base/views/files/write.py +++ b/src/metax_api/tests/api/rest/base/views/files/write.py @@ -51,7 +51,7 @@ def _get_new_test_data(self): from_test_data.update({ "checksum": { "value": "habeebit", - "algorithm": "sha2", + "algorithm": "SHA-256", "checked": "2017-05-23T10:07:22.559656Z", }, "file_name": "file_name_1", @@ -175,7 +175,7 @@ def test_file_format_version_with_empty_format_version_when_file_format_has_no_v self.test_new_data['file_characteristics']['file_format'] = self.ff_without_version['input_file_format'] self.test_new_data['file_characteristics']['format_version'] = '' response = self.client.post('/rest/files', self.test_new_data, format="json") - self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) def test_file_format_version_with_valid_file_format_and_valid_file_version_1(self): self.test_new_data['file_characteristics']['file_format'] = self.ff_with_version['input_file_format'] @@ -268,6 +268,42 @@ def test_create_file_error_json_validation(self): self.assertEqual('Json path:' in response.data['file_characteristics'][0], True, 'The error should contain the json path') + def test_create_file_allowed_checksum_algorithm(self): + self.test_new_data['checksum']['algorithm'] = 'SHA-512' + + response = self.client.post('/rest/files', self.test_new_data, format="json") + + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) + self.assertEqual(response.data['checksum']['algorithm'], 'SHA-512') + + 
self.test_new_data['identifier'] = 'urn:nbn:fi:csc-md5' +        self.test_new_data['file_path'] = '/md5/filepath/md5-filename' +        self.test_new_data['file_name'] = 'md5-filename' +        self.test_new_data['checksum']['algorithm'] = 'MD5' + +        response = self.client.post('/rest/files', self.test_new_data, format="json") + +        self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) +        self.assertEqual(response.data['checksum']['algorithm'], 'MD5') + +    def test_create_file_not_allowed_checksum_algorithm(self): +        from django.db import transaction + +        for algo in ['sha2', 'sha256', 'sha-256']: +            # run POST requests inside db transaction to ensure django testcase transactions +            # work correctly. https://stackoverflow.com/a/23326971/1201945 this probably has +            # something to do with the fact that POST requests to /rest/files do not normally +            # execute inside a db transaction like all other requests to metax api do. see +            # file_view.py for details. +            # +            # alternative for below would be to use optional query param ?dryrun=true, which +            # causes the request to be executed inside a transaction too. 
+ with transaction.atomic(): + self.test_new_data['checksum']['algorithm'] = algo + response = self.client.post('/rest/files', self.test_new_data, format="json") + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.data) + self.assertEqual('checksum_algorithm' in response.data, True) + # # create list operations # @@ -1237,7 +1273,7 @@ def test_user_cant_create_files(self): ''' # ensure user belongs to same project - self.token['group_names'].append('fairdata:IDA01:%s' % self.test_new_data['project_identifier']) + self.token['group_names'].append('IDA01:%s' % self.test_new_data['project_identifier']) self._use_http_authorization(method='bearer', token=self.token) response = self.client.post('/rest/files', self.test_new_data, format="json") @@ -1250,7 +1286,7 @@ def test_user_can_only_update_permitted_file_fields(self): ''' # ensure user belongs to same project proj = File.objects.get(pk=1).project_identifier - self.token['group_names'].append('fairdata:IDA01:%s' % proj) + self.token['group_names'].append('IDA01:%s' % proj) self._use_http_authorization(method='bearer', token=self.token) response = self.client.get('/rest/files/1', format="json") @@ -1308,7 +1344,7 @@ def test_user_can_update_files_in_their_projects(self): file = response.data['results'][0] - self.token['group_names'].append('fairdata:IDA01:%s' % proj) + self.token['group_names'].append('IDA01:%s' % proj) self._use_http_authorization(method='bearer', token=self.token) response = self.client.put('/rest/files/%s' % file['id'], file, format="json") diff --git a/src/metax_api/tests/api/rpc/base/views/statistic_rpc.py b/src/metax_api/tests/api/rpc/base/views/statistic_rpc.py index c23db183..83aa0a53 100644 --- a/src/metax_api/tests/api/rpc/base/views/statistic_rpc.py +++ b/src/metax_api/tests/api/rpc/base/views/statistic_rpc.py @@ -113,7 +113,11 @@ def _setup_testdata(self): # create legacy datacatalog dc = 
DataCatalog.objects.filter(catalog_json__research_dataset_schema='ida').first()         dc.catalog_json['identifier'] = settings.LEGACY_CATALOGS[0] -        dc_json = { "catalog_json": dc.catalog_json } +        dc_json = { +            "catalog_record_services_create": "testuser,api_auth_user,metax", +            "catalog_record_services_edit": "testuser,api_auth_user,metax", +            "catalog_json": dc.catalog_json +        }         response = self.client.post('/rest/datacatalogs', dc_json, format="json")         self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data) diff --git a/src/metax_api/tests/testdata/README.md b/src/metax_api/tests/testdata/README.md new file mode 100644 index 00000000..d453133b --- /dev/null +++ b/src/metax_api/tests/testdata/README.md @@ -0,0 +1,41 @@ +# Metax tests and testdata + + +## running tests + + +In order to run tests inside the vagrant vm, as metax-user, execute the following command: + +``` +runtests +``` + +The above will execute all tests, and stop execution at first failed test. + +In order to run tests selectively inside the vagrant vm, as metax-user, navigate to src/ directory in this repository, and execute: + +``` +python manage.py test metax_api.tests.api.rest.base.views.datasets --failfast +``` + +The above executes all tests in directory src/metax_api/tests/api/rest/base/views/datasets, and stops execution at first failed test. To run a single testcase from a particular test suite, execute: + +``` +python manage.py test metax_api.tests.api.rest.base.views.datasets.write.CatalogRecordApiWriteDatasetVersioning.test_changing_files_creates_new_dataset_version +``` + +The above executes a single specified testcase only. Very useful when developing and testing a new feature, or debugging, since you can start a debugger (e.g. ipdb) inside the testcase code, or anywhere inside the Metax application. 
+ + +## editing templates + + +The directory src/metax_api/tests/testdata includes some templates for datasets, files, data catalogs etc, that are used when generating testdata that is used during automated tests, and are also automatically imported to a test env when it is being provisioned. Sometimes, but at this point rarely, the templates should be altered, in case some new features are introduced or significant changes are made to the system. When that happens, the entire testdata should be regenerated. Changing the templates brings a risk of breaking existing tests. + + +## editing and generating testdata + + +The directory src/metax_api/tests/testdata contains the script ``generate_test_data.py`` which unsurprisingly generates the test data. The output file is ``test_data.json``, which is loaded into the db at the beginning of each testcase. Editing the templates is one way to edit pre-generated testdata, but modifying the ``generate_test_data.py`` script is another, and in the script more detailed and specific customizations to the testdata can be made. Changing the testdata brings a risk of breaking existing tests. + +Whenever testdata templates are changed, or testdata in general is re-generated, the resulting ``test_data.json`` file needs to be committed to the codebase. 
diff --git a/src/metax_api/tests/testdata/data_catalog_test_data_template.json b/src/metax_api/tests/testdata/data_catalog_test_data_template.json index e5bd5eb0..3dcf978f 100644 --- a/src/metax_api/tests/testdata/data_catalog_test_data_template.json +++ b/src/metax_api/tests/testdata/data_catalog_test_data_template.json @@ -102,5 +102,7 @@ "research_dataset_schema": null, "dataset_versioning": true }, - "service_created": "metax" + "service_created": "metax", + "catalog_record_services_create": "testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax" } \ No newline at end of file diff --git a/src/metax_api/tests/testdata/file_test_data_template.json b/src/metax_api/tests/testdata/file_test_data_template.json index 53053ff8..8e3b88e4 100644 --- a/src/metax_api/tests/testdata/file_test_data_template.json +++ b/src/metax_api/tests/testdata/file_test_data_template.json @@ -1,7 +1,7 @@ { "project_identifier": "some_project_identifier", "byte_size": 5512, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", diff --git a/src/metax_api/tests/testdata/test_data.json b/src/metax_api/tests/testdata/test_data.json index 142a647a..3f1d3390 100644 --- a/src/metax_api/tests/testdata/test_data.json +++ b/src/metax_api/tests/testdata/test_data.json @@ -28,7 +28,7 @@ { "fields": { "byte_size": 100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -61,7 +61,7 @@ { "fields": { "byte_size": 200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -94,7 +94,7 @@ { "fields": { "byte_size": 300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", 
"checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -127,7 +127,7 @@ { "fields": { "byte_size": 400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -160,7 +160,7 @@ { "fields": { "byte_size": 500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -193,7 +193,7 @@ { "fields": { "byte_size": 600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -226,7 +226,7 @@ { "fields": { "byte_size": 700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -259,7 +259,7 @@ { "fields": { "byte_size": 800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -292,7 +292,7 @@ { "fields": { "byte_size": 900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -325,7 +325,7 @@ { "fields": { "byte_size": 1000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -358,7 +358,7 @@ { "fields": { "byte_size": 1100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -391,7 +391,7 @@ 
{ "fields": { "byte_size": 1200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -424,7 +424,7 @@ { "fields": { "byte_size": 1300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -457,7 +457,7 @@ { "fields": { "byte_size": 1400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -490,7 +490,7 @@ { "fields": { "byte_size": 1500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -523,7 +523,7 @@ { "fields": { "byte_size": 1600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -556,7 +556,7 @@ { "fields": { "byte_size": 1700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -589,7 +589,7 @@ { "fields": { "byte_size": 1800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -622,7 +622,7 @@ { "fields": { "byte_size": 1900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -655,7 +655,7 @@ { "fields": { "byte_size": 2000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": 
"2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -688,7 +688,7 @@ { "fields": { "byte_size": 2100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -721,7 +721,7 @@ { "fields": { "byte_size": 2200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -754,7 +754,7 @@ { "fields": { "byte_size": 2300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -787,7 +787,7 @@ { "fields": { "byte_size": 2400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -820,7 +820,7 @@ { "fields": { "byte_size": 2500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -853,7 +853,7 @@ { "fields": { "byte_size": 2600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -886,7 +886,7 @@ { "fields": { "byte_size": 2700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -919,7 +919,7 @@ { "fields": { "byte_size": 2800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -952,7 +952,7 @@ { "fields": { 
"byte_size": 2900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -985,7 +985,7 @@ { "fields": { "byte_size": 3000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1018,7 +1018,7 @@ { "fields": { "byte_size": 3100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1051,7 +1051,7 @@ { "fields": { "byte_size": 3200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1084,7 +1084,7 @@ { "fields": { "byte_size": 3300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1117,7 +1117,7 @@ { "fields": { "byte_size": 3400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1150,7 +1150,7 @@ { "fields": { "byte_size": 3500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1183,7 +1183,7 @@ { "fields": { "byte_size": 3600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1216,7 +1216,7 @@ { "fields": { "byte_size": 3700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": 
"2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1249,7 +1249,7 @@ { "fields": { "byte_size": 3800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1282,7 +1282,7 @@ { "fields": { "byte_size": 3900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1315,7 +1315,7 @@ { "fields": { "byte_size": 4000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1348,7 +1348,7 @@ { "fields": { "byte_size": 4100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1381,7 +1381,7 @@ { "fields": { "byte_size": 4200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1414,7 +1414,7 @@ { "fields": { "byte_size": 4300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1447,7 +1447,7 @@ { "fields": { "byte_size": 4400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1480,7 +1480,7 @@ { "fields": { "byte_size": 4500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1513,7 
+1513,7 @@ { "fields": { "byte_size": 4600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1546,7 +1546,7 @@ { "fields": { "byte_size": 4700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1579,7 +1579,7 @@ { "fields": { "byte_size": 4800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1612,7 +1612,7 @@ { "fields": { "byte_size": 4900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1645,7 +1645,7 @@ { "fields": { "byte_size": 5000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1678,7 +1678,7 @@ { "fields": { "byte_size": 5100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1711,7 +1711,7 @@ { "fields": { "byte_size": 5200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1744,7 +1744,7 @@ { "fields": { "byte_size": 5300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1777,7 +1777,7 @@ { "fields": { "byte_size": 5400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", 
"checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1810,7 +1810,7 @@ { "fields": { "byte_size": 5500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1843,7 +1843,7 @@ { "fields": { "byte_size": 5600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1876,7 +1876,7 @@ { "fields": { "byte_size": 5700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1909,7 +1909,7 @@ { "fields": { "byte_size": 5800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1942,7 +1942,7 @@ { "fields": { "byte_size": 5900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -1975,7 +1975,7 @@ { "fields": { "byte_size": 6000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2008,7 +2008,7 @@ { "fields": { "byte_size": 6100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2041,7 +2041,7 @@ { "fields": { "byte_size": 6200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": 
"2017-05-23T10:07:22Z", @@ -2074,7 +2074,7 @@ { "fields": { "byte_size": 6300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2107,7 +2107,7 @@ { "fields": { "byte_size": 6400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2140,7 +2140,7 @@ { "fields": { "byte_size": 6500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2173,7 +2173,7 @@ { "fields": { "byte_size": 6600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2206,7 +2206,7 @@ { "fields": { "byte_size": 6700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2239,7 +2239,7 @@ { "fields": { "byte_size": 6800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2272,7 +2272,7 @@ { "fields": { "byte_size": 6900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2305,7 +2305,7 @@ { "fields": { "byte_size": 7000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2338,7 +2338,7 @@ { "fields": { "byte_size": 7100, - "checksum_algorithm": "sha2", 
+ "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2371,7 +2371,7 @@ { "fields": { "byte_size": 7200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2404,7 +2404,7 @@ { "fields": { "byte_size": 7300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2437,7 +2437,7 @@ { "fields": { "byte_size": 7400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2470,7 +2470,7 @@ { "fields": { "byte_size": 7500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2503,7 +2503,7 @@ { "fields": { "byte_size": 7600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2536,7 +2536,7 @@ { "fields": { "byte_size": 7700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2569,7 +2569,7 @@ { "fields": { "byte_size": 7800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2602,7 +2602,7 @@ { "fields": { "byte_size": 7900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", 
"date_created": "2017-05-23T10:07:22Z", @@ -2635,7 +2635,7 @@ { "fields": { "byte_size": 8000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2668,7 +2668,7 @@ { "fields": { "byte_size": 8100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2701,7 +2701,7 @@ { "fields": { "byte_size": 8200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2734,7 +2734,7 @@ { "fields": { "byte_size": 8300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2767,7 +2767,7 @@ { "fields": { "byte_size": 8400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2800,7 +2800,7 @@ { "fields": { "byte_size": 8500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2833,7 +2833,7 @@ { "fields": { "byte_size": 8600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2866,7 +2866,7 @@ { "fields": { "byte_size": 8700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2899,7 +2899,7 @@ { "fields": { "byte_size": 8800, - 
"checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2932,7 +2932,7 @@ { "fields": { "byte_size": 8900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2965,7 +2965,7 @@ { "fields": { "byte_size": 9000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -2998,7 +2998,7 @@ { "fields": { "byte_size": 9100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3031,7 +3031,7 @@ { "fields": { "byte_size": 9200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3064,7 +3064,7 @@ { "fields": { "byte_size": 9300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3097,7 +3097,7 @@ { "fields": { "byte_size": 9400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3130,7 +3130,7 @@ { "fields": { "byte_size": 9500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3163,7 +3163,7 @@ { "fields": { "byte_size": 9600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", 
"checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3196,7 +3196,7 @@ { "fields": { "byte_size": 9700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3229,7 +3229,7 @@ { "fields": { "byte_size": 9800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3262,7 +3262,7 @@ { "fields": { "byte_size": 9900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3295,7 +3295,7 @@ { "fields": { "byte_size": 10000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3328,7 +3328,7 @@ { "fields": { "byte_size": 10100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3361,7 +3361,7 @@ { "fields": { "byte_size": 10200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3394,7 +3394,7 @@ { "fields": { "byte_size": 10300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3427,7 +3427,7 @@ { "fields": { "byte_size": 10400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3460,7 +3460,7 @@ { "fields": { 
"byte_size": 10500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3493,7 +3493,7 @@ { "fields": { "byte_size": 10600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3526,7 +3526,7 @@ { "fields": { "byte_size": 10700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3559,7 +3559,7 @@ { "fields": { "byte_size": 10800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3592,7 +3592,7 @@ { "fields": { "byte_size": 10900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3625,7 +3625,7 @@ { "fields": { "byte_size": 11000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3658,7 +3658,7 @@ { "fields": { "byte_size": 11100, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3691,7 +3691,7 @@ { "fields": { "byte_size": 11200, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3724,7 +3724,7 @@ { "fields": { "byte_size": 11300, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", 
"checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3757,7 +3757,7 @@ { "fields": { "byte_size": 11400, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3790,7 +3790,7 @@ { "fields": { "byte_size": 11500, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3823,7 +3823,7 @@ { "fields": { "byte_size": 11600, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3856,7 +3856,7 @@ { "fields": { "byte_size": 11700, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3889,7 +3889,7 @@ { "fields": { "byte_size": 11800, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3922,7 +3922,7 @@ { "fields": { "byte_size": 11900, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -3955,7 +3955,7 @@ { "fields": { "byte_size": 12000, - "checksum_algorithm": "sha2", + "checksum_algorithm": "SHA-256", "checksum_checked": "2017-06-23T14:41:59Z", "checksum_value": "habeebit", "date_created": "2017-05-23T10:07:22Z", @@ -4523,6 +4523,8 @@ "fi": "Testidatakatalogin nimi" } }, + "catalog_record_services_create": "testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax", "date_created": 
"2017-05-15T10:07:22Z", "date_modified": "2017-06-15T10:07:22Z", "service_created": "metax" @@ -4636,6 +4638,8 @@ "fi": "Testidatakatalogin nimi" } }, + "catalog_record_services_create": "testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax", "date_created": "2017-05-15T10:07:22Z", "date_modified": "2017-06-15T10:07:22Z", "service_created": "metax" @@ -4749,6 +4753,8 @@ "fi": "Testidatakatalogin nimi" } }, + "catalog_record_services_create": "testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax", "date_created": "2017-05-15T10:07:22Z", "date_modified": "2017-06-15T10:07:22Z", "service_created": "metax" @@ -4862,6 +4868,8 @@ "fi": "Testidatakatalogin nimi" } }, + "catalog_record_services_create": "testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax", "date_created": "2017-05-15T10:07:22Z", "date_modified": "2017-06-15T10:07:22Z", "service_created": "metax" @@ -4975,6 +4983,8 @@ "fi": "Testidatakatalogin nimi" } }, + "catalog_record_services_create": "testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax", "date_created": "2017-05-15T10:07:22Z", "date_modified": "2017-06-15T10:07:22Z", "service_created": "metax" @@ -5088,6 +5098,8 @@ "fi": "Testidatakatalogin nimi" } }, + "catalog_record_services_create": "testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax", "date_created": "2017-05-15T10:07:22Z", "date_modified": "2017-06-15T10:07:22Z", "service_created": "metax" @@ -5201,6 +5213,8 @@ "fi": "Testidatakatalogin nimi" } }, + "catalog_record_services_create": "testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax", "date_created": "2017-05-15T10:07:22Z", "date_modified": "2017-06-15T10:07:22Z", "service_created": "metax" @@ -5314,6 +5328,8 @@ "fi": "Testidatakatalogin nimi" } }, + "catalog_record_services_create": 
"testuser,api_auth_user,metax", + "catalog_record_services_edit": "testuser,api_auth_user,metax", "date_created": "2017-05-15T10:07:22Z", "date_modified": "2017-06-15T10:07:22Z", "service_created": "metax" diff --git a/src/metax_api/tests/utils.py b/src/metax_api/tests/utils.py index 9f2102fd..e05b655c 100644 --- a/src/metax_api/tests/utils.py +++ b/src/metax_api/tests/utils.py @@ -36,20 +36,16 @@ def get_json_schema(model_name): with open(path.dirname(path.realpath(__file__)) + '/../api/rest/base/schemas/%s_schema.json' % model_name) as f: return json_load(f) + def get_test_oidc_token(new_proxy=False): - token = { - "sub": "testuser123456@fairdataid", - "linkedIds": [ - "testuser123456@fairdataid", - "testuser@csc.fi", - "testuser@cscuserid" - ], + # note: leaving new_proxy param in place for now to avoid conflicts. + return { + "sub": "randomstringhere", "displayName": "Teppo Testaaja", "eppn": "testuser@csc.fi", "iss": "https://fd-auth.csc.fi", "group_names": [ - "fairdata:TAITO01", - "fairdata:TAITO01:2002013", + "TAITO01:2002013", "IDA01:2001036" ], "schacHomeOrganizationType": "urn:schac:homeOrganizationType:fi:other", @@ -64,15 +60,11 @@ def get_test_oidc_token(new_proxy=False): "exp": 1535539191, "iat": 1535535591, "family_name": "Testaaja", - "email": "teppo.testaaja@csc.fi" + "email": "teppo.testaaja@csc.fi", + "CSCUserName": "testuser", + "CSCOrgNameFi": "IT Center for Science", } - if new_proxy: - token["sub"] = "randomstringhere" - token["CSCUserName"] = "testuser" - token["CSCOrgNameFi"] = "IT Center for Science" - - return token def generate_test_identifier(itype, index, urn=True): ''' @@ -91,6 +83,7 @@ def generate_test_identifier(itype, index, urn=True): return 'urn:nbn:fi:att:%s' % (uuid[:-len(postfix)] + postfix) return uuid[:-len(postfix)] + postfix + def generate_test_token(payload): ''' While the real algorithm used in the Fairdata auth component is RS256, HS256 is the only one @@ -112,7 +105,12 @@ def create_end_user_data_catalogs(self): 
catalog_json = dc.catalog_json for identifier in django_settings.END_USER_ALLOWED_DATA_CATALOGS: catalog_json['identifier'] = identifier - DataCatalog.objects.create(catalog_json=catalog_json, date_created=get_tz_aware_now_without_micros()) + DataCatalog.objects.create( + catalog_json=catalog_json, + date_created=get_tz_aware_now_without_micros(), + catalog_record_services_create='testuser,api_auth_user,metax', + catalog_record_services_edit='testuser,api_auth_user,metax' + ) def _set_http_authorization(self, credentials_type): # Deactivate credentials @@ -213,8 +211,8 @@ def _create_cr_for_owner(self, pk_for_template_cr, data): self.token = get_test_oidc_token() if 'editor' in data: data.pop('editor', None) - data['user_created'] = self.token['sub'] - data['metadata_provider_user'] = self.token['sub'] + data['user_created'] = self.token['CSCUserName'] + data['metadata_provider_user'] = self.token['CSCUserName'] data['metadata_provider_org'] = self.token['schacHomeOrganization'] data['metadata_owner_org'] = self.token['schacHomeOrganization'] data['data_catalog']['identifier'] = django_settings.END_USER_ALLOWED_DATA_CATALOGS[0] @@ -334,4 +332,4 @@ def get_embargoed_cr_with_files_and_dirs_from_api_with_file_details(self, is_ava # Empty credentials to not mess up the actual test self.client.credentials() - return response.data \ No newline at end of file + return response.data diff --git a/src/metax_api/views/secure/secure_view.py b/src/metax_api/views/secure/secure_view.py index 6575b5ff..1976e227 100644 --- a/src/metax_api/views/secure/secure_view.py +++ b/src/metax_api/views/secure/secure_view.py @@ -35,39 +35,6 @@ def get(self, request, **kwargs): token_payload = json.loads(request.META['HTTP_OIDC_ID_TOKEN_PAYLOAD']) _logger.debug(token_payload) - if token_payload.get('sub', '').endswith('@fairdataid'): - return self._old_proxy(request, token_payload) - else: - return self._new_proxy(request, token_payload) - - def _old_proxy(self, request, token_payload): - 
linked_accounts = self._get_linked_accounts(token_payload) - - for acc in linked_accounts: - if acc.endswith('@cscuserid'): - csc_account_linked = True - break - else: - csc_account_linked = False - - json_logger.info( - event='user_login_visit', - user_id=token_payload['sub'], - org_id=token_payload['schacHomeOrganization'], - ) - - context = { - 'email': token_payload['email'], - 'linked_accounts': linked_accounts, - 'csc_account_linked': csc_account_linked, - 'token_string': request.META['HTTP_OIDC_ID_TOKEN'], - 'token_valid_until': datetime.fromtimestamp(token_payload['exp']).strftime('%Y-%m-%d %H:%M:%S'), - } - - # note: django automatically searches templates from root directory templates/ - return render(request, 'secure/auth_success.html', context=context) - - def _new_proxy(self, request, token_payload): try: json_logger.info( event='user_login_visit', @@ -92,10 +59,7 @@ def _new_proxy(self, request, token_payload): } # note: django automatically searches templates from root directory templates/ - return render(request, 'secure/auth_success_new.html', context=context) - - def _get_linked_accounts(self, token_payload): - return [ acc for acc in token_payload.get('linkedIds', []) if not acc.endswith('@fairdataid') ] + return render(request, 'secure/auth_success.html', context=context) class SecureLogoutView(TemplateView): diff --git a/swagger/swagger-yaml-to-html.py b/swagger/swagger-yaml-to-html.py index 47b31675..aac26e73 100644 --- a/swagger/swagger-yaml-to-html.py +++ b/swagger/swagger-yaml-to-html.py @@ -2,7 +2,11 @@ """ Usage: - python swagger-yaml-to-html.py < /path/to/api.yaml > doc.html +python swagger-yaml-to-html.py < swagger.yaml > index.html + +Note: The index.html file does not need to be manually updated in the repository. There is an ansible role in +metax-ops/ansible/roles/docs, which automatically generates the index.html swagger page. 
For development it may +be useful to manully run the above script Credits: @@ -77,5 +81,5 @@ """ -spec = yaml.load(sys.stdin) +spec = yaml.load(sys.stdin, Loader=yaml.FullLoader) sys.stdout.write(TEMPLATE % json.dumps(spec)) diff --git a/swagger/swagger.yaml b/swagger/swagger.yaml index ea592c0b..fff49b0e 100644 --- a/swagger/swagger.yaml +++ b/swagger/swagger.yaml @@ -811,7 +811,7 @@ paths: type: string - name: data_catalog in: query - description: Filter by data catalog identifier + description: Filter by data catalog urn identifier type: string - name: offset in: query