"""Cache table operations for DwC export pipeline."""
import logging
import re
import traceback

from django.db import connection

from .dwca_utils import sanitize_column_name

logger = logging.getLogger(__name__)


def get_cache_table_name(mapping_id, collection_id, prefix='dwc_cache'):
    """Generate a safe cache table name for a (mapping, collection) pair."""
    return f'{prefix}_{mapping_id}_{collection_id}'


def create_cache_table(table_name, columns):
    """Create a cache table with the given columns.

    columns: list of (column_name, column_type) tuples.
    An auto-increment primary key is always added.
    """
    # Identifiers cannot be parameterized in SQL, so strip everything that is
    # not alphanumeric/underscore before interpolating into the statement.
    safe_name = re.sub(r'[^a-zA-Z0-9_]', '', table_name)
    col_defs = ', '.join(
        f'`{re.sub(r"[^a-zA-Z0-9_]", "", name)}` {col_type}'
        for name, col_type in columns
    )
    with connection.cursor() as cursor:
        cursor.execute(f'DROP TABLE IF EXISTS `{safe_name}`')
        cursor.execute(
            f'CREATE TABLE `{safe_name}` ('
            f'`id` INT AUTO_INCREMENT PRIMARY KEY, {col_defs}'
            f') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4'
        )
    logger.info('Created cache table %s', safe_name)


def drop_cache_table(table_name):
    """Drop a cache table if it exists."""
    safe_name = re.sub(r'[^a-zA-Z0-9_]', '', table_name)
    with connection.cursor() as cursor:
        cursor.execute(f'DROP TABLE IF EXISTS `{safe_name}`')
    logger.info('Dropped cache table %s', safe_name)


def build_cache_tables(export_dataset, user=None, progress_callback=None):
    """Build cache tables for an ExportDataSet's core mapping and all extensions."""
    core_mapping = export_dataset.coremapping
    collection = export_dataset.collection

    _build_single_cache(core_mapping, collection, user=user,
                        progress_callback=progress_callback)

    # Extensions are built in sortorder so their table-name prefixes
    # (dwc_cache_ext<N>) line up with their position in the archive.
    for ext in export_dataset.extensions.all().order_by('sortorder').iterator(chunk_size=2000):
        _build_single_cache(ext.schemamapping, collection,
                            prefix=f'dwc_cache_ext{ext.sortorder}',
                            user=user, progress_callback=progress_callback)


def _build_single_cache(mapping, collection, prefix='dwc_cache', user=None,
                        progress_callback=None):
    """Build a single cache table for one SchemaMapping.

    Creates/updates the CacheTableMeta row tracking the build, (re)creates the
    physical table, populates it, and records status/rowcount. Re-raises any
    failure after marking the meta row as errored.
    """
    from .models import CacheTableMeta
    from django.utils import timezone

    table_name = get_cache_table_name(mapping.id, collection.id, prefix)

    meta, _ = CacheTableMeta.objects.update_or_create(
        schemamapping=mapping,
        defaults={
            'tablename': table_name,
            'buildstatus': 'building',
            # BUG FIX: CacheTableMeta.collection is a non-null FK; without it
            # the initial create raised an IntegrityError.
            'collection': collection,
            # Clear any error left over from a previous failed build.
            'builderror': None,
        }
    )

    try:
        display_fields = [
            f for f in mapping.query.fields.order_by('position')
            if getattr(f, 'term', None)
        ]

        columns = [
            (sanitize_column_name(f.term), _infer_column_type(f))
            for f in display_fields
        ]

        create_cache_table(table_name, columns)

        rowcount = _execute_and_populate(
            table_name, mapping, collection, user, progress_callback
        )

        meta.buildstatus = 'idle'
        meta.lastbuilt = timezone.now()
        meta.rowcount = rowcount
        meta.save()

        logger.info('Cache table %s built with %d rows', table_name, rowcount)

    except Exception:
        meta.buildstatus = 'error'
        # BUG FIX: persist the failure detail; the BuildError column existed
        # but was never written.
        meta.builderror = traceback.format_exc()
        meta.save()
        logger.exception('Failed to build cache table %s', table_name)
        raise


def _execute_and_populate(table_name, mapping, collection, user, progress_callback=None):
    """Execute a mapping's query and INSERT results into the cache table.

    Uses SQLAlchemy build_query() to ensure output matches query_to_csv
    (date formatting, null replacement, etc.), then batch-INSERTs rows.

    Returns the number of rows inserted.
    """
    from specifyweb.backend.stored_queries.execution import (
        build_query, BuildQueryProps, set_group_concat_max_len,
        apply_special_post_query_processing,
    )
    from specifyweb.backend.stored_queries.queryfield import QueryField
    from specifyweb.backend.stored_queries.models import session_context
    from .field_adapter import EphemeralFieldAdapter

    query_obj = mapping.query
    display_fields = [
        f for f in query_obj.fields.order_by('position')
        if getattr(f, 'term', None)
    ]
    field_specs = [
        QueryField.from_spqueryfield(EphemeralFieldAdapter(f, force_display=True))
        for f in display_fields
    ]

    safe_name = re.sub(r'[^a-zA-Z0-9_]', '', table_name)
    col_count = len(display_fields)
    placeholders = ', '.join(['%s'] * col_count)
    col_names = ', '.join(
        f'`{sanitize_column_name(f.term)}`'
        for f in display_fields
    )
    insert_sql = f'INSERT INTO `{safe_name}` ({col_names}) VALUES ({placeholders})'

    total = 0
    BATCH_SIZE = 2000

    with session_context() as session:
        set_group_concat_max_len(session.connection())
        sa_query, _ = build_query(
            session, collection, user,
            query_obj.contexttableid,
            field_specs,
            BuildQueryProps(
                replace_nulls=True,
                date_format_override='%Y-%m-%d',
            ),
        )
        sa_query = apply_special_post_query_processing(
            sa_query, query_obj.contexttableid, field_specs, collection, user,
            should_list_query=False,
        )

        batch = []
        if isinstance(sa_query, list):
            iterator = iter(sa_query)
        else:
            iterator = sa_query.yield_per(BATCH_SIZE)

        for row in iterator:
            # row[0] is presumably the record id column prepended by
            # build_query — only the mapped term columns are cached.
            # TODO(review): confirm against build_query's column layout.
            batch.append(tuple(
                str(v) if v is not None else '' for v in row[1:]
            ))

            if len(batch) >= BATCH_SIZE:
                with connection.cursor() as cursor:
                    cursor.executemany(insert_sql, batch)
                total += len(batch)
                batch = []
                if progress_callback:
                    progress_callback(total, None)

        if batch:
            with connection.cursor() as cursor:
                cursor.executemany(insert_sql, batch)
            total += len(batch)

    if progress_callback:
        progress_callback(total, total)

    return total


def _infer_column_type(spqueryfield):
    """Infer a MySQL column type from a Specify query field.

    Heuristic on the field name only; anything unrecognized falls back to
    TEXT, which is always safe since values are inserted as strings.
    """
    fname = (spqueryfield.fieldname or '').lower()

    if 'guid' in fname or 'uuid' in fname:
        return 'VARCHAR(256)'
    if fname in ('id', 'rankid', 'number1', 'number2', 'countamt',
                 'sortorder', 'position', 'version'):
        return 'INT'
    if 'numericyear' in fname or 'numericmonth' in fname or 'numericday' in fname:
        return 'INT'
    if fname in ('latitude1', 'latitude2', 'longitude1', 'longitude2',
                 'latlongaccuracy', 'maxelevation', 'minelevation'):
        return 'DECIMAL(12,6)'
    # Dates are stored pre-formatted as '%Y-%m-%d' strings (see
    # _execute_and_populate), hence VARCHAR rather than DATE.
    if fname in ('startdate', 'enddate', 'determineddate', 'catalogeddate',
                 'timestampcreated', 'timestampmodified'):
        return 'VARCHAR(32)'
    if fname.startswith('is') or fname.startswith('yes'):
        return 'VARCHAR(8)'
    if fname in ('catalognumber', 'altcatalognumber', 'barcode', 'fieldnumber',
                 'code', 'abbreviation', 'datum'):
        return 'VARCHAR(256)'
    return 'TEXT'
def sanitize_term_name(term_iri):
    """Extract the short name from a DwC term IRI.

    'http://rs.tdwg.org/dwc/terms/catalogNumber' -> 'catalogNumber'
    'http://purl.org/dc/terms/type' -> 'type'
    """
    if '/' in term_iri:
        term_iri = term_iri.rsplit('/', 1)[-1]
    if '#' in term_iri:
        term_iri = term_iri.rsplit('#', 1)[-1]
    return term_iri


def sanitize_column_name(name):
    """Sanitize a term IRI into a valid MySQL column name.

    Takes the IRI's short name, replaces disallowed characters with '_',
    and truncates to MySQL's 64-character identifier limit.
    """
    name = sanitize_term_name(name)
    name = re.sub(r'[^a-zA-Z0-9_]', '_', name)
    return name[:64]


# Known extension rowType URIs
EXTENSION_ROW_TYPES = {
    'MeasurementOrFact': 'http://rs.iobis.org/obis/terms/ExtendedMeasurementOrFact',
    'ResourceRelationship': 'http://rs.tdwg.org/dwc/terms/ResourceRelationship',
    'Identification': 'http://rs.tdwg.org/dwc/terms/Identification',
    'Multimedia': 'http://rs.gbif.org/terms/1.0/Multimedia',
}


def _set_csv_format_attrs(elem):
    """Apply the CSV file-format attributes shared by core and extensions."""
    elem.set('encoding', 'UTF-8')
    elem.set('fieldsTerminatedBy', ',')
    # meta.xml expects the literal two-character escape "\n", hence '\\n'.
    elem.set('linesTerminatedBy', '\\n')
    elem.set('fieldsEnclosedBy', '"')
    elem.set('ignoreHeaderLines', '1')


def build_meta_xml(core_terms, ext_info_list):
    """Build meta.xml describing the DwC archive structure.

    core_terms: list of full term IRIs for the core file
    ext_info_list: list of dicts with 'filename' and 'terms' (full IRIs),
        and optionally 'rowType'
    """
    archive = ET.Element('archive')
    archive.set('xmlns', 'http://rs.tdwg.org/dwc/text/')
    archive.set('metadata', 'eml.xml')

    # Core
    core = ET.SubElement(archive, 'core')
    _set_csv_format_attrs(core)
    core.set('rowType', 'http://rs.tdwg.org/dwc/terms/Occurrence')

    files = ET.SubElement(core, 'files')
    location = ET.SubElement(files, 'location')
    location.text = 'occurrence.csv'

    # BUG FIX: always declare the core <id> column. Extensions below point
    # their <coreid> at index 0 unconditionally, so the id element must be
    # present even when no core terms are mapped (it was previously skipped
    # for an empty term list).
    id_elem = ET.SubElement(core, 'id')
    id_elem.set('index', '0')

    for idx, term_iri in enumerate(core_terms):
        f = ET.SubElement(core, 'field')
        f.set('index', str(idx))
        f.set('term', term_iri)

    # Extensions
    for ext in ext_info_list:
        extension = ET.SubElement(archive, 'extension')
        _set_csv_format_attrs(extension)
        row_type = ext.get('rowType', 'http://rs.tdwg.org/dwc/terms/MeasurementOrFact')
        extension.set('rowType', row_type)

        files = ET.SubElement(extension, 'files')
        location = ET.SubElement(files, 'location')
        location.text = ext['filename']

        coreid = ET.SubElement(extension, 'coreid')
        coreid.set('index', '0')

        for idx, term_iri in enumerate(ext['terms']):
            f = ET.SubElement(extension, 'field')
            f.set('index', str(idx))
            f.set('term', term_iri)

    return ET.tostring(archive, encoding='unicode', xml_declaration=True)


def build_eml_xml(export_dataset):
    """Build EML metadata. Returns custom EML if uploaded, else generates minimal EML."""
    if export_dataset.metadata:
        try:
            from specifyweb.specify.models import Spappresourcedata
            data = Spappresourcedata.objects.filter(
                spappresource=export_dataset.metadata
            ).first()
            if data and data.data:
                content = data.data
                if isinstance(content, bytes):
                    content = content.decode('utf-8')
                return content
        except Exception:
            # Deliberate best-effort: if the uploaded EML resource cannot be
            # read for any reason, fall through to the generated minimal EML.
            pass

    eml = ET.Element('eml:eml')
    eml.set('xmlns:eml', 'eml://ecoinformatics.org/eml-2.1.1')
    eml.set('packageId', str(uuid4()))
    eml.set('system', 'http://specify.org')

    dataset = ET.SubElement(eml, 'dataset')
    title = ET.SubElement(dataset, 'title')
    title.text = export_dataset.exportname

    creator = ET.SubElement(dataset, 'creator')
    org = ET.SubElement(creator, 'organizationName')
    org.text = 'Specify Collection'

    pubdate = ET.SubElement(dataset, 'pubDate')
    pubdate.text = date.today().strftime('%Y-%m-%d')

    abstract = ET.SubElement(dataset, 'abstract')
    para = ET.SubElement(abstract, 'para')
    para.text = f'Darwin Core Archive export: {export_dataset.exportname}'

    return ET.tostring(eml, encoding='unicode', xml_declaration=True)
# Initial migration for the export app: creates the `cachetablemeta` table
# that tracks build state for the DwC export cache tables.
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone


class Migration(migrations.Migration):

    initial = True

    # Depends on the specify-app migration that introduced the
    # schemamapping/export tables referenced by the FKs below.
    dependencies = [
        ('specify', '0048_extensions_and_vocabulary'),
    ]

    operations = [
        migrations.CreateModel(
            name='CacheTableMeta',
            fields=[
                ('id', models.AutoField(db_column='CacheTableMetaID', primary_key=True, serialize=False)),
                # Physical MySQL table name holding the cached rows.
                ('tablename', models.CharField(db_column='TableName', max_length=128, unique=True)),
                ('lastbuilt', models.DateTimeField(blank=True, db_column='LastBuilt', null=True)),
                ('rowcount', models.IntegerField(blank=True, db_column='RowCount', null=True)),
                ('buildstatus', models.CharField(
                    choices=[('idle', 'idle'), ('building', 'building'), ('error', 'error')],
                    db_column='BuildStatus', default='idle', max_length=16,
                )),
                ('builderror', models.TextField(blank=True, db_column='BuildError', null=True)),
                ('timestampcreated', models.DateTimeField(db_column='TimestampCreated', default=django.utils.timezone.now)),
                ('timestampmodified', models.DateTimeField(db_column='TimestampModified', default=django.utils.timezone.now)),
                # NOTE: both FKs are non-nullable; creating a CacheTableMeta
                # row requires a collection and a schemamapping.
                ('collection', models.ForeignKey(
                    db_column='CollectionID',
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name='+', to='specify.collection',
                )),
                ('schemamapping', models.ForeignKey(
                    db_column='SchemaMappingID',
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name='cachetablemetas', to='specify.schemamapping',
                )),
            ],
            options={
                'db_table': 'cachetablemeta',
                'indexes': [models.Index(fields=['schemamapping', 'collection'], name='CacheMetaMappingColIDX')],
            },
        ),
    ]
"""Models for the export app.

The core DwC mapping/dataset/extension tables live on the main `specify`
app (added in upstream PRs #7873/#7874/#7877 and #7746). This module
re-exports them under PascalCase aliases for use throughout this package
and adds the cache-tracking model that's specific to the cache engine.
"""
from django.db import models
from django.utils import timezone

# Re-export the specify-app models under the PascalCase names used by the
# rest of this package.
from specifyweb.specify.models import (
    Schemamapping as SchemaMapping,
    Exportdataset as ExportDataSet,
    Exportdatasetextension as ExportDataSetExtension,
)

__all__ = ['SchemaMapping', 'ExportDataSet', 'ExportDataSetExtension', 'CacheTableMeta']


class CacheTableMeta(models.Model):
    """Tracks build state and metadata for cache tables backing DwC exports."""

    id = models.AutoField(primary_key=True, db_column='CacheTableMetaID')

    # Mapping this cache table was built from.
    schemamapping = models.ForeignKey(
        'specify.Schemamapping', db_column='SchemaMappingID',
        related_name='cachetablemetas', null=False, on_delete=models.CASCADE,
    )
    # Collection the cached rows belong to. NOTE(review): non-null, so every
    # code path that creates a CacheTableMeta row must supply it.
    collection = models.ForeignKey(
        'specify.Collection', db_column='CollectionID',
        related_name='+', null=False, on_delete=models.CASCADE,
    )
    # Physical MySQL table name holding the cached rows.
    tablename = models.CharField(max_length=128, unique=True, db_column='TableName')
    # Timestamp of the last successful build; null until first build completes.
    lastbuilt = models.DateTimeField(blank=True, null=True, db_column='LastBuilt')
    # Row count recorded after the last successful build.
    rowcount = models.IntegerField(blank=True, null=True, db_column='RowCount')
    buildstatus = models.CharField(
        max_length=16, default='idle', db_column='BuildStatus',
        choices=[('idle', 'idle'), ('building', 'building'), ('error', 'error')],
    )
    # Failure detail intended for when buildstatus == 'error'.
    builderror = models.TextField(blank=True, null=True, db_column='BuildError')
    timestampcreated = models.DateTimeField(default=timezone.now, db_column='TimestampCreated')
    # NOTE(review): uses default=timezone.now, not auto_now, so this is only
    # set on creation unless updated explicitly by callers.
    timestampmodified = models.DateTimeField(default=timezone.now, db_column='TimestampModified')

    class Meta:
        db_table = 'cachetablemeta'
        indexes = [
            models.Index(fields=['schemamapping', 'collection'], name='CacheMetaMappingColIDX'),
        ]
"lifeStage", + "description": "The age class or life stage of the Organism(s) at the time the Occurrence was recorded", + "group": "Occurrence", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/reproductiveCondition": { + "name": "reproductiveCondition", + "description": "The reproductive condition of the biological individual(s) represented in the Occurrence", + "group": "Occurrence", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/behavior": { + "name": "behavior", + "description": "The behavior shown by the subject at the time the Occurrence was recorded", + "group": "Occurrence", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/preparations": { + "name": "preparations", + "description": "A list of preparations and preservation methods for a specimen", + "group": "Occurrence", + "mappingPaths": [["CollectionObject", "preparations", "prepType", "name"]] + }, + "http://rs.tdwg.org/dwc/terms/disposition": { + "name": "disposition", + "description": "The current state of a specimen with respect to the collection identified in collectionCode", + "group": "Occurrence", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/otherCatalogNumbers": { + "name": "otherCatalogNumbers", + "description": "A list of previous or alternate fully qualified catalog numbers", + "group": "Occurrence", + "mappingPaths": [["CollectionObject", "altCatalogNumber"]] + }, + "http://rs.tdwg.org/dwc/terms/occurrenceRemarks": { + "name": "occurrenceRemarks", + "description": "Comments or notes about the Occurrence", + "group": "Occurrence", + "mappingPaths": [["CollectionObject", "remarks"]] + }, + "http://rs.tdwg.org/dwc/terms/eventDate": { + "name": "eventDate", + "description": "The date-time or interval during which an Event occurred", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "startDate"]] + }, + "http://rs.tdwg.org/dwc/terms/eventTime": { + "name": "eventTime", + "description": "The time or interval during which an 
Event occurred", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "startTime"]] + }, + "http://rs.tdwg.org/dwc/terms/startDayOfYear": { + "name": "startDayOfYear", + "description": "The earliest integer day of the year on which the Event occurred", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "startDateNumericDay"]] + }, + "http://rs.tdwg.org/dwc/terms/endDayOfYear": { + "name": "endDayOfYear", + "description": "The latest integer day of the year on which the Event occurred", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "endDateNumericDay"]] + }, + "http://rs.tdwg.org/dwc/terms/year": { + "name": "year", + "description": "The four-digit year in which the Event occurred", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "startDateNumericYear"]] + }, + "http://rs.tdwg.org/dwc/terms/month": { + "name": "month", + "description": "The integer month in which the Event occurred", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "startDateNumericMonth"]] + }, + "http://rs.tdwg.org/dwc/terms/day": { + "name": "day", + "description": "The integer day of the month on which the Event occurred", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "startDateNumericDay"]] + }, + "http://rs.tdwg.org/dwc/terms/verbatimEventDate": { + "name": "verbatimEventDate", + "description": "The verbatim original representation of the date and time information for an Event", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "verbatimDate"]] + }, + "http://rs.tdwg.org/dwc/terms/habitat": { + "name": "habitat", + "description": "A category or description of the habitat in which the Event occurred", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "collectingEventAttribute", "text1"]] + }, + "http://rs.tdwg.org/dwc/terms/samplingProtocol": { + "name": 
"samplingProtocol", + "description": "The names of, references to, or descriptions of the methods or protocols used during an Event", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "method"]] + }, + "http://rs.tdwg.org/dwc/terms/fieldNumber": { + "name": "fieldNumber", + "description": "An identifier given to the event in the field", + "group": "Event", + "mappingPaths": [["CollectionObject", "fieldNumber"]] + }, + "http://rs.tdwg.org/dwc/terms/eventRemarks": { + "name": "eventRemarks", + "description": "Comments or notes about the Event", + "group": "Event", + "mappingPaths": [["CollectionObject", "collectingEvent", "remarks"]] + }, + "http://rs.tdwg.org/dwc/terms/continent": { + "name": "continent", + "description": "The name of the continent in which the Location occurs", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "geography", "continent"]] + }, + "http://rs.tdwg.org/dwc/terms/country": { + "name": "country", + "description": "The name of the country or major administrative unit in which the Location occurs", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "geography", "country"]] + }, + "http://rs.tdwg.org/dwc/terms/countryCode": { + "name": "countryCode", + "description": "The standard code for the country in which the Location occurs", + "group": "Location", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/stateProvince": { + "name": "stateProvince", + "description": "The name of the next smaller administrative region than country", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "geography", "state"]] + }, + "http://rs.tdwg.org/dwc/terms/county": { + "name": "county", + "description": "The full, unabbreviated name of the next smaller administrative region than stateProvince", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", 
"geography", "county"]] + }, + "http://rs.tdwg.org/dwc/terms/municipality": { + "name": "municipality", + "description": "The full, unabbreviated name of the next smaller administrative region than county", + "group": "Location", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/locality": { + "name": "locality", + "description": "The specific description of the place", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "localityName"]] + }, + "http://rs.tdwg.org/dwc/terms/verbatimLocality": { + "name": "verbatimLocality", + "description": "The original textual description of the place", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "verbatimLocality"]] + }, + "http://rs.tdwg.org/dwc/terms/minimumElevationInMeters": { + "name": "minimumElevationInMeters", + "description": "The lower limit of the range of elevation", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "minElevation"]] + }, + "http://rs.tdwg.org/dwc/terms/maximumElevationInMeters": { + "name": "maximumElevationInMeters", + "description": "The upper limit of the range of elevation", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "maxElevation"]] + }, + "http://rs.tdwg.org/dwc/terms/decimalLatitude": { + "name": "decimalLatitude", + "description": "The geographic latitude in decimal degrees of the geographic center of a Location", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "latitude1"]] + }, + "http://rs.tdwg.org/dwc/terms/decimalLongitude": { + "name": "decimalLongitude", + "description": "The geographic longitude in decimal degrees of the geographic center of a Location", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "longitude1"]] + }, + "http://rs.tdwg.org/dwc/terms/geodeticDatum": { + "name": "geodeticDatum", + 
"description": "The ellipsoid, geodetic datum, or spatial reference system used in decimalLatitude and decimalLongitude", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "datum"]] + }, + "http://rs.tdwg.org/dwc/terms/coordinateUncertaintyInMeters": { + "name": "coordinateUncertaintyInMeters", + "description": "The horizontal distance from the given decimalLatitude and decimalLongitude describing the smallest circle containing the whole of the Location", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "latLongAccuracy"]] + }, + "http://rs.tdwg.org/dwc/terms/verbatimCoordinates": { + "name": "verbatimCoordinates", + "description": "The verbatim original spatial coordinates of the Location", + "group": "Location", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/verbatimLatitude": { + "name": "verbatimLatitude", + "description": "The verbatim original latitude of the Location", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "verbatimLatitude"]] + }, + "http://rs.tdwg.org/dwc/terms/verbatimLongitude": { + "name": "verbatimLongitude", + "description": "The verbatim original longitude of the Location", + "group": "Location", + "mappingPaths": [["CollectionObject", "collectingEvent", "locality", "verbatimLongitude"]] + }, + "http://rs.tdwg.org/dwc/terms/kingdom": { + "name": "kingdom", + "description": "The full scientific name of the kingdom in which the taxon is classified", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "kingdom"]] + }, + "http://rs.tdwg.org/dwc/terms/phylum": { + "name": "phylum", + "description": "The full scientific name of the phylum in which the taxon is classified", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "phylum"]] + }, + "http://rs.tdwg.org/dwc/terms/class": { + "name": "class", + "description": "The full 
scientific name of the class in which the taxon is classified", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "class"]] + }, + "http://rs.tdwg.org/dwc/terms/order": { + "name": "order", + "description": "The full scientific name of the order in which the taxon is classified", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "order"]] + }, + "http://rs.tdwg.org/dwc/terms/family": { + "name": "family", + "description": "The full scientific name of the family in which the taxon is classified", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "family"]] + }, + "http://rs.tdwg.org/dwc/terms/genus": { + "name": "genus", + "description": "The full scientific name of the genus in which the taxon is classified", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "genus"]] + }, + "http://rs.tdwg.org/dwc/terms/specificEpithet": { + "name": "specificEpithet", + "description": "The name of the first or species epithet of the scientificName", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "species"]] + }, + "http://rs.tdwg.org/dwc/terms/infraspecificEpithet": { + "name": "infraspecificEpithet", + "description": "The name of the lowest or terminal infraspecific epithet of the scientificName", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "subspecies"]] + }, + "http://rs.tdwg.org/dwc/terms/taxonRank": { + "name": "taxonRank", + "description": "The taxonomic rank of the most specific name in the scientificName", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "rankId"]] + }, + "http://rs.tdwg.org/dwc/terms/scientificName": { + "name": "scientificName", + "description": "The full scientific name, with authorship and date information if known", + "group": "Taxon", + "mappingPaths": [["CollectionObject", 
"determinations", "taxon", "fullName"]] + }, + "http://rs.tdwg.org/dwc/terms/scientificNameAuthorship": { + "name": "scientificNameAuthorship", + "description": "The authorship information for the scientificName", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "author"]] + }, + "http://rs.tdwg.org/dwc/terms/vernacularName": { + "name": "vernacularName", + "description": "A common or vernacular name", + "group": "Taxon", + "mappingPaths": [["CollectionObject", "determinations", "taxon", "commonName"]] + }, + "http://rs.tdwg.org/dwc/terms/higherClassification": { + "name": "higherClassification", + "description": "A list of taxa names terminating at the rank immediately superior to the referenced taxon", + "group": "Taxon", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/taxonomicStatus": { + "name": "taxonomicStatus", + "description": "The status of the use of the scientificName as a label for a taxon", + "group": "Taxon", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/basisOfRecord": { + "name": "basisOfRecord", + "description": "The specific nature of the data record", + "group": "Record", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/institutionCode": { + "name": "institutionCode", + "description": "The name or acronym in use by the institution having custody of the object(s) or information referred to in the record", + "group": "Record", + "mappingPaths": [["CollectionObject", "collection", "institution", "code"]] + }, + "http://rs.tdwg.org/dwc/terms/collectionCode": { + "name": "collectionCode", + "description": "The name, acronym, coden, or initialism identifying the collection or data set from which the record was derived", + "group": "Record", + "mappingPaths": [["CollectionObject", "collection", "code"]] + }, + "http://rs.tdwg.org/dwc/terms/datasetName": { + "name": "datasetName", + "description": "The name identifying the data set from which the record was derived", + "group": 
"Record", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/ownerInstitutionCode": { + "name": "ownerInstitutionCode", + "description": "The name or acronym in use by the institution having ownership of the object(s) or information referred to in the record", + "group": "Record", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/informationWithheld": { + "name": "informationWithheld", + "description": "Additional information that exists, but that has not been shared in the given record", + "group": "Record", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/dataGeneralizations": { + "name": "dataGeneralizations", + "description": "Actions taken to make the shared data less specific or complete than in its original form", + "group": "Record", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/identifiedBy": { + "name": "identifiedBy", + "description": "A person, group, or organization who assigned the Taxon to the subject", + "group": "Identification", + "mappingPaths": [["CollectionObject", "determinations", "determiner", "lastName"]] + }, + "http://rs.tdwg.org/dwc/terms/dateIdentified": { + "name": "dateIdentified", + "description": "The date on which the subject was determined as representing the Taxon", + "group": "Identification", + "mappingPaths": [["CollectionObject", "determinations", "determinedDate"]] + }, + "http://rs.tdwg.org/dwc/terms/identificationRemarks": { + "name": "identificationRemarks", + "description": "Comments or notes about the Identification", + "group": "Identification", + "mappingPaths": [["CollectionObject", "determinations", "remarks"]] + }, + "http://rs.tdwg.org/dwc/terms/typeStatus": { + "name": "typeStatus", + "description": "A list of nomenclatural types applied to the subject", + "group": "Identification", + "mappingPaths": [["CollectionObject", "determinations", "typeStatusName"]] + }, + "http://rs.tdwg.org/dwc/terms/associatedMedia": { + "name": "associatedMedia", + "description": "A list of 
identifiers of media associated with the Occurrence", + "group": "Occurrence", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/associatedReferences": { + "name": "associatedReferences", + "description": "A list of identifiers of literature associated with the Occurrence", + "group": "Occurrence", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/associatedSequences": { + "name": "associatedSequences", + "description": "A list of identifiers of genetic sequence information associated with the Occurrence", + "group": "Occurrence", + "mappingPaths": [] + }, + "http://rs.tdwg.org/dwc/terms/associatedTaxa": { + "name": "associatedTaxa", + "description": "A list of identifiers or names of taxa and the associations of this Occurrence to each of them", + "group": "Occurrence", + "mappingPaths": [] + } + } + }, + "dc": { + "name": "Dublin Core", + "abbreviation": "dc", + "vocabularyURI": "http://purl.org/dc/terms/", + "description": "Dublin Core metadata terms", + "terms": { + "http://purl.org/dc/terms/type": { + "name": "type", + "description": "The nature or genre of the resource", + "group": "Record", + "mappingPaths": [] + }, + "http://purl.org/dc/terms/modified": { + "name": "modified", + "description": "The most recent date-time on which the resource was changed", + "group": "Record", + "mappingPaths": [["CollectionObject", "timestampModified"]] + }, + "http://purl.org/dc/terms/language": { + "name": "language", + "description": "A language of the resource", + "group": "Record", + "mappingPaths": [] + }, + "http://purl.org/dc/terms/license": { + "name": "license", + "description": "A legal document giving official permission to do something with the resource", + "group": "Record", + "mappingPaths": [] + }, + "http://purl.org/dc/terms/rightsHolder": { + "name": "rightsHolder", + "description": "A person or organization owning or managing rights over the resource", + "group": "Record", + "mappingPaths": [] + }, + 
"http://purl.org/dc/terms/accessRights": { + "name": "accessRights", + "description": "Information about who can access the resource or an indication of its security status", + "group": "Record", + "mappingPaths": [] + }, + "http://purl.org/dc/terms/bibliographicCitation": { + "name": "bibliographicCitation", + "description": "A bibliographic reference for the resource", + "group": "Record", + "mappingPaths": [] + }, + "http://purl.org/dc/terms/references": { + "name": "references", + "description": "A related resource that is referenced, cited, or otherwise pointed to by the described resource", + "group": "Record", + "mappingPaths": [] + } + } + }, + "ac": { + "name": "Audiovisual Core", + "abbreviation": "ac", + "vocabularyURI": "http://rs.tdwg.org/ac/terms/", + "description": "Audiovisual Core terms for multimedia resources", + "terms": { + "http://rs.tdwg.org/ac/terms/accessURI": { + "name": "accessURI", + "description": "A URI that uniquely identifies a service that provides a representation of the underlying resource", + "group": "Media", + "mappingPaths": [["CollectionObject", "collectionObjectAttachments", "attachment", "attachmentLocation"]] + }, + "http://purl.org/dc/terms/format": { + "name": "format", + "description": "The file format, physical medium, or dimensions of the resource", + "group": "Media", + "mappingPaths": [["CollectionObject", "collectionObjectAttachments", "attachment", "mimeType"]] + }, + "http://rs.tdwg.org/ac/terms/subtype": { + "name": "subtype", + "description": "Any type term from the vocabulary of types used that further refines the media type", + "group": "Media", + "mappingPaths": [] + }, + "http://rs.tdwg.org/ac/terms/caption": { + "name": "caption", + "description": "Text to be displayed together with the media representation", + "group": "Media", + "mappingPaths": [["CollectionObject", "collectionObjectAttachments", "attachment", "title"]] + }, + "http://rs.tdwg.org/ac/terms/tag": { + "name": "tag", + "description": "A tag 
or keyword associated with the media item", + "group": "Media", + "mappingPaths": [] + } + } + } + } +} diff --git a/specifyweb/backend/export/tests.py b/specifyweb/backend/export/tests.py deleted file mode 100644 index 501deb776c1..00000000000 --- a/specifyweb/backend/export/tests.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -This file demonstrates writing tests using the unittest module. These will pass -when you run "manage.py test". - -Replace this with more appropriate tests for your application. -""" - -from django.test import TestCase - - -class SimpleTest(TestCase): - def test_basic_addition(self): - """ - Tests that 1 + 1 always equals 2. - """ - self.assertEqual(1 + 1, 2) diff --git a/specifyweb/backend/export/tests/__init__.py b/specifyweb/backend/export/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/specifyweb/backend/export/tests/test_cache.py b/specifyweb/backend/export/tests/test_cache.py new file mode 100644 index 00000000000..eba57175829 --- /dev/null +++ b/specifyweb/backend/export/tests/test_cache.py @@ -0,0 +1,120 @@ +from django.db import connection +from django.test import TestCase, TransactionTestCase + +from specifyweb.backend.export.cache import ( + create_cache_table, drop_cache_table, get_cache_table_name, + _build_single_cache, +) +from specifyweb.backend.export.dwca_utils import sanitize_column_name + + +class CacheTableNameTests(TestCase): + + def test_cache_table_name_generation(self): + name = get_cache_table_name(5, 4) + self.assertEqual(name, 'dwc_cache_5_4') + + def test_cache_table_name_sanitization(self): + # Special chars in prefix are not stripped by get_cache_table_name, + # but create_cache_table sanitizes the full name. 
+ name = get_cache_table_name(1, 2, prefix='bad;prefix') + # create_cache_table will strip the semicolon + self.assertIn('bad', name) + + +class CacheTableOperationsTests(TransactionTestCase): + + def _table_exists(self, name): + with connection.cursor() as cursor: + cursor.execute( + "SELECT COUNT(*) FROM information_schema.tables " + "WHERE table_name = %s", [name] + ) + return cursor.fetchone()[0] > 0 + + def test_create_and_drop_cache_table(self): + table_name = 'dwc_cache_test_99' + # create_cache_table auto-prepends an `id` PK; only pass user columns. + columns = [('val', 'VARCHAR(128)')] + create_cache_table(table_name, columns) + self.assertTrue(self._table_exists(table_name)) + + drop_cache_table(table_name) + self.assertFalse(self._table_exists(table_name)) + + def test_cache_table_name_sanitization_in_create(self): + # Semicolons and other special chars are stripped from table name. + dirty_name = 'test;drop--table' + columns = [('val', 'INT')] + create_cache_table(dirty_name, columns) + safe_name = 'testdroptable' + self.assertTrue(self._table_exists(safe_name)) + drop_cache_table(safe_name) + + +class SanitizeColumnNameTests(TestCase): + + def test_simple_name(self): + self.assertEqual(sanitize_column_name('catalogNumber'), 'catalogNumber') + + def test_uri_with_slash(self): + self.assertEqual( + sanitize_column_name('http://rs.tdwg.org/dwc/terms/catalogNumber'), + 'catalogNumber', + ) + + def test_uri_with_hash(self): + self.assertEqual( + sanitize_column_name('http://purl.org/dc/terms#modified'), + 'modified', + ) + + def test_special_chars_replaced(self): + self.assertEqual(sanitize_column_name('some-field.name'), 'some_field_name') + + def test_truncation_at_64(self): + long_name = 'a' * 100 + self.assertEqual(len(sanitize_column_name(long_name)), 64) + + +class BuildSingleCacheTests(TransactionTestCase): + + def _table_exists(self, name): + with connection.cursor() as cursor: + cursor.execute( + "SELECT COUNT(*) FROM information_schema.tables " + 
"WHERE table_name = %s AND table_schema = DATABASE()", [name] + ) + return cursor.fetchone()[0] > 0 + + def _get_columns(self, table_name): + with connection.cursor() as cursor: + cursor.execute( + "SELECT column_name FROM information_schema.columns " + "WHERE table_name = %s AND table_schema = DATABASE() " + "ORDER BY ordinal_position", [table_name] + ) + return [row[0] for row in cursor.fetchall()] + + def test_build_creates_table_with_columns(self): + """Verify cache table creation with correct columns from field terms.""" + table_name = 'dwc_cache_build_test' + columns = [ + ('occurrence_id', 'VARCHAR(256)'), + ('catalogNumber', 'TEXT'), + ('locality', 'TEXT'), + ] + try: + create_cache_table(table_name, columns) + self.assertTrue(self._table_exists(table_name)) + + db_columns = self._get_columns(table_name) + self.assertIn('occurrence_id', db_columns) + self.assertIn('catalogNumber', db_columns) + self.assertIn('locality', db_columns) + # 3 user columns + auto-prepended `id` primary key + self.assertEqual(len(db_columns), 4) + self.assertIn('id', db_columns) + finally: + drop_cache_table(table_name) + diff --git a/specifyweb/backend/export/tests/test_models.py b/specifyweb/backend/export/tests/test_models.py new file mode 100644 index 00000000000..03245b65345 --- /dev/null +++ b/specifyweb/backend/export/tests/test_models.py @@ -0,0 +1,251 @@ +from django.db import IntegrityError +from django.test import TestCase + +from specifyweb.specify.tests.test_api import MainSetupTearDown +from specifyweb.specify.models import Spquery, Spqueryfield +from specifyweb.backend.export.models import ( + SchemaMapping, ExportDataSet, ExportDataSetExtension, CacheTableMeta, +) + + +class SchemaMappingTests(MainSetupTearDown, TestCase): + + def _make_query(self, name='test query'): + return Spquery.objects.create( + name=name, + contextname='CollectionObject', + contexttableid=1, + createdbyagent=self.agent, + specifyuser=self.specifyuser, + ) + + def 
test_create_schema_mapping(self): + query = self._make_query() + mapping = SchemaMapping.objects.create( + query=query, + mapping_type='Core', + name='DwC Core Mapping', + createdbyagent=self.agent, + specifyuser=self.specifyuser, + ) + mapping.refresh_from_db() + self.assertEqual(mapping.query_id, query.pk) + self.assertEqual(mapping.mapping_type, 'Core') + self.assertEqual(mapping.name, 'DwC Core Mapping') + self.assertFalse(mapping.is_default) + + def test_schema_mapping_query_onetoone(self): + query = self._make_query() + SchemaMapping.objects.create( + query=query, + mapping_type='Core', + name='First', + specifyuser=self.specifyuser, + ) + with self.assertRaises(IntegrityError): + SchemaMapping.objects.create( + query=query, + mapping_type='Extension', + name='Second', + specifyuser=self.specifyuser, + ) + + def test_schema_mapping_cascade_delete(self): + query = self._make_query() + SchemaMapping.objects.create( + query=query, + mapping_type='Core', + name='Cascade Test', + specifyuser=self.specifyuser, + ) + self.assertEqual(SchemaMapping.objects.count(), 1) + query.delete() + self.assertEqual(SchemaMapping.objects.count(), 0) + + def test_spqueryfield_term_nullable(self): + query = self._make_query() + + # Field without DwC term — backward compatible + field_no_term = Spqueryfield.objects.create( + query=query, + fieldname='catalogNumber', + operstart=0, + sorttype=0, + position=0, + startvalue='', + stringid='1.collectionobject.catalogNumber', + tablelist='1', + ) + field_no_term.refresh_from_db() + self.assertIsNone(field_no_term.term) + self.assertFalse(field_no_term.isstatic) + self.assertIsNone(field_no_term.staticvalue) + + # Field with DwC term + field_with_term = Spqueryfield.objects.create( + query=query, + fieldname='catalogNumber', + operstart=0, + sorttype=0, + position=1, + startvalue='', + stringid='1.collectionobject.catalogNumber', + tablelist='1', + term='http://rs.tdwg.org/dwc/terms/catalogNumber', + isstatic=False, + ) + 
field_with_term.refresh_from_db() + self.assertEqual( + field_with_term.term, + 'http://rs.tdwg.org/dwc/terms/catalogNumber', + ) + + # Static field + field_static = Spqueryfield.objects.create( + query=query, + fieldname='catalogNumber', + operstart=0, + sorttype=0, + position=2, + startvalue='', + stringid='1.collectionobject.catalogNumber', + tablelist='1', + term='http://rs.tdwg.org/dwc/terms/basisOfRecord', + isstatic=True, + staticvalue='PreservedSpecimen', + ) + field_static.refresh_from_db() + self.assertTrue(field_static.isstatic) + self.assertEqual(field_static.staticvalue, 'PreservedSpecimen') + + +class ExportDataSetTests(MainSetupTearDown, TestCase): + + def _make_mapping(self, name='test mapping'): + query = Spquery.objects.create( + name='q', + contextname='CollectionObject', + contexttableid=1, + createdbyagent=self.agent, + specifyuser=self.specifyuser, + ) + return SchemaMapping.objects.create( + query=query, mapping_type='Core', name=name, + specifyuser=self.specifyuser, + ) + + def test_create_export_dataset(self): + mapping = self._make_mapping() + ds = ExportDataSet.objects.create( + exportname='My Export', + filename='export.zip', + coremapping=mapping, + collection=self.collection, + ) + ds.refresh_from_db() + self.assertEqual(ds.exportname, 'My Export') + self.assertEqual(ds.filename, 'export.zip') + self.assertFalse(ds.rss) + self.assertIsNone(ds.frequency) + self.assertIsNone(ds.lastexported) + self.assertEqual(ds.coremapping_id, mapping.pk) + self.assertEqual(ds.collection_id, self.collection.pk) + + def test_export_dataset_extension(self): + core = self._make_mapping('core') + ext_mapping = self._make_mapping('ext') + ds = ExportDataSet.objects.create( + exportname='DS', filename='ds.zip', + coremapping=core, collection=self.collection, + ) + ext = ExportDataSetExtension.objects.create( + exportdataset=ds, schemamapping=ext_mapping, sortorder=1, + ) + ext.refresh_from_db() + self.assertEqual(ext.exportdataset_id, ds.pk) + 
self.assertEqual(ext.schemamapping_id, ext_mapping.pk) + self.assertEqual(ext.sortorder, 1) + + # unique_together enforced + with self.assertRaises(IntegrityError): + ExportDataSetExtension.objects.create( + exportdataset=ds, schemamapping=ext_mapping, sortorder=2, + ) + + def test_clone_mapping(self): + """Clone endpoint creates new SpQuery, SpQueryFields, and SchemaMapping.""" + original_mapping = self._make_mapping('Original') + original_query = original_mapping.query + + # Add query fields to the original query + Spqueryfield.objects.create( + query=original_query, + fieldname='catalogNumber', + operstart=0, + sorttype=0, + position=0, + startvalue='', + stringid='1.collectionobject.catalogNumber', + tablelist='1', + term='http://rs.tdwg.org/dwc/terms/catalogNumber', + ) + Spqueryfield.objects.create( + query=original_query, + fieldname='locality', + operstart=0, + sorttype=0, + position=1, + startvalue='', + stringid='1.collectionobject.locality', + tablelist='1', + term='http://rs.tdwg.org/dwc/terms/locality', + isstatic=True, + staticvalue='Some Place', + ) + + from django.test import RequestFactory + from specifyweb.backend.export.views import clone_mapping + + factory = RequestFactory() + request = factory.post(f'/export/clone_mapping/{original_mapping.id}/') + request.user = self.specifyuser + request.specify_user = self.specifyuser + + # Mock permission check — in tests, permissions are not configured + from unittest.mock import patch + with patch('specifyweb.backend.export.views.check_permission_targets'): + response = clone_mapping(request, original_mapping.id) + + self.assertEqual(response.status_code, 200) + import json + data = json.loads(response.content) + self.assertIn('id', data) + self.assertEqual(data['name'], 'Copy of Original') + self.assertFalse(data['isDefault']) + self.assertNotEqual(data['queryId'], original_query.id) + + # Verify new query has cloned fields + new_query = Spquery.objects.get(id=data['queryId']) + 
self.assertEqual(new_query.name, f'Copy of {original_query.name}') + self.assertEqual(new_query.fields.count(), 2) + + # Verify field data was cloned + cloned_field = new_query.fields.get(position=0) + self.assertEqual(cloned_field.term, 'http://rs.tdwg.org/dwc/terms/catalogNumber') + cloned_static = new_query.fields.get(position=1) + self.assertTrue(cloned_static.isstatic) + self.assertEqual(cloned_static.staticvalue, 'Some Place') + + def test_cache_table_meta(self): + mapping = self._make_mapping() + meta = CacheTableMeta.objects.create( + schemamapping=mapping, + collection=self.collection, + tablename='dwc_cache_1_4', + ) + meta.refresh_from_db() + self.assertEqual(meta.schemamapping_id, mapping.pk) + self.assertEqual(meta.tablename, 'dwc_cache_1_4') + self.assertIsNone(meta.lastbuilt) + self.assertIsNone(meta.rowcount) + self.assertEqual(meta.buildstatus, 'idle') diff --git a/specifyweb/backend/export/urls.py b/specifyweb/backend/export/urls.py index 897ad233f44..34a59fb5a41 100644 --- a/specifyweb/backend/export/urls.py +++ b/specifyweb/backend/export/urls.py @@ -8,4 +8,8 @@ path('make_dwca/', views.export), path('extract_query//', views.extract_query), path('force_update/', views.force_update), + path('schema_terms/', views.get_schema_terms), + path('list_mappings/', views.list_mappings), + path('list_export_datasets/', views.list_export_datasets), + path('clone_mapping//', views.clone_mapping), ] diff --git a/specifyweb/backend/export/views.py b/specifyweb/backend/export/views.py index 9381d36166d..1e459d2597c 100644 --- a/specifyweb/backend/export/views.py +++ b/specifyweb/backend/export/views.py @@ -190,3 +190,137 @@ def extract_query(request, query_id): """ query = Spquery.objects.get(id=query_id) return HttpResponse(extract(query), 'text/xml') + +class SchemaMappingPT(PermissionTarget): + resource = "/export/schema_mapping" + create = PermissionTargetAction() + read = PermissionTargetAction() + update = PermissionTargetAction() + delete = 
PermissionTargetAction() + +class ExportPackagePT(PermissionTarget): + resource = "/export/export_package" + create = PermissionTargetAction() + read = PermissionTargetAction() + execute = PermissionTargetAction() + +@require_GET +@login_maybe_required +def get_schema_terms(request): + """Serve the DwC schema terms vocabulary as JSON.""" + terms_path = os.path.join(os.path.dirname(__file__), 'schema_terms.json') + with open(terms_path) as f: + return HttpResponse(f.read(), content_type='application/json') + + +@require_GET +@login_maybe_required +def list_mappings(request): + """List all schema mappings.""" + check_permission_targets(None, request.specify_user.id, [SchemaMappingPT.read]) + from specifyweb.backend.export.models import SchemaMapping + mappings = SchemaMapping.objects.all().values('id', 'name', 'mapping_type', 'is_default', 'query_id') + return HttpResponse(json.dumps([ + { + 'id': m['id'], + 'name': m['name'], + 'mappingType': m['mapping_type'], + 'isDefault': m['is_default'], + 'queryId': m['query_id'], + } + for m in mappings + ]), content_type='application/json') + + +@require_GET +@login_maybe_required +def list_export_datasets(request): + """List all export datasets.""" + check_permission_targets(None, request.specify_user.id, [ExportPackagePT.read]) + from specifyweb.backend.export.models import ExportDataSet + datasets = ExportDataSet.objects.all().values( + 'id', 'exportname', 'filename', 'rss', 'frequency', + 'coremapping_id', 'collection_id', 'lastexported' + ) + return HttpResponse(json.dumps([ + { + 'id': d['id'], + 'exportName': d['exportname'], + 'fileName': d['filename'], + 'isRss': d['rss'], + 'frequency': d['frequency'], + 'coreMappingId': d['coremapping_id'], + 'collectionId': d['collection_id'], + 'lastExported': d['lastexported'].isoformat() if d['lastexported'] else None, + } + for d in datasets + ]), content_type='application/json') + + +@require_POST +@login_maybe_required +def clone_mapping(request, mapping_id): + """Deep-copy 
a SchemaMapping: creates new SpQuery with all SpQueryFields, + creates new SchemaMapping pointing to the new query.""" + check_permission_targets(None, request.specify_user.id, [SchemaMappingPT.create]) + + from specifyweb.backend.export.models import SchemaMapping + from specifyweb.specify.models import Spqueryfield + + try: + original = SchemaMapping.objects.select_related('query').get(id=mapping_id) + except SchemaMapping.DoesNotExist: + raise Http404 + + # Clone the SpQuery + old_query = original.query + new_query = Spquery.objects.create( + name=f'Copy of {old_query.name}', + contextname=old_query.contextname, + contexttableid=old_query.contexttableid, + specifyuser=request.specify_user, + isfavorite=False, + ordinal=old_query.ordinal, + searchsynonymy=old_query.searchsynonymy, + selectdistinct=old_query.selectdistinct, + smushed=old_query.smushed, + countonly=old_query.countonly, + ) + + # Clone all query fields + for field in old_query.fields.all(): + Spqueryfield.objects.create( + query=new_query, + fieldname=field.fieldname, + stringid=field.stringid, + tablelist=field.tablelist, + operstart=field.operstart, + startvalue=field.startvalue, + position=field.position, + sorttype=field.sorttype, + isdisplay=field.isdisplay, + isnot=field.isnot, + isrelfld=field.isrelfld, + formatname=field.formatname, + term=field.term, + isstatic=field.isstatic, + staticvalue=field.staticvalue, + ) + + # Clone the SchemaMapping + new_mapping = SchemaMapping.objects.create( + query=new_query, + mapping_type=original.mapping_type, + name=f'Copy of {original.name}', + is_default=False, + specifyuser=request.specify_user, + createdbyagent=request.specify_user.agents.first(), + ) + + return HttpResponse(json.dumps({ + 'id': new_mapping.id, + 'name': new_mapping.name, + 'mappingType': new_mapping.mapping_type, + 'isDefault': False, + 'queryId': new_query.id, + }), content_type='application/json') diff --git a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts 
b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts index cfb1bbf64ab..00bb47b7e9b 100644 --- a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts +++ b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts @@ -131,6 +131,18 @@ const rawUserTools = ensure>>>()({ url: '/specify/overlay/force-update-feed/', icon: icons.rss, }, + schemaMapper: { + title: headerText.schemaMapper(), + enabled: () => hasPermission('/export/schema_mapping', 'read'), + url: '/specify/overlay/schema-mapper/', + icon: icons.documentSearch, + }, + exportPackages: { + title: headerText.exportPackages(), + enabled: () => hasPermission('/export/export_package', 'read'), + url: '/specify/overlay/export-packages/', + icon: icons.archive, + }, }, [commonText.import()]: { localityUpdate: { diff --git a/specifyweb/frontend/js_src/lib/components/Router/OverlayRoutes.tsx b/specifyweb/frontend/js_src/lib/components/Router/OverlayRoutes.tsx index 86550dc04d1..86945d8135e 100644 --- a/specifyweb/frontend/js_src/lib/components/Router/OverlayRoutes.tsx +++ b/specifyweb/frontend/js_src/lib/components/Router/OverlayRoutes.tsx @@ -201,6 +201,22 @@ export const overlayRoutes: RA = [ ({ ForceUpdateFeedOverlay }) => ForceUpdateFeedOverlay ), }, + { + path: 'schema-mapper', + title: headerText.schemaMapper(), + element: () => + import('../SchemaMapper/index').then( + ({ SchemaMapperOverlay }) => SchemaMapperOverlay + ), + }, + { + path: 'export-packages', + title: headerText.exportPackages(), + element: () => + import('../SchemaMapper/ExportPackages/index').then( + ({ ExportPackagesOverlay }) => ExportPackagesOverlay + ), + }, { path: 'about', title: welcomeText.aboutSpecify(), diff --git a/specifyweb/frontend/js_src/lib/components/SchemaConfig/Field.tsx b/specifyweb/frontend/js_src/lib/components/SchemaConfig/Field.tsx index dde71819d9f..a856b0c2725 100644 --- a/specifyweb/frontend/js_src/lib/components/SchemaConfig/Field.tsx +++ 
b/specifyweb/frontend/js_src/lib/components/SchemaConfig/Field.tsx @@ -1,6 +1,7 @@ import React from 'react'; import { commonText } from '../../localization/common'; +import { headerText } from '../../localization/header'; import { resourcesText } from '../../localization/resources'; import { schemaText } from '../../localization/schema'; import { Input, Label } from '../Atoms/Form'; @@ -103,6 +104,14 @@ export function SchemaConfigField({ schemaData={schemaData} onFormatted={handleFormatted} /> +
+ + {headerText.darwinCore()} + +
+

{headerText.noDwcTerms()}

+
+
); } diff --git a/specifyweb/frontend/js_src/lib/components/SchemaMapper/ExportPackages/index.tsx b/specifyweb/frontend/js_src/lib/components/SchemaMapper/ExportPackages/index.tsx new file mode 100644 index 00000000000..b3b2f05dbc7 --- /dev/null +++ b/specifyweb/frontend/js_src/lib/components/SchemaMapper/ExportPackages/index.tsx @@ -0,0 +1,71 @@ +import React from 'react'; + +import { commonText } from '../../../localization/common'; +import { headerText } from '../../../localization/header'; +import { Button } from '../../Atoms/Button'; +import { icons } from '../../Atoms/Icons'; +import { Dialog } from '../../Molecules/Dialog'; +import { OverlayContext } from '../../Router/Router'; + +type ExportPackageRecord = { + readonly id: number; + readonly exportName: string; + readonly fileName: string; + readonly isRss: boolean; + readonly lastExported: string | undefined; +}; + +export function ExportPackagesOverlay(): JSX.Element { + const handleClose = React.useContext(OverlayContext); + const [packages, setPackages] = React.useState< + ReadonlyArray + >([]); + + React.useEffect(() => { + fetch('/export/list_export_datasets/') + .then(async (response) => response.json()) + .then(setPackages) + .catch(() => {}); + }, []); + + return ( + {commonText.close()}} + header={headerText.exportPackages()} + icon={icons.archive} + onClose={handleClose} + > +
+ {packages.length === 0 ? ( +

No export packages configured

+ ) : ( +
    + {packages.map((pkg) => ( +
  • +
    + {pkg.exportName} + + ({pkg.fileName}) + + {pkg.isRss && ( + + RSS + + )} +
    +
    + Edit + Clone +
    +
  • + ))} +
+ )} + New Export Package +
+
+ ); +} diff --git a/specifyweb/frontend/js_src/lib/components/SchemaMapper/MappingList.tsx b/specifyweb/frontend/js_src/lib/components/SchemaMapper/MappingList.tsx new file mode 100644 index 00000000000..1f9ca75fc58 --- /dev/null +++ b/specifyweb/frontend/js_src/lib/components/SchemaMapper/MappingList.tsx @@ -0,0 +1,49 @@ +import React from 'react'; + +import { commonText } from '../../localization/common'; +import { Button } from '../Atoms/Button'; +import { icons } from '../Atoms/Icons'; +import type { MappingRecord } from './types'; + +export function MappingList({ + mappings, + onEdit: handleEdit, + onClone: handleClone, +}: { + readonly mappings: ReadonlyArray; + readonly onEdit: (id: number) => void; + readonly onClone: (id: number) => void; +}): JSX.Element { + return ( +
    + {mappings.map((mapping) => ( +
  • + + {mapping.isDefault && ( + + {icons.key} + + )} + {mapping.name} + + + handleEdit(mapping.id)} + /> + handleClone(mapping.id)} + /> + +
  • + ))} +
+ ); +} diff --git a/specifyweb/frontend/js_src/lib/components/SchemaMapper/TermDropdown.tsx b/specifyweb/frontend/js_src/lib/components/SchemaMapper/TermDropdown.tsx new file mode 100644 index 00000000000..842d1d65648 --- /dev/null +++ b/specifyweb/frontend/js_src/lib/components/SchemaMapper/TermDropdown.tsx @@ -0,0 +1,88 @@ +import React from 'react'; + +import { commonText } from '../../localization/common'; +import { Button } from '../Atoms/Button'; +import { Input } from '../Atoms/Form'; +import type { DwcTerm } from './types'; + +export function TermDropdown({ + selectedIri, + vocabularyTerms, + onChange: handleChange, +}: { + readonly selectedIri: string | undefined; + readonly vocabularyTerms: Readonly>; + readonly onChange: (iri: string | undefined) => void; +}): JSX.Element { + const [search, setSearch] = React.useState(''); + const [isOpen, setIsOpen] = React.useState(false); + + const filteredTerms = React.useMemo(() => { + const lowerSearch = search.toLowerCase(); + return Object.entries(vocabularyTerms).filter( + ([iri, term]) => + term.label.toLowerCase().includes(lowerSearch) || + iri.toLowerCase().includes(lowerSearch) + ); + }, [vocabularyTerms, search]); + + const selectedTerm = + selectedIri === undefined ? undefined : vocabularyTerms[selectedIri]; + + return ( +
+
+ setIsOpen(true)} + onValueChange={(value) => { + setSearch(value); + setIsOpen(true); + }} + /> + {selectedIri !== undefined && ( + { + handleChange(undefined); + setSearch(''); + }} + /> + )} +
+ {isOpen && ( +
    + {filteredTerms.length === 0 ? ( +
  • + {commonText.noResults()} +
  • + ) : ( + filteredTerms.map(([iri, term]) => ( +
  • + +
  • + )) + )} +
+ )} +
+ ); +} diff --git a/specifyweb/frontend/js_src/lib/components/SchemaMapper/VocabularyDialog.tsx b/specifyweb/frontend/js_src/lib/components/SchemaMapper/VocabularyDialog.tsx new file mode 100644 index 00000000000..f3d1343a056 --- /dev/null +++ b/specifyweb/frontend/js_src/lib/components/SchemaMapper/VocabularyDialog.tsx @@ -0,0 +1,78 @@ +import React from 'react'; + +import { commonText } from '../../localization/common'; +import { headerText } from '../../localization/header'; +import { Button } from '../Atoms/Button'; +import { icons } from '../Atoms/Icons'; +import { Dialog } from '../Molecules/Dialog'; +import type { DwcVocabulary } from './types'; + +const vocabularies: ReadonlyArray = [ + { + key: 'dwc', + name: 'Darwin Core', + abbreviation: 'dwc', + description: + 'Core terms for sharing biodiversity occurrence data and related information.', + terms: {}, + }, + { + key: 'dc', + name: 'Dublin Core', + abbreviation: 'dc', + description: + 'General-purpose metadata terms for describing resources.', + terms: {}, + }, + { + key: 'ac', + name: 'Audubon Core', + abbreviation: 'ac', + description: + 'Terms for describing biodiversity-related multimedia resources.', + terms: {}, + }, +]; + +export function VocabularyDialog({ + onClose: handleClose, + onSelected: handleSelected, +}: { + readonly onClose: () => void; + readonly onSelected: (vocabularyKey: string) => void; +}): JSX.Element { + return ( + {commonText.cancel()} + } + header={headerText.selectVocabulary()} + icon={icons.documentSearch} + onClose={handleClose} + > +
    + {vocabularies.map((vocabulary) => ( +
  • + +
  • + ))} +
+
+ ); +} diff --git a/specifyweb/frontend/js_src/lib/components/SchemaMapper/__tests__/vocabulary.test.ts b/specifyweb/frontend/js_src/lib/components/SchemaMapper/__tests__/vocabulary.test.ts new file mode 100644 index 00000000000..29e50de00a6 --- /dev/null +++ b/specifyweb/frontend/js_src/lib/components/SchemaMapper/__tests__/vocabulary.test.ts @@ -0,0 +1,72 @@ +import { findTermByIri } from '../vocabulary'; +import type { SchemaTerms } from '../vocabulary'; + +const mockVocabularies: SchemaTerms['vocabularies'] = { + dwc: { + name: 'Darwin Core', + abbreviation: 'dwc', + vocabularyURI: 'http://rs.tdwg.org/dwc/terms/', + description: 'Darwin Core standard terms for biodiversity data', + terms: { + 'http://rs.tdwg.org/dwc/terms/occurrenceID': { + name: 'occurrenceID', + description: 'An identifier for the Occurrence', + group: 'Occurrence', + mappingPaths: [['CollectionObject', 'guid']], + }, + 'http://rs.tdwg.org/dwc/terms/catalogNumber': { + name: 'catalogNumber', + description: + 'An identifier for the record within the data set or collection', + group: 'Occurrence', + mappingPaths: [['CollectionObject', 'catalogNumber']], + }, + }, + }, + dc: { + name: 'Dublin Core', + abbreviation: 'dc', + vocabularyURI: 'http://purl.org/dc/terms/', + description: 'Dublin Core metadata terms', + terms: { + 'http://purl.org/dc/terms/modified': { + name: 'modified', + description: + 'The most recent date-time on which the resource was changed', + group: 'Record', + mappingPaths: [['CollectionObject', 'timestampModified']], + }, + }, + }, +}; + +describe('findTermByIri', () => { + test('returns correct term and vocabulary for a known DwC IRI', () => { + const result = findTermByIri( + mockVocabularies, + 'http://rs.tdwg.org/dwc/terms/occurrenceID' + ); + expect(result).toBeDefined(); + expect(result!.term.name).toBe('occurrenceID'); + expect(result!.term.group).toBe('Occurrence'); + expect(result!.vocabulary.abbreviation).toBe('dwc'); + }); + + test('returns correct term for a 
Dublin Core IRI', () => { + const result = findTermByIri( + mockVocabularies, + 'http://purl.org/dc/terms/modified' + ); + expect(result).toBeDefined(); + expect(result!.term.name).toBe('modified'); + expect(result!.vocabulary.abbreviation).toBe('dc'); + }); + + test('returns undefined for an unknown IRI', () => { + const result = findTermByIri( + mockVocabularies, + 'http://example.org/unknown/term' + ); + expect(result).toBeUndefined(); + }); +}); diff --git a/specifyweb/frontend/js_src/lib/components/SchemaMapper/index.tsx b/specifyweb/frontend/js_src/lib/components/SchemaMapper/index.tsx new file mode 100644 index 00000000000..126652f6715 --- /dev/null +++ b/specifyweb/frontend/js_src/lib/components/SchemaMapper/index.tsx @@ -0,0 +1,79 @@ +import React from 'react'; + +import { commonText } from '../../localization/common'; +import { headerText } from '../../localization/header'; +import { Button } from '../Atoms/Button'; +import { icons } from '../Atoms/Icons'; +import { Dialog } from '../Molecules/Dialog'; +import { OverlayContext } from '../Router/Router'; +import { MappingList } from './MappingList'; +import type { MappingRecord } from './types'; + +export function SchemaMapperOverlay(): JSX.Element { + const handleClose = React.useContext(OverlayContext); + + // TODO: fetch schema mappings from API + const coreMappings: ReadonlyArray<MappingRecord> = []; + const extensionMappings: ReadonlyArray<MappingRecord> = []; + + return ( + {commonText.close()} + } + header={headerText.schemaMapper()} + icon={icons.documentSearch} + onClose={handleClose} + > +
+
+

+ {headerText.coreMappings()} +

+ {coreMappings.length === 0 ? ( +

+ {headerText.noCoreMappings()} +

+ ) : ( + { + /* TODO: clone mapping */ + }} + onEdit={(_id) => { + /* TODO: open mapping editor */ + }} + /> + )} +
+
+

+ {headerText.extensionMappings()} +

+ {extensionMappings.length === 0 ? ( +

+ {headerText.noExtensionMappings()} +

+ ) : ( + { + /* TODO: clone mapping */ + }} + onEdit={(_id) => { + /* TODO: open mapping editor */ + }} + /> + )} +
+ { + /* TODO: open new mapping dialog */ + }} + > + {headerText.newMapping()} + +
+
+ ); +} diff --git a/specifyweb/frontend/js_src/lib/components/SchemaMapper/types.ts b/specifyweb/frontend/js_src/lib/components/SchemaMapper/types.ts new file mode 100644 index 00000000000..2c26e46b6fc --- /dev/null +++ b/specifyweb/frontend/js_src/lib/components/SchemaMapper/types.ts @@ -0,0 +1,33 @@ +export type MappingRecord = { + readonly id: number; + readonly name: string; + readonly mappingType: 'Core' | 'Extension'; + readonly isDefault: boolean; + readonly queryId: number; +}; + +export type MappingField = { + readonly id: number; + readonly position: number; + readonly stringId: string; + readonly fieldName: string; + readonly term: string | undefined; + readonly isStatic: boolean; + readonly staticValue: string | undefined; +}; + +export type DwcTerm = { + readonly iri: string; + readonly label: string; + readonly definition: string; + readonly comments: string; + readonly examples: string; +}; + +export type DwcVocabulary = { + readonly key: string; + readonly name: string; + readonly abbreviation: string; + readonly description: string; + readonly terms: Readonly<Record<string, DwcTerm>>; +}; diff --git a/specifyweb/frontend/js_src/lib/components/SchemaMapper/vocabulary.ts b/specifyweb/frontend/js_src/lib/components/SchemaMapper/vocabulary.ts new file mode 100644 index 00000000000..0aa501ff195 --- /dev/null +++ b/specifyweb/frontend/js_src/lib/components/SchemaMapper/vocabulary.ts @@ -0,0 +1,40 @@ +// Types and fetch function for DwC schema terms vocabulary + +export type DwcTerm = { + readonly name: string; + readonly description: string; + readonly group: string; + readonly mappingPaths: ReadonlyArray<ReadonlyArray<string>>; +}; + +export type Vocabulary = { + readonly name: string; + readonly abbreviation: string; + readonly vocabularyURI: string; + readonly description: string; + readonly terms: Readonly<Record<string, DwcTerm>>; +}; + +export type SchemaTerms = { + readonly vocabularies: Readonly<Record<string, Vocabulary>>; +}; + +let cachedTerms: SchemaTerms | undefined; + +export async function fetchSchemaTerms(): Promise<SchemaTerms> { + if
(cachedTerms !== undefined) return cachedTerms; + const response = await fetch('/export/schema_terms/'); + cachedTerms = await response.json(); + return cachedTerms!; +} + +export function findTermByIri( + vocabularies: SchemaTerms['vocabularies'], + iri: string +): { vocabulary: Vocabulary; term: DwcTerm } | undefined { + for (const vocab of Object.values(vocabularies)) { + const term = vocab.terms[iri]; + if (term !== undefined) return { vocabulary: vocab, term }; + } + return undefined; +} diff --git a/specifyweb/frontend/js_src/lib/localization/header.ts b/specifyweb/frontend/js_src/lib/localization/header.ts index 152298d4370..bc21685cbe6 100644 --- a/specifyweb/frontend/js_src/lib/localization/header.ts +++ b/specifyweb/frontend/js_src/lib/localization/header.ts @@ -497,6 +497,106 @@ export const headerText = createDictionary({ 'pt-br': 'Documentação', 'hr-hr': 'Dokumentacija', }, + schemaMapper: { + 'en-us': 'Schema Mapper', + 'de-ch': 'Schema-Zuordnung', + 'es-es': 'Mapeador de esquema', + 'fr-fr': 'Mappeur de schéma', + 'pt-br': 'Mapeador de esquema', + 'ru-ru': 'Сопоставление схемы', + 'uk-ua': 'Зіставлення схеми', + 'hr-hr': 'Mapiranje sheme', + }, + exportPackages: { + 'en-us': 'Export Packages', + 'de-ch': 'Export-Pakete', + 'es-es': 'Paquetes de exportación', + 'fr-fr': "Paquets d'exportation", + 'pt-br': 'Pacotes de exportação', + 'ru-ru': 'Пакеты экспорта', + 'uk-ua': 'Пакети експорту', + 'hr-hr': 'Paketi za izvoz', + }, + coreMappings: { + 'en-us': 'Core Mappings', + 'de-ch': 'Kern-Zuordnungen', + 'es-es': 'Mapeos principales', + 'fr-fr': 'Mappages principaux', + 'pt-br': 'Mapeamentos principais', + 'ru-ru': 'Основные сопоставления', + 'uk-ua': 'Основні зіставлення', + 'hr-hr': 'Osnovna mapiranja', + }, + extensionMappings: { + 'en-us': 'Extension Mappings', + 'de-ch': 'Erweiterungs-Zuordnungen', + 'es-es': 'Mapeos de extensión', + 'fr-fr': "Mappages d'extension", + 'pt-br': 'Mapeamentos de extensão', + 'ru-ru': 'Сопоставления расширений', + 
'uk-ua': 'Зіставлення розширень', + 'hr-hr': 'Mapiranja proširenja', + }, + noCoreMappings: { + 'en-us': 'No core mappings configured', + 'de-ch': 'Keine Kern-Zuordnungen konfiguriert', + 'es-es': 'No hay mapeos principales configurados', + 'fr-fr': 'Aucun mappage principal configuré', + 'pt-br': 'Nenhum mapeamento principal configurado', + 'ru-ru': 'Основные сопоставления не настроены', + 'uk-ua': 'Основні зіставлення не налаштовані', + 'hr-hr': 'Nema konfiguriranih osnovnih mapiranja', + }, + noExtensionMappings: { + 'en-us': 'No extension mappings configured', + 'de-ch': 'Keine Erweiterungs-Zuordnungen konfiguriert', + 'es-es': 'No hay mapeos de extensión configurados', + 'fr-fr': "Aucun mappage d'extension configuré", + 'pt-br': 'Nenhum mapeamento de extensão configurado', + 'ru-ru': 'Сопоставления расширений не настроены', + 'uk-ua': 'Зіставлення розширень не налаштовані', + 'hr-hr': 'Nema konfiguriranih mapiranja proširenja', + }, + newMapping: { + 'en-us': 'New Mapping', + 'de-ch': 'Neue Zuordnung', + 'es-es': 'Nuevo mapeo', + 'fr-fr': 'Nouveau mappage', + 'pt-br': 'Novo mapeamento', + 'ru-ru': 'Новое сопоставление', + 'uk-ua': 'Нове зіставлення', + 'hr-hr': 'Novo mapiranje', + }, + selectVocabulary: { + 'en-us': 'Select Vocabulary', + 'de-ch': 'Vokabular auswählen', + 'es-es': 'Seleccionar vocabulario', + 'fr-fr': 'Sélectionner le vocabulaire', + 'pt-br': 'Selecionar vocabulário', + 'ru-ru': 'Выбрать словарь', + 'uk-ua': 'Вибрати словник', + 'hr-hr': 'Odaberite rječnik', + }, + darwinCore: { + 'en-us': 'Darwin Core', + 'de-ch': 'Darwin Core', + 'es-es': 'Darwin Core', + 'fr-fr': 'Darwin Core', + 'pt-br': 'Darwin Core', + 'ru-ru': 'Darwin Core', + 'uk-ua': 'Darwin Core', + 'hr-hr': 'Darwin Core', + }, + noDwcTerms: { + 'en-us': 'No Darwin Core terms mapped to this field', + 'de-ch': 'Keine Darwin-Core-Begriffe diesem Feld zugeordnet', + 'es-es': 'No hay términos de Darwin Core mapeados a este campo', + 'fr-fr': 'Aucun terme Darwin Core mappé à ce champ', + 
'pt-br': 'Nenhum termo Darwin Core mapeado para este campo', + 'ru-ru': 'Нет терминов Darwin Core, сопоставленных с этим полем', + 'uk-ua': 'Немає термінів Darwin Core, зіставлених із цим полем', + 'hr-hr': 'Nema Darwin Core pojmova mapiranih na ovo polje', + }, chronostratigraphicChart: { 'en-us': 'Chronostratigraphic Chart', 'de-ch': 'Chronostratigraphische Tabelle',