diff --git a/arches/app/datatypes/base.py b/arches/app/datatypes/base.py
index 0bab29928d6..efa6adabc5d 100644
--- a/arches/app/datatypes/base.py
+++ b/arches/app/datatypes/base.py
@@ -193,7 +193,7 @@ def get_tile_data(self, tile):
except:
data = tile["data"]
provisionaledits = tile["provisionaledits"]
- if data is not None and len(list(data.keys())) > 0:
+ if data is not None and any(data.values()):
return data
elif provisionaledits is not None and len(list(provisionaledits.keys())) > 0:
if len(list(provisionaledits.keys())) > 1:
diff --git a/arches/app/datatypes/datatypes.py b/arches/app/datatypes/datatypes.py
index 04213df1067..b2322becaf7 100644
--- a/arches/app/datatypes/datatypes.py
+++ b/arches/app/datatypes/datatypes.py
@@ -1535,7 +1535,7 @@ def append_to_document(self, document, nodevalue, nodeid, tile, provisional=Fals
for f in tile.data[str(nodeid)]:
val = {"string": f["name"], "nodegroup_id": tile.nodegroup_id, "provisional": provisional}
document["strings"].append(val)
- except KeyError as e:
+ except (KeyError, TypeError) as e:
for k, pe in tile.provisionaledits.items():
for f in pe["value"][nodeid]:
val = {"string": f["name"], "nodegroup_id": tile.nodegroup_id, "provisional": provisional}
diff --git a/arches/app/datatypes/url.py b/arches/app/datatypes/url.py
index c2f2d36dc7e..cf496dd8718 100644
--- a/arches/app/datatypes/url.py
+++ b/arches/app/datatypes/url.py
@@ -100,9 +100,12 @@ def transform_value_for_tile(self, value, **kwargs):
try:
return ast.literal_eval(value)
except:
- # this will probably fail validation, but that is ok. We need the error to report the value.
- return value
+ if isinstance(value, dict):
+ return value
+ else:
+ return {"url": value, "url_label": ""}
except BaseException:
+ # this will probably fail validation, but that is ok. We need the error to report the value.
if isinstance(value, dict):
return value
else:
diff --git a/arches/app/etl_modules/base_data_editor.py b/arches/app/etl_modules/base_data_editor.py
index 7aa8b118575..8031326304b 100644
--- a/arches/app/etl_modules/base_data_editor.py
+++ b/arches/app/etl_modules/base_data_editor.py
@@ -1,18 +1,17 @@
from datetime import datetime
import json
import logging
-import requests
-from urllib.parse import urlparse, urlunparse
+from urllib.parse import urlsplit, parse_qs
import uuid
from django.db import connection
-from django.db.models.functions import Lower
+from django.http import HttpRequest
from django.utils.translation import ugettext as _
from arches.app.datatypes.datatypes import DataTypeFactory
from arches.app.etl_modules.base_import_module import BaseImportModule
from arches.app.models.models import GraphModel, Node
from arches.app.models.system_settings import settings
import arches.app.tasks as tasks
-from arches.app.utils.index_database import index_resources_by_transaction
+from arches.app.views.search import search_results
logger = logging.getLogger(__name__)
@@ -101,11 +100,16 @@ def log_event(self, cursor, status):
)
def get_resourceids_from_search_url(self, search_url):
- parsed_url = urlparse(search_url)
- search_result_url = urlunparse(parsed_url._replace(path="/search/resources"))
- response = requests.get(search_result_url + "&export=true")
- search_results = response.json()["results"]["hits"]["hits"]
- return [result["_source"]["resourceinstanceid"] for result in search_results]
+ request = HttpRequest()
+ request.user = self.request.user
+ request.method = "GET"
+ request.GET["export"] = True
+ params = parse_qs(urlsplit(search_url).query)
+ for k, v in params.items():
+ request.GET[k] = v[0]
+ response = search_results(request)
+ results = json.loads(response.content)["results"]["hits"]["hits"]
+ return [result["_source"]["resourceinstanceid"] for result in results]
def validate(self, request):
return {"success": True, "data": {}}
@@ -168,26 +172,15 @@ def get_preview_data(self, graph_id, node_id, resourceids, language_code, old_te
+ text_query
)
- tile_sub_query = (
+ resource_count_query = (
"""
- AND resourceinstanceid IN (SELECT DISTINCT t.resourceinstanceid FROM tiles t, nodes n
+ SELECT count(DISTINCT t.resourceinstanceid) FROM tiles t, nodes n
WHERE t.nodegroupid = n.nodegroupid
"""
+ node_id_query
+ graph_id_query
+ resourceids_query
+ text_query
- + ")"
- )
-
- resource_count_query = (
- """
- SELECT count(n.resourceinstanceid) FROM resource_instances n
- WHERE 0 = 0
- """
- + graph_id_query
- + resourceids_query
- + tile_sub_query
)
with connection.cursor() as cursor:
@@ -367,7 +360,7 @@ def run_load_task(self, loadid, graph_id, node_id, operation, language_code, old
return {"success": False, "data": {"title": _("Error"), "message": data_staged["message"]}}
if data_updated["success"]:
- data_updated = self.save_to_tiles(loadid)
+ data_updated = self.save_to_tiles(loadid, finalize_import=False)
return {"success": True, "data": "done"}
else:
with connection.cursor() as cursor:
diff --git a/arches/app/etl_modules/base_import_module.py b/arches/app/etl_modules/base_import_module.py
index a95bed8d76c..d040efe8ba4 100644
--- a/arches/app/etl_modules/base_import_module.py
+++ b/arches/app/etl_modules/base_import_module.py
@@ -46,13 +46,13 @@ def reverse(self, request, **kwargs):
logger.warn(response)
return response
- def save_to_tiles(self, loadid, multiprocessing=True):
+ def save_to_tiles(self, loadid, finalize_import=True, multiprocessing=True):
self.loadid = loadid
with connection.cursor() as cursor:
try:
cursor.execute("""CALL __arches_prepare_bulk_load();""")
cursor.execute("""SELECT * FROM __arches_staging_to_tile(%s)""", [self.loadid])
- row = cursor.fetchall()
+ saved = cursor.fetchone()[0]
except (IntegrityError, ProgrammingError) as e:
logger.error(e)
cursor.execute(
@@ -66,19 +66,40 @@ def save_to_tiles(self, loadid, multiprocessing=True):
"message": _("Unable to insert record into staging table"),
}
finally:
- cursor.execute("""CALL __arches_complete_bulk_load();""")
+ try:
+ cursor.execute("""CALL __arches_complete_bulk_load();""")
- if row[0][0]:
+ if finalize_import:
+ cursor.execute("""SELECT __arches_refresh_spatial_views();""")
+ refresh_successful = cursor.fetchone()[0]
+ if not refresh_successful:
+ raise Exception('Unable to refresh spatial views')
+ except Exception as e:
+ logger.exception(e)
+ cursor.execute(
+ """UPDATE load_event SET (status, indexed_time, complete, successful) = (%s, %s, %s, %s) WHERE loadid = %s""",
+ ("unindexed", datetime.now(), True, True, loadid),
+ )
+
+ if saved:
cursor.execute(
"""UPDATE load_event SET (status, load_end_time) = (%s, %s) WHERE loadid = %s""",
("completed", datetime.now(), loadid),
)
- index_resources_by_transaction(loadid, quiet=True, use_multiprocessing=False, recalculate_descriptors=True)
- cursor.execute(
- """UPDATE load_event SET (status, indexed_time, complete, successful) = (%s, %s, %s, %s) WHERE loadid = %s""",
- ("indexed", datetime.now(), True, True, loadid),
- )
- return {"success": True, "data": "success"}
+ try:
+ index_resources_by_transaction(loadid, quiet=True, use_multiprocessing=False, recalculate_descriptors=True)
+ cursor.execute(
+ """UPDATE load_event SET (status, indexed_time, complete, successful) = (%s, %s, %s, %s) WHERE loadid = %s""",
+ ("indexed", datetime.now(), True, True, loadid),
+ )
+ return {"success": True, "data": "indexed"}
+ except Exception as e:
+ logger.exception(e)
+ cursor.execute(
+ """UPDATE load_event SET (status, load_end_time) = (%s, %s) WHERE loadid = %s""",
+ ("unindexed", datetime.now(), loadid),
+ )
+ return {"success": False, "data": "saved"}
else:
cursor.execute(
"""UPDATE load_event SET status = %s, load_end_time = %s WHERE loadid = %s""",
diff --git a/arches/app/etl_modules/branch_csv_importer.py b/arches/app/etl_modules/branch_csv_importer.py
index 6f437b8e987..34230bbe99a 100644
--- a/arches/app/etl_modules/branch_csv_importer.py
+++ b/arches/app/etl_modules/branch_csv_importer.py
@@ -140,7 +140,7 @@ def process_worksheet(self, worksheet, cursor, node_lookup, nodegroup_lookup):
row_count = 0
for row in worksheet.rows:
cell_values = [cell.value for cell in row]
- if len(cell_values) == 0:
+ if len(cell_values) == 0 or not any(cell_values):
continue
resourceid = cell_values[0]
if resourceid is None:
@@ -268,6 +268,11 @@ def read(self, request):
result["summary"]["files"][file.filename] = {"size": (self.filesize_format(file.file_size))}
result["summary"]["cumulative_excel_files_size"] = self.cumulative_excel_files_size
default_storage.save(os.path.join(self.temp_dir, file.filename), File(zip_ref.open(file)))
+ elif content.name.split(".")[-1] == "xlsx":
+ self.cumulative_excel_files_size += content.size
+ result["summary"]["files"][content.name] = {"size": (self.filesize_format(content.size))}
+ result["summary"]["cumulative_excel_files_size"] = self.cumulative_excel_files_size
+ default_storage.save(os.path.join(self.temp_dir, content.name), File(content))
return {"success": result, "data": result}
def start(self, request):
diff --git a/arches/app/media/css/arches.scss b/arches/app/media/css/arches.scss
index f85c5b786cf..abc10441032 100644
--- a/arches/app/media/css/arches.scss
+++ b/arches/app/media/css/arches.scss
@@ -12544,7 +12544,8 @@ ul.select2-choices:after {
}
.iiif-widget-report {
- margin: 0 0 0 320px;
+ width: 425px;
+ margin: 0 20px 10px 0px;
}
.iiif-widget-report .iiif-leaflet {
diff --git a/arches/app/media/js/views/components/etl_modules/base-bulk-string-editor.js b/arches/app/media/js/views/components/etl_modules/base-bulk-string-editor.js
index 2bd6764e97c..cc71d760d0b 100644
--- a/arches/app/media/js/views/components/etl_modules/base-bulk-string-editor.js
+++ b/arches/app/media/js/views/components/etl_modules/base-bulk-string-editor.js
@@ -191,9 +191,8 @@ define([
}
self.addAllFormData();
- self.loading(true);
+ params.activeTab("import");
self.submit('write').then(data => {
- params.activeTab("import");
console.log(data.result);
}).fail( function(err) {
self.alert(
diff --git a/arches/app/models/fields/i18n.py b/arches/app/models/fields/i18n.py
index a350141bb0d..fc3d6653d5c 100644
--- a/arches/app/models/fields/i18n.py
+++ b/arches/app/models/fields/i18n.py
@@ -24,6 +24,18 @@ def _parse(self, value, lang, use_nulls):
if isinstance(value, str) and value != "null":
try:
ret = json.loads(value)
+
+ # the following is a fix for issue #9623 - using double quotation marks in i18n input
+ # re https://github.com/archesproject/arches/issues/9623
+ # the reason we have to do this next check is that we assumed that if the
+ # json.loads method doesn't fail we have a python dict. That's usually
+ # true unless you have a simple string wrapped in quotes
+ # eg: '"hello world"' rather than simply 'hello world'
+ # the quoted string loads without error but is not a dict
+ # hence the need for this check
+ if not isinstance(ret, dict):
+ ret = {}
+ raise Exception("value is not a json object")
except:
ret[lang] = value
self.value_is_primitive = True
diff --git a/arches/app/models/migrations/9648_add_empty_key_value_pairs_to_tiles.py b/arches/app/models/migrations/9648_add_empty_key_value_pairs_to_tiles.py
new file mode 100644
index 00000000000..496bf4f360c
--- /dev/null
+++ b/arches/app/models/migrations/9648_add_empty_key_value_pairs_to_tiles.py
@@ -0,0 +1,29 @@
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("models", "9477_fix_for_spatial_view_dbf_function_edtf_displaying_null"),
+ ]
+
+ def forwards_func(apps, schema_editor):
+ TileModel = apps.get_model("models", "TileModel")
+ Node = apps.get_model("models", "Node")
+
+ for tile in TileModel.objects.filter(data={}, provisionaledits__isnull=False):
+ for node in Node.objects.filter(nodegroup_id=tile.nodegroup_id):
+ if str(node.pk) not in tile.data:
+ tile.data[str(node.pk)] = None
+ tile.save()
+
+ def reverse_func(apps, schema_editor):
+ TileModel = apps.get_model("models", "TileModel")
+
+ for tile in TileModel.objects.filter(provisionaledits__isnull=False):
+ if tile.provisionaledits and not any(tile.data.values()):
+ tile.data = {}
+ tile.save()
+
+ operations = [
+ migrations.RunPython(forwards_func, reverse_func),
+ ]
diff --git a/arches/app/models/migrations/9670_improve_bulk_load_performance.py b/arches/app/models/migrations/9670_improve_bulk_load_performance.py
new file mode 100644
index 00000000000..088e7eea19a
--- /dev/null
+++ b/arches/app/models/migrations/9670_improve_bulk_load_performance.py
@@ -0,0 +1,70 @@
+from django.db import migrations
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("models", "9648_add_empty_key_value_pairs_to_tiles"),
+ ]
+
+ update_check_excess_tiles_trigger = """
+ create or replace procedure __arches_complete_bulk_load() AS
+ $$
+ DECLARE
+ -- no local variables needed; cardinality checks removed from this procedure
+ BEGIN
+ alter table tiles enable trigger __arches_check_excess_tiles_trigger;
+ alter table tiles enable trigger __arches_trg_update_spatial_attributes;
+ END
+ $$
+ language plpgsql;
+ """
+
+ restore_check_excess_tiles_trigger = """
+ create or replace procedure __arches_complete_bulk_load() as
+ $$
+ DECLARE
+ cardinality_violations bigint;
+ BEGIN
+ alter table tiles enable trigger __arches_check_excess_tiles_trigger;
+ alter table tiles enable trigger __arches_trg_update_spatial_attributes;
+
+ if (not __arches_refresh_spatial_views()) then
+ Raise EXCEPTION 'Unable to refresh spatial views';
+ end if;
+
+ with cardinality_violations as (SELECT t.resourceinstanceid,
+ t.nodegroupid,
+ COALESCE(t.parenttileid::text, '') parent_tileid,
+ count(*)
+ FROM tiles t,
+ node_groups ng
+ WHERE t.nodegroupid = ng.nodegroupid
+ AND ng.cardinality = '1'
+ group by t.resourceinstanceid, t.nodegroupid, parent_tileid
+ having count(*) > 1)
+ select count(*)
+ into cardinality_violations
+ from cardinality_violations;
+
+ if (cardinality_violations > 0) then
+ Raise Exception 'Cardinality violations found. Run `%` to list violations',
+ 'select * from __arches_get_tile_cardinality_violations()';
+ else
+ Raise Notice 'No cardinality violations found';
+ end if;
+ END $$
+ language plpgsql;
+ """
+
+ create_index_on_load_staging_tileid = """
+ CREATE INDEX IF NOT EXISTS load_staging_tileid ON load_staging (tileid);
+ """
+
+ drop_index_on_load_staging_tileid = """
+ DROP INDEX IF EXISTS load_staging_tileid;
+ """
+
+ operations = [
+ migrations.RunSQL(update_check_excess_tiles_trigger, restore_check_excess_tiles_trigger),
+ migrations.RunSQL(create_index_on_load_staging_tileid, drop_index_on_load_staging_tileid),
+ ]
diff --git a/arches/app/models/migrations/9746_related_resource_post_save_bug.py b/arches/app/models/migrations/9746_related_resource_post_save_bug.py
new file mode 100644
index 00000000000..25944620d9f
--- /dev/null
+++ b/arches/app/models/migrations/9746_related_resource_post_save_bug.py
@@ -0,0 +1,222 @@
+# Generated by Django 3.2.19 on 2023-07-07 19:09
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('models', '9670_improve_bulk_load_performance'),
+ ]
+
+ forward_sql = """
+ CREATE OR REPLACE FUNCTION public.__arches_create_resource_x_resource_relationships(IN tile_id uuid)
+ RETURNS boolean
+ LANGUAGE 'plpgsql'
+ VOLATILE
+ PARALLEL UNSAFE
+ COST 100
+
+ AS $BODY$
+ DECLARE
+ resourceinstancefrom_id uuid;
+ from_graphid uuid;
+ relational_count text;
+ val boolean = true;
+ BEGIN
+ select count(*) into relational_count from tiles where tiledata::text like '%resourceX%' and tileid = tile_id;
+
+ IF relational_count = '0'
+ THEN
+ RETURN false;
+ END IF;
+
+ --https://dbfiddle.uk/?rdbms=postgres_12&fiddle=21e25754f355a492dfd7b4a134182d2e
+
+ SELECT resourceinstanceid INTO resourceinstancefrom_id FROM tiles WHERE tileid = tile_id;
+ SELECT graphid INTO from_graphid FROM resource_instances WHERE resourceinstanceid = resourceinstancefrom_id;
+
+ DELETE FROM resource_x_resource WHERE tileid = tile_id;
+
+ WITH updated_tiles as (
+ select * from tiles t
+ WHERE
+ t.tileid = tile_id
+ )
+ , relationships AS (
+ SELECT n.nodeid, n.config,
+ jsonb_array_elements(tt.tiledata->n.nodeid::text) AS relationship
+ FROM updated_tiles tt
+ LEFT JOIN nodes n ON tt.nodegroupid = n.nodegroupid
+ WHERE n.datatype IN ('resource-instance-list', 'resource-instance')
+ AND tt.tiledata->>n.nodeid::text IS NOT null
+ )
+ , relationships2 AS (
+ SELECT r.nodeid, r.config, r.relationship, (SELECT ri.graphid
+ FROM resource_instances ri
+ WHERE (r.relationship->>'resourceId')::uuid = ri.resourceinstanceid) AS to_graphid
+ FROM relationships r
+ )
+ , relationships3 AS (
+ SELECT fr.nodeid, fr.relationship, fr.to_graphid,
+ (
+ SELECT graphs->>'ontologyProperty'
+ FROM jsonb_array_elements(fr.config->'graphs') AS graphs
+ WHERE graphs->>'graphid' = fr.to_graphid::text
+ ) AS defaultOntologyProperty,
+ (
+ SELECT graphs->>'inverseOntologyProperty'
+ FROM jsonb_array_elements(fr.config->'graphs') AS graphs
+ WHERE graphs->>'graphid' = fr.to_graphid::text
+ ) AS defaultInverseOntologyProperty
+ FROM relationships2 fr
+ )
+
+ INSERT INTO resource_x_resource (
+ resourcexid,
+ notes,
+ relationshiptype,
+ inverserelationshiptype,
+ resourceinstanceidfrom,
+ resourceinstanceidto,
+ resourceinstancefrom_graphid,
+ resourceinstanceto_graphid,
+ tileid,
+ nodeid,
+ created,
+ modified
+ ) (SELECT
+ CASE relationship->>'resourceXresourceId'
+ WHEN '' THEN uuid_generate_v4()
+ ELSE (relationship->>'resourceXresourceId')::uuid
+ END,
+ '',
+ CASE relationship->>'ontologyProperty'
+ WHEN '' THEN defaultOntologyProperty
+ ELSE relationship->>'ontologyProperty'
+ END,
+ CASE relationship->>'inverseOntologyProperty'
+ WHEN '' THEN defaultInverseOntologyProperty
+ ELSE relationship->>'inverseOntologyProperty'
+ END,
+ resourceinstancefrom_id,
+ (relationship->>'resourceId')::uuid,
+ from_graphid,
+ to_graphid,
+ tile_id,
+ nodeid,
+ now(),
+ now()
+ FROM relationships3);
+ RETURN val;
+ END;
+ $BODY$;
+ """
+
+ reverse_sql = """
+ CREATE OR REPLACE FUNCTION public.__arches_create_resource_x_resource_relationships(IN tile_id uuid)
+ RETURNS boolean
+ LANGUAGE 'plpgsql'
+ VOLATILE
+ PARALLEL UNSAFE
+ COST 100
+
+ AS $BODY$
+ DECLARE
+ resourceinstancefrom_id uuid;
+ from_graphid uuid;
+ relational_count text;
+ val boolean = true;
+ BEGIN
+ select count(*) into relational_count from tiles where tiledata::text like '%resourceX%' and tileid = tile_id;
+
+ IF relational_count = '0'
+ THEN
+ RETURN false;
+ END IF;
+
+ --https://dbfiddle.uk/?rdbms=postgres_12&fiddle=21e25754f355a492dfd7b4a134182d2e
+
+ SELECT resourceinstanceid INTO resourceinstancefrom_id FROM tiles WHERE tileid = tile_id;
+ SELECT graphid INTO from_graphid FROM resource_instances WHERE resourceinstanceid = resourceinstancefrom_id;
+
+ DELETE FROM resource_x_resource WHERE tileid = tile_id;
+
+ WITH updated_tiles as (
+ select * from tiles t
+ WHERE
+ t.tileid = tile_id
+ )
+ , relationships AS (
+ SELECT n.nodeid, n.config,
+ jsonb_array_elements(tt.tiledata->n.nodeid::text) AS relationship
+ FROM updated_tiles tt
+ LEFT JOIN nodes n ON tt.nodegroupid = n.nodegroupid
+ WHERE n.datatype IN ('resource-instance-list', 'resource-instance')
+ AND tt.tiledata->>n.nodeid::text IS NOT null
+ )
+ , relationships2 AS (
+ SELECT r.nodeid, r.config, r.relationship, (SELECT ri.graphid
+ FROM resource_instances ri
+ WHERE r.relationship->>'resourceId' = ri.resourceinstanceid::text) AS to_graphid
+ FROM relationships r
+ )
+ , relationships3 AS (
+ SELECT fr.nodeid, fr.relationship, fr.to_graphid,
+ (
+ SELECT graphs->>'ontologyProperty'
+ FROM jsonb_array_elements(fr.config->'graphs') AS graphs
+ WHERE graphs->>'graphid' = fr.to_graphid::text
+ ) AS defaultOntologyProperty,
+ (
+ SELECT graphs->>'inverseOntologyProperty'
+ FROM jsonb_array_elements(fr.config->'graphs') AS graphs
+ WHERE graphs->>'graphid' = fr.to_graphid::text
+ ) AS defaultInverseOntologyProperty
+ FROM relationships2 fr
+ )
+
+ INSERT INTO resource_x_resource (
+ resourcexid,
+ notes,
+ relationshiptype,
+ inverserelationshiptype,
+ resourceinstanceidfrom,
+ resourceinstanceidto,
+ resourceinstancefrom_graphid,
+ resourceinstanceto_graphid,
+ tileid,
+ nodeid,
+ created,
+ modified
+ ) (SELECT
+ CASE relationship->>'resourceXresourceId'
+ WHEN '' THEN uuid_generate_v4()
+ ELSE (relationship->>'resourceXresourceId')::uuid
+ END,
+ '',
+ CASE relationship->>'ontologyProperty'
+ WHEN '' THEN defaultOntologyProperty
+ ELSE relationship->>'ontologyProperty'
+ END,
+ CASE relationship->>'inverseOntologyProperty'
+ WHEN '' THEN defaultInverseOntologyProperty
+ ELSE relationship->>'inverseOntologyProperty'
+ END,
+ resourceinstancefrom_id,
+ (relationship->>'resourceId')::uuid,
+ from_graphid,
+ to_graphid,
+ tile_id,
+ nodeid,
+ now(),
+ now()
+ FROM relationships3);
+ RETURN val;
+ END;
+ $BODY$;
+ """
+
+ operations = [
+ migrations.RunSQL(forward_sql, reverse_sql),
+ ]
diff --git a/arches/app/models/models.py b/arches/app/models/models.py
index 80d0ade4502..9dbfef8738d 100644
--- a/arches/app/models/models.py
+++ b/arches/app/models/models.py
@@ -1100,8 +1100,15 @@ def __init__(self, *args, **kwargs):
if not self.tileid:
self.tileid = uuid.uuid4()
+ def is_fully_provisional(self):
+ return bool(self.provisionaledits and not any(self.data.values()))
+
def save(self, *args, **kwargs):
- if self.sortorder is None or (self.provisionaledits is not None and self.data == {}):
+ if self.sortorder is None or self.is_fully_provisional():
+ for node in Node.objects.filter(nodegroup_id=self.nodegroup_id):
+ if str(node.pk) not in self.data:
+ self.data[str(node.pk)] = None
+
sortorder_max = TileModel.objects.filter(
nodegroup_id=self.nodegroup_id, resourceinstance_id=self.resourceinstance_id
).aggregate(Max("sortorder"))["sortorder__max"]
diff --git a/arches/app/models/tile.py b/arches/app/models/tile.py
index afc04ef34f6..71bd7f057bd 100644
--- a/arches/app/models/tile.py
+++ b/arches/app/models/tile.py
@@ -210,9 +210,8 @@ def is_provisional(self):
been approved by a user in the resource reviewer group
"""
-
result = False
- if self.provisionaledits is not None and len(self.data) == 0:
+ if self.provisionaledits is not None and not any(self.data.values()):
result = True
return result
@@ -566,7 +565,7 @@ def after_update_all(self):
def is_blank(self):
if self.data != {}:
- if len([item for item in list(self.data.values()) if item is not None]) > 0:
+ if any(self.data.values()):
return False
child_tiles_are_blank = True
diff --git a/arches/app/templates/javascript.htm b/arches/app/templates/javascript.htm
index 535aaca9988..353b9ffd95a 100644
--- a/arches/app/templates/javascript.htm
+++ b/arches/app/templates/javascript.htm
@@ -712,7 +712,7 @@
download-templates='{% trans "Download Templates" as downloadTemplates %} "{{ downloadTemplates|escapejs }}"'
select-template='{% trans "Select Template" as selectTemplate %} "{{ selectTemplate|escapejs }}"'
upload-zip-file='{% trans "Upload .zip File" as uploadZipFile %} "{{ uploadZipFile|escapejs }}"'
- upload-your-zip-file='{% trans "Upload Your .zip File" as uploadYourZipFile %} "{{ uploadYourZipFile|escapejs }}"'
+ upload-your-xlsx-zip-file='{% trans "Upload Your .xlsx or .zip File" as uploadYourXlsxZipFile %} "{{ uploadYourXlsxZipFile|escapejs }}"'
branch-excel='{% trans "Branch Excel" as branchExcel %} "{{ branchExcel|escapejs }}"'
file-upload-summary='{% trans "File Upload Summary" as fileUploadSummary %} "{{ fileUploadSummary|escapejs }}"'
file='{% trans "File" as file %} "{{ file|escapejs }}"'
@@ -736,6 +736,7 @@
remove-from-history='{% trans "remove from history" as removeFromHistory %} "{{ removeFromHistory|escapejs }}"'
number-of-resources-updated='{% trans "Number of Resources Updated" as numberOfResourcesUpdated %} "{{ numberOfResourcesUpdated|escapejs }}"'
indexing='{% trans "indexing" as indexing %} "{{ indexing|escapejs }}"'
+ loaded-but-unindexed='{% trans "loaded but unindexed" as loadedButUnindexed %} "{{ loadedButUnindexed|escapejs }}"'
validating='{% trans "validating" as validating %} "{{ validating|escapejs }}"'
completed='{% trans "completed" as completed %} "{{ completed|escapejs }}"'
failed='{% trans "failed" as failed %} "{{ failed|escapejs }}"'
diff --git a/arches/app/templates/views/components/etl_modules/branch-csv-importer.htm b/arches/app/templates/views/components/etl_modules/branch-csv-importer.htm
index 59cb764e513..4440ee6d667 100644
--- a/arches/app/templates/views/components/etl_modules/branch-csv-importer.htm
+++ b/arches/app/templates/views/components/etl_modules/branch-csv-importer.htm
@@ -50,7 +50,7 @@
-
+
diff --git a/arches/app/templates/views/components/plugins/etl-manager.htm b/arches/app/templates/views/components/plugins/etl-manager.htm
index 3bafd6f0a8a..17e25b09e06 100644
--- a/arches/app/templates/views/components/plugins/etl-manager.htm
+++ b/arches/app/templates/views/components/plugins/etl-manager.htm
@@ -143,6 +143,12 @@
+
+
+