Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update master with latest in 7.5.x #9843

Merged
merged 77 commits into from
Jul 27, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
77 commits
Select commit Hold shift + click to select a range
ea24bf5
updates jquery to track minor/patch versions #9527
chrabyrd May 12, 2023
c1288dd
Merge pull request #9529 from archesproject/9527-cbyrd-updates-jquery…
chiatt May 17, 2023
f1c6d60
Simplify value check, re #9648
chiatt Jun 19, 2023
bc5551c
Add check for prov tiles to model, re #9648
chiatt Jun 19, 2023
b27a04d
creates empty key/value pairs for all nodes in a tile #9648
chrabyrd Jun 29, 2023
46f6754
adds migration for tiledata #9648
chrabyrd Jun 29, 2023
62e0caf
nit #9648
chrabyrd Jun 29, 2023
ad12852
updates failing tests #9648
chrabyrd Jun 30, 2023
faa9294
fixes failing tests #9648
chrabyrd Jun 30, 2023
89dc9b1
Update trigger function and add index, #9671
njkim Jul 3, 2023
ceaea6c
Skip check cardinality for editor, #9669
njkim Jul 3, 2023
e387d68
Try catching error after load before index, #9671
njkim Jul 3, 2023
c78beee
Fix migrations, #9670
njkim Jul 3, 2023
e9c47ee
Update sql syntax, #9670
njkim Jul 3, 2023
4b02d35
updates base datatype get_tile_data #9648
chrabyrd Jul 4, 2023
ac4c58a
updates datatype index command to handle TypeError #9648
chrabyrd Jul 5, 2023
b103a89
updates labelbasedgraph to handle provisional edits #9715
chrabyrd Jul 5, 2023
4955bfa
Patch upgrade for django
chiatt Jul 6, 2023
ed072c5
Merge pull request #9731 from archesproject/django_patch_3.2.20_in_ar…
aarongundel Jul 6, 2023
49a2d6f
PR changes #9648
chrabyrd Jul 7, 2023
efe7769
reverses changes to tests since updating TileModel.save logic #9648
chrabyrd Jul 7, 2023
10c40d7
Merge pull request #9704 from archesproject/9648_prevent_prov_edit_er…
chiatt Jul 7, 2023
612f9ec
Merge pull request #9717 from archesproject/9715-cbyrd-updates-labelb…
chiatt Jul 8, 2023
eae4a83
fix type cast issue with __arches_create_resource_x_resource_relation…
apeters Jul 8, 2023
30847d5
Handles for blank lines in branch excel, re #9750
chiatt Jul 9, 2023
50b0b12
Merge pull request #9751 from archesproject/9750_prevent_empty_line_e…
ryan86 Jul 10, 2023
0c93787
merge
chiatt Jul 11, 2023
8c4e162
Merge pull request #9755 from archesproject/update_74_with_62
chiatt Jul 11, 2023
005195b
Merge pull request #9756 from archesproject/dev/7.4.x
chiatt Jul 11, 2023
2afa9c8
Pass missing args to bulkdisambiguatedresource api endpoint
chiatt Jul 14, 2023
11d4bda
Sets default width, re #8419
chiatt Jul 14, 2023
a12711e
Update js dep patch releases
chiatt Jul 14, 2023
76e749f
9561 Json-Ld fix : Omit Edge from Null Domain
khodgkinson-he Jul 17, 2023
05ddb64
Merge pull request #9747 from archesproject/9746_related_resource_pos…
chiatt Jul 18, 2023
26d3107
Merge pull request #9781 from archesproject/8419_iiif_widget_in_report
njkim Jul 18, 2023
2a8ff43
Merge pull request #9782 from archesproject/js_patch_updates
njkim Jul 18, 2023
77fc6cc
Update rdffile.py as per PR suggestion
khodgkinson-he Jul 18, 2023
1940bc9
Apply fix to unpreloaded data
chiatt Jul 18, 2023
6004dbc
Merge pull request #9783 from HistoricEngland/9561_JsonLd_Edgefix
chiatt Jul 18, 2023
c33ef02
Merge pull request #9780 from archesproject/tile_filter
njkim Jul 18, 2023
19285dd
Merge branch 'dev/6.2.x' into merge_6_into_7
chiatt Jul 18, 2023
24c3482
Merge pull request #9790 from archesproject/merge_6_into_7
chiatt Jul 18, 2023
94abd5f
Merge pull request #9791 from archesproject/dev/7.4.x
chiatt Jul 19, 2023
9134337
Add support for xlsx file, #8469
njkim Jul 19, 2023
d95bc0d
adding speechmark conversion
SDScandrettKint Jul 19, 2023
03ec37b
replacing / with - in create package
SDScandrettKint Jul 19, 2023
64da097
Merge pull request #9795 from SDScandrettKint/9777_create_package_sla…
chiatt Jul 19, 2023
679f509
slight improvement on how we handle for quoted strings in i18n string…
apeters Jul 19, 2023
9d1d558
Merge pull request #9794 from SDScandrettKint/9623_model_abstract_iss…
chiatt Jul 20, 2023
d69c385
standardize the way triggers are managed in the bulk editor, re #9670
apeters Jul 20, 2023
8277166
improve responsiveness of the UI, re #9670
apeters Jul 20, 2023
aab54e8
Ensure pytz and tzdata are at version compatible with Celery, re #9797
chiatt Jul 20, 2023
6203b04
Merge pull request #9714 from archesproject/9670_improve_bulk_load_pe…
apeters Jul 20, 2023
131d69d
fixes migration order #9801
chrabyrd Jul 20, 2023
6e9a81f
Merge pull request #9802 from archesproject/9801-cbyrd-fix-migration-…
chiatt Jul 20, 2023
304f162
Fix logic to use search url in bulk editor, #9806
njkim Jul 21, 2023
41a2fb9
Merge pull request #9808 from archesproject/9806_search_url_bulk_editor
chiatt Jul 21, 2023
cf340da
Merge pull request #9809 from archesproject/8469_update_branch_excel
chiatt Jul 21, 2023
2f66aac
fixes broken card tree #9805
chrabyrd Jul 21, 2023
3a02230
Merge pull request #9814 from archesproject/9805_fix_card_tree
chiatt Jul 21, 2023
80e2379
fixes missing nodegroup issue #9803
chrabyrd Jul 20, 2023
efd70cd
Merge pull request #9815 from archesproject/9803_fix_cardinality_chec…
chiatt Jul 21, 2023
fbeaf19
Merge pull request #9816 from archesproject/dev/6.2.x
chiatt Jul 21, 2023
8b150f9
Check individual concept labels when importing with --create_concepts…
atapscott Jun 30, 2023
82bac3b
Merge pull request #9817 from archesproject/dev/7.4.x
chiatt Jul 22, 2023
8da0ff2
Merge pull request #9818 from archesproject/9706_move_to_dev74
chiatt Jul 22, 2023
e50e7c3
Update to return value as an object form, #9523
njkim Jul 24, 2023
b42d90b
Move comments, #9824
njkim Jul 24, 2023
ec270e4
Add upper bound on urllib3 (<2) #9826
jacobtylerwalls Jul 24, 2023
a47c7c0
Merge pull request #9827 from jacobtylerwalls/urllib3-upper-bound
chiatt Jul 25, 2023
245f484
Merge 6.2.x into 7.4.x
chiatt Jul 25, 2023
efb110c
Merge pull request #9835 from archesproject/update_7_with_6
chiatt Jul 25, 2023
2cfb73d
Merge pull request #9825 from archesproject/9824_url_datatype_tile_tr…
chiatt Jul 26, 2023
015fc01
Simplify counting to avoid unnecessary join, #9387
njkim Jul 26, 2023
c390c1b
Merge pull request #9800 from archesproject/9797_failing_celery_conne…
njkim Jul 27, 2023
8661345
Merge pull request #9838 from archesproject/9837_fix_preview_bulk_editor
apeters Jul 27, 2023
afe0941
Merge branch 'dev/7.4.x' into dev/7.5.x
apeters Jul 27, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion arches/app/datatypes/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ def get_tile_data(self, tile):
except:
data = tile["data"]
provisionaledits = tile["provisionaledits"]
if data is not None and len(list(data.keys())) > 0:
if data is not None and any(data.values()):
return data
elif provisionaledits is not None and len(list(provisionaledits.keys())) > 0:
if len(list(provisionaledits.keys())) > 1:
Expand Down
2 changes: 1 addition & 1 deletion arches/app/datatypes/datatypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -1535,7 +1535,7 @@ def append_to_document(self, document, nodevalue, nodeid, tile, provisional=Fals
for f in tile.data[str(nodeid)]:
val = {"string": f["name"], "nodegroup_id": tile.nodegroup_id, "provisional": provisional}
document["strings"].append(val)
except KeyError as e:
except (KeyError, TypeError) as e:
for k, pe in tile.provisionaledits.items():
for f in pe["value"][nodeid]:
val = {"string": f["name"], "nodegroup_id": tile.nodegroup_id, "provisional": provisional}
Expand Down
7 changes: 5 additions & 2 deletions arches/app/datatypes/url.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,9 +100,12 @@ def transform_value_for_tile(self, value, **kwargs):
try:
return ast.literal_eval(value)
except:
# this will probably fail validation, but that is ok. We need the error to report the value.
return value
if isinstance(value, dict):
return value
else:
return {"url": value, "url_label": ""}
except BaseException:
# this will probably fail validation, but that is ok. We need the error to report the value.
if isinstance(value, dict):
return value
else:
Expand Down
39 changes: 16 additions & 23 deletions arches/app/etl_modules/base_data_editor.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
from datetime import datetime
import json
import logging
import requests
from urllib.parse import urlparse, urlunparse
from urllib.parse import urlsplit, parse_qs
import uuid
from django.db import connection
from django.db.models.functions import Lower
from django.http import HttpRequest
from django.utils.translation import ugettext as _
from arches.app.datatypes.datatypes import DataTypeFactory
from arches.app.etl_modules.base_import_module import BaseImportModule
from arches.app.models.models import GraphModel, Node
from arches.app.models.system_settings import settings
import arches.app.tasks as tasks
from arches.app.utils.index_database import index_resources_by_transaction
from arches.app.views.search import search_results

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -101,11 +100,16 @@ def log_event(self, cursor, status):
)

def get_resourceids_from_search_url(self, search_url):
parsed_url = urlparse(search_url)
search_result_url = urlunparse(parsed_url._replace(path="/search/resources"))
response = requests.get(search_result_url + "&export=true")
search_results = response.json()["results"]["hits"]["hits"]
return [result["_source"]["resourceinstanceid"] for result in search_results]
request = HttpRequest()
request.user = self.request.user
request.method = "GET"
request.GET["export"] = True
params = parse_qs(urlsplit(search_url).query)
for k, v in params.items():
request.GET.__setitem__(k, v[0])
response = search_results(request)
results = json.loads(response.content)['results']['hits']['hits']
return [result["_source"]["resourceinstanceid"] for result in results]

def validate(self, request):
return {"success": True, "data": {}}
Expand Down Expand Up @@ -168,26 +172,15 @@ def get_preview_data(self, graph_id, node_id, resourceids, language_code, old_te
+ text_query
)

tile_sub_query = (
resource_count_query = (
"""
AND resourceinstanceid IN (SELECT DISTINCT t.resourceinstanceid FROM tiles t, nodes n
SELECT count(DISTINCT t.resourceinstanceid) FROM tiles t, nodes n
WHERE t.nodegroupid = n.nodegroupid
"""
+ node_id_query
+ graph_id_query
+ resourceids_query
+ text_query
+ ")"
)

resource_count_query = (
"""
SELECT count(n.resourceinstanceid) FROM resource_instances n
WHERE 0 = 0
"""
+ graph_id_query
+ resourceids_query
+ tile_sub_query
)

with connection.cursor() as cursor:
Expand Down Expand Up @@ -367,7 +360,7 @@ def run_load_task(self, loadid, graph_id, node_id, operation, language_code, old
return {"success": False, "data": {"title": _("Error"), "message": data_staged["message"]}}

if data_updated["success"]:
data_updated = self.save_to_tiles(loadid)
data_updated = self.save_to_tiles(loadid, finalize_import=False)
return {"success": True, "data": "done"}
else:
with connection.cursor() as cursor:
Expand Down
41 changes: 31 additions & 10 deletions arches/app/etl_modules/base_import_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,13 @@ def reverse(self, request, **kwargs):
logger.warn(response)
return response

def save_to_tiles(self, loadid, multiprocessing=True):
def save_to_tiles(self, loadid, finalize_import=True, multiprocessing=True):
self.loadid = loadid
with connection.cursor() as cursor:
try:
cursor.execute("""CALL __arches_prepare_bulk_load();""")
cursor.execute("""SELECT * FROM __arches_staging_to_tile(%s)""", [self.loadid])
row = cursor.fetchall()
saved = cursor.fetchone()[0]
except (IntegrityError, ProgrammingError) as e:
logger.error(e)
cursor.execute(
Expand All @@ -66,19 +66,40 @@ def save_to_tiles(self, loadid, multiprocessing=True):
"message": _("Unable to insert record into staging table"),
}
finally:
cursor.execute("""CALL __arches_complete_bulk_load();""")
try:
cursor.execute("""CALL __arches_complete_bulk_load();""")

if row[0][0]:
if finalize_import:
cursor.execute("""SELECT __arches_refresh_spatial_views();""")
refresh_successful = cursor.fetchone()[0]
if not refresh_successful:
raise Exception('Unable to refresh spatial views')
except Exception as e:
logger.exception(e)
cursor.execute(
"""UPDATE load_event SET (status, indexed_time, complete, successful) = (%s, %s, %s, %s) WHERE loadid = %s""",
("unindexed", datetime.now(), True, True, loadid),
)

if saved:
cursor.execute(
"""UPDATE load_event SET (status, load_end_time) = (%s, %s) WHERE loadid = %s""",
("completed", datetime.now(), loadid),
)
index_resources_by_transaction(loadid, quiet=True, use_multiprocessing=False, recalculate_descriptors=True)
cursor.execute(
"""UPDATE load_event SET (status, indexed_time, complete, successful) = (%s, %s, %s, %s) WHERE loadid = %s""",
("indexed", datetime.now(), True, True, loadid),
)
return {"success": True, "data": "success"}
try:
index_resources_by_transaction(loadid, quiet=True, use_multiprocessing=False, recalculate_descriptors=True)
cursor.execute(
"""UPDATE load_event SET (status, indexed_time, complete, successful) = (%s, %s, %s, %s) WHERE loadid = %s""",
("indexed", datetime.now(), True, True, loadid),
)
return {"success": True, "data": "indexed"}
except Exception as e:
logger.exception(e)
cursor.execute(
"""UPDATE load_event SET (status, load_end_time) = (%s, %s) WHERE loadid = %s""",
("unindexed", datetime.now(), loadid),
)
return {"success": False, "data": "saved"}
else:
cursor.execute(
"""UPDATE load_event SET status = %s, load_end_time = %s WHERE loadid = %s""",
Expand Down
7 changes: 6 additions & 1 deletion arches/app/etl_modules/branch_csv_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ def process_worksheet(self, worksheet, cursor, node_lookup, nodegroup_lookup):
row_count = 0
for row in worksheet.rows:
cell_values = [cell.value for cell in row]
if len(cell_values) == 0:
if len(cell_values) == 0 or any(cell_values) is False:
continue
resourceid = cell_values[0]
if resourceid is None:
Expand Down Expand Up @@ -268,6 +268,11 @@ def read(self, request):
result["summary"]["files"][file.filename] = {"size": (self.filesize_format(file.file_size))}
result["summary"]["cumulative_excel_files_size"] = self.cumulative_excel_files_size
default_storage.save(os.path.join(self.temp_dir, file.filename), File(zip_ref.open(file)))
elif content.name.split(".")[-1] == "xlsx":
self.cumulative_excel_files_size += content.size
result["summary"]["files"][content.name] = {"size": (self.filesize_format(content.size))}
result["summary"]["cumulative_excel_files_size"] = self.cumulative_excel_files_size
default_storage.save(os.path.join(self.temp_dir, content.name), File(content))
return {"success": result, "data": result}

def start(self, request):
Expand Down
3 changes: 2 additions & 1 deletion arches/app/media/css/arches.scss
Original file line number Diff line number Diff line change
Expand Up @@ -12544,7 +12544,8 @@ ul.select2-choices:after {
}

.iiif-widget-report {
margin: 0 0 0 320px;
width: 425px;
margin: 0 20px 10px 0px;
}

.iiif-widget-report .iiif-leaflet {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,9 +191,8 @@ define([
}

self.addAllFormData();
self.loading(true);
params.activeTab("import");
self.submit('write').then(data => {
params.activeTab("import");
console.log(data.result);
}).fail( function(err) {
self.alert(
Expand Down
12 changes: 12 additions & 0 deletions arches/app/models/fields/i18n.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,18 @@ def _parse(self, value, lang, use_nulls):
if isinstance(value, str) and value != "null":
try:
ret = json.loads(value)

# the following is a fix for issue #9623 - using double quotation marks in i18n input
# re https://github.com/archesproject/arches/issues/9623
# the reason we have to do this next check is that we assumed that if the
# json.loads method doesn't fail we have a python dict. That's usually
# true unless you have a simple string wrapped in quotes
# eg: '"hello world"' rather than simply 'hello world'
# the quoted string loads without error but is not a dict
# hence the need for this check
if not isinstance(ret, dict):
ret = {}
raise Exception("value is not a json object")
except:
ret[lang] = value
self.value_is_primitive = True
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    """Give every node in a tile's nodegroup an explicit key in tile.data.

    Provisional-only tiles were saved with an empty ``data`` dict, which broke
    code that expects a key (possibly mapped to None) for every node in the
    tile's nodegroup (re #9648). The forward migration backfills those keys;
    the reverse migration strips tiles back to an empty ``data`` dict when the
    data holds nothing but the backfilled placeholders.
    """

    dependencies = [
        ("models", "9477_fix_for_spatial_view_dbf_function_edtf_displaying_null"),
    ]

    def forwards_func(apps, schema_editor):
        # Use the historical models so this migration stays valid even if the
        # current model classes change later.
        TileModel = apps.get_model("models", "TileModel")
        Node = apps.get_model("models", "Node")

        # Only tiles whose authoritative data is empty but which carry
        # provisional edits need the placeholder keys.
        for tile in TileModel.objects.filter(data={}, provisionaledits__isnull=False):
            for node in Node.objects.filter(nodegroup_id=tile.nodegroup_id):
                if str(node.pk) not in tile.data:
                    tile.data[str(node.pk)] = None
            # Save once per tile after all placeholder keys are added --
            # same end state as saving per node, far fewer writes.
            tile.save()

    def reverse_func(apps, schema_editor):
        TileModel = apps.get_model("models", "TileModel")

        # A tile whose values are all falsy holds only the backfilled
        # placeholders (None), so it is safe to restore the empty dict.
        for tile in TileModel.objects.filter(provisionaledits__isnull=False):
            if tile.provisionaledits and not any(tile.data.values()):
                tile.data = {}
                tile.save()

    operations = [
        migrations.RunPython(forwards_func, reverse_func),
    ]
70 changes: 70 additions & 0 deletions arches/app/models/migrations/9670_improve_bulk_load_performance.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
from django.db import migrations

class Migration(migrations.Migration):
    """Improve bulk-load performance (re #9670).

    Forward: slims ``__arches_complete_bulk_load()`` down to just re-enabling
    the tile triggers (spatial-view refresh and cardinality checking are now
    handled by the bulk loader itself) and adds an index on
    ``load_staging.tileid`` to speed up staging-table joins. Reverse restores
    the original procedure body and drops the index.
    """

    dependencies = [
        ("models", "9648_add_empty_key_value_pairs_to_tiles"),
    ]

    # Forward SQL: the procedure now only re-enables the triggers that the
    # bulk-load preparation step disabled.
    # NOTE(review): the ``cardinality_violations`` DECLARE is a leftover from
    # the old body and is unused here -- confirm it can be dropped upstream.
    update_check_excess_tiles_trigger = """
        create or replace procedure __arches_complete_bulk_load() AS
        $$
            DECLARE
                cardinality_violations bigint;
            BEGIN
                alter table tiles enable trigger __arches_check_excess_tiles_trigger;
                alter table tiles enable trigger __arches_trg_update_spatial_attributes;
            END
        $$
        language plpgsql;
    """

    # Reverse SQL: the original procedure body, which also refreshed the
    # spatial views and raised on cardinality violations after re-enabling
    # the triggers.
    restore_check_excess_tiles_trigger = """
        create or replace procedure __arches_complete_bulk_load() as
        $$
            DECLARE
                cardinality_violations bigint;
            BEGIN
                alter table tiles enable trigger __arches_check_excess_tiles_trigger;
                alter table tiles enable trigger __arches_trg_update_spatial_attributes;

                if (not __arches_refresh_spatial_views()) then
                    Raise EXCEPTION 'Unable to refresh spatial views';
                end if;

                with cardinality_violations as (SELECT t.resourceinstanceid,
                        t.nodegroupid,
                        COALESCE(t.parenttileid::text, '') parent_tileid,
                        count(*)
                    FROM tiles t,
                        node_groups ng
                    WHERE t.nodegroupid = ng.nodegroupid
                        AND ng.cardinality = '1'
                    group by t.resourceinstanceid, t.nodegroupid, parent_tileid
                    having count(*) > 1)
                select count(*)
                into cardinality_violations
                from cardinality_violations;

                if (cardinality_violations > 0) then
                    Raise Exception 'Cardinality violations found. Run `%` to list violations',
                        'select * from __arches_get_tile_cardinality_violations()';
                else
                    Raise Notice 'No cardinality violations found';
                end if;
            END $$
        language plpgsql;
    """

    # Index on load_staging.tileid; IF NOT EXISTS / IF EXISTS make both
    # directions idempotent.
    create_index_on_load_staging_tileid = """
        CREATE INDEX IF NOT EXISTS load_staging_tileid ON load_staging (tileid);
    """

    drop_index_on_load_staging_tileid = """
        DROP INDEX IF EXISTS load_staging_tileid;
    """

    operations = [
        migrations.RunSQL(update_check_excess_tiles_trigger, restore_check_excess_tiles_trigger),
        migrations.RunSQL(create_index_on_load_staging_tileid, drop_index_on_load_staging_tileid),
    ]
Loading