Commit

- Fix: race on upload transaction
(cherry picked from commit 5f252b2)
afabiani committed Nov 29, 2021
1 parent de3f263 commit 240cd5b
Showing 3 changed files with 66 additions and 64 deletions.
4 changes: 4 additions & 0 deletions geonode/geoserver/tasks.py
@@ -217,6 +217,10 @@ def geoserver_finalize_upload(
     if lock.acquire() is True:
         from geonode.upload.models import Upload
         upload = Upload.objects.get(import_id=import_id)
+
+        if upload.layer is not None:
+            return
+
         upload.layer = instance
         upload.save()
 
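Note: the four added lines make the finalize task idempotent. Once the lock is held, the task re-checks whether a layer has already been attached to the Upload and returns early instead of overwriting it. Below is a minimal, self-contained sketch of that check-after-lock pattern, using a plain threading.Lock and a stand-in upload object rather than GeoNode's Celery task and Django model (FakeUpload and finalize are illustrative names only):

import threading
from dataclasses import dataclass
from typing import Optional


@dataclass
class FakeUpload:
    # Stand-in for geonode.upload.models.Upload; only the field the guard needs.
    layer: Optional[str] = None


_lock = threading.Lock()


def finalize(upload: FakeUpload, instance: str) -> None:
    with _lock:
        # Re-check shared state after acquiring the lock: another worker may
        # have finished the same upload while we were waiting.
        if upload.layer is not None:
            return
        upload.layer = instance


if __name__ == "__main__":
    u = FakeUpload()
    finalize(u, "layer-a")
    finalize(u, "layer-b")  # no-op: the layer is already set
    print(u.layer)          # prints "layer-a"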
66 changes: 33 additions & 33 deletions geonode/upload/models.py
@@ -129,39 +129,39 @@ def update_from_session(self, upload_session, layer=None):
         if layer and not self.layer:
             self.layer = layer
 
-            if upload_session.base_file and self.layer and self.layer.name:
-                uploaded_files = upload_session.base_file[0]
-                base_file = uploaded_files.base_file
-                aux_files = uploaded_files.auxillary_files
-                sld_files = uploaded_files.sld_files
-                xml_files = uploaded_files.xml_files
-
-                if not UploadFile.objects.filter(upload=self, file=base_file).count():
-                    uploaded_file = UploadFile.objects.create_from_upload(
-                        self,
-                        base_file,
-                        None,
-                        base=True)
-
-                    if uploaded_file and uploaded_file.name:
-                        assigned_name = uploaded_file.name
-                        for _f in aux_files:
-                            UploadFile.objects.create_from_upload(
-                                self,
-                                _f,
-                                assigned_name)
-
-                        for _f in sld_files:
-                            UploadFile.objects.create_from_upload(
-                                self,
-                                _f,
-                                assigned_name)
-
-                        for _f in xml_files:
-                            UploadFile.objects.create_from_upload(
-                                self,
-                                _f,
-                                assigned_name)
+        if upload_session.base_file and self.layer and self.layer.name:
+            uploaded_files = upload_session.base_file[0]
+            base_file = uploaded_files.base_file
+            aux_files = uploaded_files.auxillary_files
+            sld_files = uploaded_files.sld_files
+            xml_files = uploaded_files.xml_files
+
+            if not UploadFile.objects.filter(upload=self, file=base_file).count():
+                uploaded_file = UploadFile.objects.create_from_upload(
+                    self,
+                    base_file,
+                    None,
+                    base=True)
+
+                if uploaded_file and uploaded_file.name:
+                    assigned_name = uploaded_file.name
+                    for _f in aux_files:
+                        UploadFile.objects.create_from_upload(
+                            self,
+                            _f,
+                            assigned_name)
+
+                    for _f in sld_files:
+                        UploadFile.objects.create_from_upload(
+                            self,
+                            _f,
+                            assigned_name)
+
+                    for _f in xml_files:
+                        UploadFile.objects.create_from_upload(
+                            self,
+                            _f,
+                            assigned_name)
 
         if "COMPLETE" == self.state:
             self.complete = True
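This block registers the uploaded base file once, skipping creation when an UploadFile record for it already exists, and then attaches the auxiliary, SLD and XML companion files under the name assigned to the base file. A rough sketch of that register-once-then-attach flow follows, with plain Python containers standing in for the Django models (FileRecord, UploadFiles and the naming rule are illustrative, not GeoNode APIs):

from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class FileRecord:
    path: str
    assigned_name: Optional[str]
    base: bool = False


@dataclass
class UploadFiles:
    records: List[FileRecord] = field(default_factory=list)

    def register(self, base_file: str, aux_files: List[str],
                 sld_files: List[str], xml_files: List[str]) -> None:
        # Mirror of the UploadFile.objects.filter(...).count() guard:
        # do nothing if the base file has already been recorded.
        if any(r.path == base_file and r.base for r in self.records):
            return
        self.records.append(FileRecord(base_file, None, base=True))
        assigned_name = base_file.rsplit("/", 1)[-1]  # illustrative naming rule
        for companion in [*aux_files, *sld_files, *xml_files]:
            self.records.append(FileRecord(companion, assigned_name))


if __name__ == "__main__":
    files = UploadFiles()
    files.register("/tmp/roads.shp", ["/tmp/roads.dbf"], ["/tmp/roads.sld"], [])
    files.register("/tmp/roads.shp", ["/tmp/roads.dbf"], [], [])  # skipped: already registered
    print(len(files.records))  # 3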
60 changes: 29 additions & 31 deletions geonode/upload/upload.py
@@ -750,40 +750,38 @@ def final_step(upload_session, user, charset="UTF-8", layer_id=None):
             _log(
                 f"There was an error updating the mosaic temporal extent: {str(e)}")
     else:
-        try:
-            with transaction.atomic():
-                saved_dataset_filter = Layer.objects.filter(
-                    store=target.name,
-                    alternate=alternate,
-                    workspace=target.workspace_name,
-                    name=task.layer.name)
-                if not saved_dataset_filter.exists():
-                    saved_layer = Layer.objects.create(
-                        uuid=layer_uuid or str(uuid.uuid1()),
-                        store=target.name,
-                        storeType=target.store_type,
-                        alternate=alternate,
-                        workspace=target.workspace_name,
-                        title=title,
-                        name=task.layer.name,
-                        abstract=abstract or '',
-                        owner=user,
-                        temporal_extent_start=start,
-                        temporal_extent_end=end,
-                        is_mosaic=False,
-                        has_time=has_time,
-                        has_elevation=has_elevation,
-                        time_regex=upload_session.mosaic_time_regex)
-                    created = True
-                else:
-                    saved_layer = saved_dataset_filter.get()
-                    created = False
-        except IntegrityError as e:
-            Upload.objects.invalidate_from_session(upload_session)
-            raise UploadException.from_exc(_('Error configuring Layer'), e)
+        saved_dataset_filter = Layer.objects.filter(
+            store=target.name,
+            alternate=alternate,
+            workspace=target.workspace_name,
+            name=task.layer.name)
+        if not saved_dataset_filter.exists():
+            saved_layer = Layer.objects.create(
+                uuid=layer_uuid or str(uuid.uuid1()),
+                store=target.name,
+                storeType=target.store_type,
+                alternate=alternate,
+                workspace=target.workspace_name,
+                title=title,
+                name=task.layer.name,
+                abstract=abstract or '',
+                owner=user,
+                temporal_extent_start=start,
+                temporal_extent_end=end,
+                is_mosaic=False,
+                has_time=has_time,
+                has_elevation=has_elevation,
+                time_regex=upload_session.mosaic_time_regex)
+            created = True
+        else:
+            saved_layer = saved_dataset_filter.get()
+            created = False
 
     assert saved_layer
 
+    if not created:
+        return saved_layer
+
     # Create a new upload session
     try:
         with transaction.atomic():
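The rewritten branch drops the transaction.atomic() / IntegrityError wrapper in favor of a plain look-up-then-create: it filters for an existing Layer on the same store, alternate, workspace and name, creates one only when the filter is empty, and records whether this call created it so that the later `if not created: return saved_layer` can short-circuit a concurrent finalization. A self-contained sketch of that shape, with an in-memory dict standing in for the Layer queryset (get_or_create_layer and _layers are illustrative names, not GeoNode code):

from typing import Dict, Tuple

# Hypothetical in-memory registry keyed the same way as the
# Layer.objects.filter(store=..., alternate=..., workspace=..., name=...) call.
_layers: Dict[Tuple[str, str, str, str], dict] = {}


def get_or_create_layer(store: str, alternate: str, workspace: str, name: str,
                        **attrs) -> Tuple[dict, bool]:
    key = (store, alternate, workspace, name)
    existing = _layers.get(key)
    if existing is not None:
        # A concurrent run already created this layer: reuse it, created=False.
        return existing, False
    layer = {"store": store, "alternate": alternate,
             "workspace": workspace, "name": name, **attrs}
    _layers[key] = layer
    return layer, True


if __name__ == "__main__":
    _, created = get_or_create_layer("geonode_data", "geonode:roads",
                                     "geonode", "roads", title="Roads")
    assert created
    _, created = get_or_create_layer("geonode_data", "geonode:roads",
                                     "geonode", "roads", title="Roads")
    assert not created  # second pass would return early, like `if not created: return`

Django's own QuerySet.get_or_create bundles a similar filter-then-create step, but the patch keeps the explicit exists() check.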
