diff --git a/docs/data.rst b/docs/data.rst index 851e351b57..9dcedd8860 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -570,7 +570,12 @@ Response Query submitted data of a specific form ---------------------------------------- -Use the `query` or `data` parameter to pass in a JSON key/value query. +Use the `query` or `data` parameter to pass in a JSON key/value query. + +When querying a datetime field whose value is in ISO format such as ``2020-12-18T09:36:19.767455+00:00``, it is important to ensure the ``+`` (plus) is encoded to ``%2b``. + +``+`` without encoding is parsed as whitespace. So ``2020-12-18T09:36:19.767455+00:00`` should be converted to ``2020-12-18T09:36:19.767455%2b00:00``. + Example I ^^^^^^^^^ diff --git a/docs/forms.rst b/docs/forms.rst index 5c4184b859..065afa063f 100644 --- a/docs/forms.rst +++ b/docs/forms.rst @@ -1627,4 +1627,4 @@ If the upload is still running: HTTP 202 Accepted { "job_status": "PENDING" - } + } \ No newline at end of file diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index 268cbe1931..09a751ae26 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -18,10 +18,15 @@ from onadata.apps.api import tools from onadata.libs.utils.email import send_generic_email from onadata.libs.utils.model_tools import queryset_iterator +from onadata.libs.utils.cache_tools import ( + safe_delete, + XFORM_REGENERATE_INSTANCE_JSON_TASK, +) from onadata.apps.logger.models import Instance, ProjectInvitation, XForm from onadata.libs.utils.email import ProjectInvitationEmail from onadata.celeryapp import app + User = get_user_model() @@ -145,3 +150,35 @@ def send_project_invitation_email_async( else: email = ProjectInvitationEmail(invitation, url) email.send() + + +@app.task(track_started=True) +def regenerate_form_instance_json(xform_id: int): + """Regenerate a form's instances json + + Json data recreated afresh and any existing json data is overridden + """ + try: + xform: XForm = 
XForm.objects.get(pk=xform_id) + except XForm.DoesNotExist as err: + logging.exception(err) + + else: + if not xform.is_instance_json_regenerated: + instances = xform.instances.filter(deleted_at__isnull=True) + + for instance in queryset_iterator(instances): + # We do not want to trigger Model.save or any signal + # Queryset.update is a workaround to achieve this. + # Instance.save and the post/pre signals may contain + # some side-effects which we are not interested in e.g + # updating date_modified which we do not want + Instance.objects.filter(pk=instance.pk).update( + json=instance.get_full_dict() + ) + + xform.is_instance_json_regenerated = True + xform.save() + # Clear cache used to store the task id from the AsyncResult + cache_key = f"{XFORM_REGENERATE_INSTANCE_JSON_TASK}{xform_id}" + safe_delete(cache_key) diff --git a/onadata/apps/api/tests/fixtures/osm/osm.csv b/onadata/apps/api/tests/fixtures/osm/osm.csv index 4207557d14..f064b89130 100644 --- a/onadata/apps/api/tests/fixtures/osm/osm.csv +++ b/onadata/apps/api/tests/fixtures/osm/osm.csv @@ -1,2 +1,2 @@ photo,osm_road,osm_building,fav_color,form_completed,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received,osm_road:ctr:lat,osm_road:ctr:lon,osm_road:highway,osm_road:lanes,osm_road:name,osm_road:way:id,osm_building:building,osm_building:building:levels,osm_building:ctr:lat,osm_building:ctr:lon,osm_building:name,osm_building:way:id -1424308569120.jpg,OSMWay234134797.osm,OSMWay34298972.osm,red,2015-02-19T04:18:21.427+03,uuid:d3ef929e-e3e7-456c-9f27-7679c0074f4f,d3ef929e-e3e7-456c-9f27-7679c0074f4f,2013-02-18T15:54:01,,,201511091147,,bob,3,2,False,23.708174238006087,90.40946505581161,tertiary,2,Patuatuli Road,234134797,yes,4,23.707316084046038,90.40849938337506,kol,34298972 
+1424308569120.jpg,OSMWay234134797.osm,OSMWay34298972.osm,red,2015-02-19T04:18:21.427+03,uuid:d3ef929e-e3e7-456c-9f27-7679c0074f4f,d3ef929e-e3e7-456c-9f27-7679c0074f4f,2013-02-18T15:54:01+00:00,,,201511091147,,bob,3,2,False,23.708174238006087,90.40946505581161,tertiary,2,Patuatuli Road,234134797,yes,4,23.707316084046038,90.40849938337506,kol,34298972 diff --git a/onadata/apps/api/tests/test_tasks.py b/onadata/apps/api/tests/test_tasks.py index d1a1a3e648..868f0dc4b4 100644 --- a/onadata/apps/api/tests/test_tasks.py +++ b/onadata/apps/api/tests/test_tasks.py @@ -1,12 +1,16 @@ """Tests for module onadata.apps.api.tasks""" +import sys from unittest.mock import patch +from django.core.cache import cache + from onadata.apps.main.tests.test_base import TestBase from onadata.apps.api.tasks import ( send_project_invitation_email_async, + regenerate_form_instance_json, ) -from onadata.apps.logger.models import ProjectInvitation +from onadata.apps.logger.models import ProjectInvitation, Instance from onadata.libs.utils.user_auth import get_user_default_project from onadata.libs.utils.email import ProjectInvitationEmail @@ -30,3 +34,70 @@ def test_sends_email(self, mock_send): url = "https://example.com/register" send_project_invitation_email_async(self.invitation.id, url) mock_send.assert_called_once() + + +class RegenerateFormInstanceJsonTestCase(TestBase): + """Tests for regenerate_form_instance_json""" + + def test_regenerates_instances_json(self): + """Regenerates instances json""" + + def mock_get_full_dict(self): # pylint: disable=unused-argument + return {} + + with patch.object(Instance, "get_full_dict", mock_get_full_dict): + self._publish_transportation_form_and_submit_instance() + + cache_key = f"xfm-regenerate_instance_json_task-{self.xform.pk}" + cache.set(cache_key, "foo") + instance = self.xform.instances.first() + self.assertFalse(instance.json) + self.assertFalse(self.xform.is_instance_json_regenerated) + regenerate_form_instance_json.delay(self.xform.pk) + 
instance.refresh_from_db() + self.assertTrue(instance.json) + self.xform.refresh_from_db() + self.assertTrue(self.xform.is_instance_json_regenerated) + # task_id stored in cache should be deleted + self.assertIsNone(cache.get(cache_key)) + + def test_json_overriden(self): + """Existing json is overriden""" + + def mock_get_full_dict(self): # pylint: disable=unused-argument + return {"foo": "bar"} + + with patch.object(Instance, "get_full_dict", mock_get_full_dict): + self._publish_transportation_form_and_submit_instance() + + instance = self.xform.instances.first() + self.assertEqual(instance.json.get("foo"), "bar") + regenerate_form_instance_json.delay(self.xform.pk) + instance.refresh_from_db() + self.assertFalse("foo" in instance.json) + + @patch("logging.exception") + def test_form_id_invalid(self, mock_log_exception): + """An invalid xform_id is handled""" + + regenerate_form_instance_json.delay(sys.maxsize) + + mock_log_exception.assert_called_once() + + def test_already_generated(self): + """Regeneration fails for a form whose regeneration has already been done""" + + def mock_get_full_dict(self): # pylint: disable=unused-argument + return {} + + with patch.object(Instance, "get_full_dict", mock_get_full_dict): + self._publish_transportation_form_and_submit_instance() + + self.xform.is_instance_json_regenerated = True + self.xform.save() + instance = self.xform.instances.first() + self.assertFalse(instance.json) + self.assertTrue(self.xform.is_instance_json_regenerated) + regenerate_form_instance_json.delay(self.xform.pk) + instance.refresh_from_db() + self.assertFalse(instance.json) diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 67ed2f53a6..dcdc50a310 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -8,6 +8,7 @@ import json import logging import os +import pytz import csv from io import StringIO from 
builtins import open @@ -29,7 +30,7 @@ from django_digest.test import DigestAuth from flaky import flaky from httmock import HTTMock, urlmatch -from mock import patch +from mock import patch, Mock from onadata.apps.api.tests.viewsets.test_abstract_viewset import ( TestAbstractViewSet, @@ -606,7 +607,7 @@ def test_data_pagination(self): self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 4) - # Query param returns correct pagination headers + # Pagination works with "query" query parameter request = self.factory.get( "/", data={"page_size": "1", "query": "ambulance"}, **self.extra ) @@ -616,6 +617,48 @@ def test_data_pagination(self): self.assertEqual( response["Link"], ('; rel="next"') ) + self.assertEqual(len(response.data), 1) + # Pagination works with "sort" query parametr + instances = self.xform.instances.all().order_by("-date_modified") + self.assertEqual(instances.count(), 4) + request = self.factory.get( + "/", + data={"page": "1", "page_size": "2", "sort": '{"date_modified":-1}'}, + **self.extra, + ) + response = view(request, pk=formid) + self.assertEqual(response.status_code, 200) + self.assertIn("Link", response) + self.assertEqual( + response["Link"], ('; rel="next"') + ) + self.assertEqual(len(response.data), 2) + self.assertEqual(response.data[0]["_id"], instances[0].pk) + # Pagination works with multiple query params + instances = ( + self.xform.instances.all() + .order_by("-date_modified") + .extra(where=["json::text ~* cast(%s as text)"], params=["ambulance"]) + ) + self.assertEqual(instances.count(), 2) + request = self.factory.get( + "/", + data={ + "page": "1", + "page_size": "1", + "sort": '{"date_modified":-1}', + "query": "ambulance", + }, + **self.extra, + ) + response = view(request, pk=formid) + self.assertEqual(response.status_code, 200) + self.assertIn("Link", response) + self.assertEqual( + response["Link"], ('; rel="next"') + ) + self.assertEqual(len(response.data), 1) + 
self.assertEqual(response.data[0]["_id"], instances[0].pk) def test_sort_query_param_with_invalid_values(self): self._make_submissions() @@ -1070,27 +1113,27 @@ def test_filter_by_date_modified(self): self._make_submissions() view = DataViewSet.as_view({"get": "list"}) request = self.factory.get("/", **self.extra) - formid = self.xform.pk - instance = self.xform.instances.all().order_by("pk")[0] - response = view(request, pk=formid) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data), 4) - - instance = self.xform.instances.all().order_by("-date_created")[0] + instances = self.xform.instances.all().order_by("pk") + self.assertEqual(len(instances), 4) + instance = instances[2] date_modified = instance.date_modified.isoformat() - - query_str = '{"_date_modified": {"$gte": "%s"},' ' "_submitted_by": "%s"}' % ( - date_modified, - "bob", - ) + # greater than or equal to + query_str = '{"_date_modified": {"$gte": "%s"}}' % date_modified data = {"query": query_str} request = self.factory.get("/", data=data, **self.extra) - response = view(request, pk=formid) + response = view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 200) - expected_count = self.xform.instances.filter( - date_modified__gte=date_modified - ).count() - self.assertEqual(len(response.data), expected_count) + self.assertEqual(len(response.data), 2) + self.assertEqual(response.data[0]["_id"], instances[2].pk) + self.assertEqual(response.data[1]["_id"], instances[3].pk) + # greater than + query_str = '{"_date_modified": {"$gt": "%s"}}' % date_modified + data = {"query": query_str} + request = self.factory.get("/", data=data, **self.extra) + response = view(request, pk=self.xform.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + self.assertEqual(response.data[0]["_id"], instances[3].pk) def test_filter_by_submission_time_and_submitted_by_with_data_arg(self): self._make_submissions() @@ -2339,7 +2382,8 @@ def 
test_geotraces_in_repeats(self): | | end repeat | """ self.xform = self._publish_markdown( - md, self.user, self.project, id_string="geotraces") + md, self.user, self.project, id_string="geotraces" + ) # publish submissions self._publish_submit_geoms_in_repeats("Geotraces") view = DataViewSet.as_view({"get": "list"}) @@ -2400,7 +2444,8 @@ def test_geoshapes_in_repeats(self): | | end repeat | """ self.xform = self._publish_markdown( - md, self.user, self.project, id_string="geoshapes") + md, self.user, self.project, id_string="geoshapes" + ) # publish submissions self._publish_submit_geoms_in_repeats("Geoshapes") view = DataViewSet.as_view({"get": "list"}) @@ -3194,7 +3239,7 @@ def test_floip_format(self): floip_list = json.loads(response.content) self.assertTrue(isinstance(floip_list, list)) floip_row = [x for x in floip_list if x[-2] == "none"][0] - self.assertEqual(floip_row[0], response.data[0]["_submission_time"] + "+00:00") + self.assertEqual(floip_row[0], response.data[0]["_submission_time"]) self.assertEqual(floip_row[2], "bob") self.assertEqual(floip_row[3], response.data[0]["_uuid"]) self.assertEqual( @@ -3361,24 +3406,27 @@ def test_data_list_xml_format(self): """Test DataViewSet list XML""" # create submission media_file = "1335783522563.jpg" - self._make_submission_w_attachment( - os.path.join( - self.this_directory, - "fixtures", - "transportation", - "instances", - "transport_2011-07-25_19-05-49_2", - "transport_2011-07-25_19-05-49_2.xml", - ), - os.path.join( - self.this_directory, - "fixtures", - "transportation", - "instances", - "transport_2011-07-25_19-05-49_2", - media_file, - ), - ) + mocked_now = datetime.datetime(2023, 9, 20, 12, 49, 0, tzinfo=pytz.utc) + + with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): + self._make_submission_w_attachment( + os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + "transport_2011-07-25_19-05-49_2", + "transport_2011-07-25_19-05-49_2.xml", + ), + 
os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + "transport_2011-07-25_19-05-49_2", + media_file, + ), + ) view = DataViewSet.as_view({"get": "list"}) request = self.factory.get("/", **self.extra) @@ -3394,7 +3442,7 @@ def test_data_list_xml_format(self): returned_xml = response.content.decode("utf-8") server_time = ET.fromstring(returned_xml).attrib.get("serverTime") edited = instance.last_edited is not None - submission_time = instance.date_created.strftime(MONGO_STRFTIME) + submission_time = instance.date_created.isoformat() attachment = instance.attachments.first() expected_xml = ( '\n' diff --git a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py index 995db9d828..35d9f20f29 100644 --- a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py @@ -25,7 +25,7 @@ filter_to_field_lookup, get_field_lookup, get_filter_kwargs, - apply_filters + apply_filters, ) from onadata.apps.api.viewsets.note_viewset import NoteViewSet from onadata.libs.serializers.xform_serializer import XFormSerializer @@ -79,51 +79,36 @@ def test_create_dataview(self): self._create_dataview() def test_filter_to_field_lookup(self): - self.assertEqual( - filter_to_field_lookup("="), "__iexact" - ) - self.assertEqual( - filter_to_field_lookup("<"), "__lt" - ) - self.assertEqual( - filter_to_field_lookup(">"), "__gt" - ) + self.assertEqual(filter_to_field_lookup("="), "__iexact") + self.assertEqual(filter_to_field_lookup("<"), "__lt") + self.assertEqual(filter_to_field_lookup(">"), "__gt") def test_get_field_lookup(self): - self.assertEqual( - get_field_lookup("q1", "="), "json__q1__iexact" - ) - self.assertEqual( - get_field_lookup("q1", "<"), "json__q1__lt" - ) - self.assertEqual( - get_field_lookup("q1", ">"), "json__q1__gt" - ) + self.assertEqual(get_field_lookup("q1", "="), "json__q1__iexact") + 
self.assertEqual(get_field_lookup("q1", "<"), "json__q1__lt") + self.assertEqual(get_field_lookup("q1", ">"), "json__q1__gt") def test_get_filter_kwargs(self): self.assertEqual( get_filter_kwargs([{"value": 2, "column": "first_column", "filter": "<"}]), - {'json__first_column__lt': '2'} + {"json__first_column__lt": "2"}, ) self.assertEqual( get_filter_kwargs([{"value": 2, "column": "first_column", "filter": ">"}]), - {'json__first_column__gt': '2'} + {"json__first_column__gt": "2"}, ) self.assertEqual( get_filter_kwargs([{"value": 2, "column": "first_column", "filter": "="}]), - {'json__first_column__iexact': '2'} + {"json__first_column__iexact": "2"}, ) def test_apply_filters(self): # update these filters - filters = [{'value': 'orange', 'column': 'fruit', 'filter': '='}] + filters = [{"value": "orange", "column": "fruit", "filter": "="}] xml = 'orange' instance = Instance(xform=self.xform, xml=xml) instance.save() - self.assertEqual( - apply_filters(self.xform.instances, filters).first().xml, - xml - ) + self.assertEqual(apply_filters(self.xform.instances, filters).first().xml, xml) # delete instance instance.delete() @@ -172,68 +157,75 @@ def test_dataview_with_attachment_field(self): self.assertEqual("image/png", attachment_info.get("mimetype")) self.assertEqual( f"{self.user.username}/attachments/{self.xform.id}_{self.xform.id_string}/{media_file}", - attachment_info.get("filename"),) + attachment_info.get("filename"), + ) self.assertEqual(response.status_code, 200) # Attachment viewset works ok for filtered datasets attachment_list_view = AttachmentViewSet.as_view({"get": "list"}) request = self.factory.get("/?dataview=" + str(self.data_view.pk), **self.extra) response = attachment_list_view(request) - attachments = Attachment.objects.filter( - instance__xform=self.data_view.xform) + attachments = Attachment.objects.filter(instance__xform=self.data_view.xform) self.assertEqual(1, len(response.data)) - self.assertEqual(self.data_view.query, - [{'value': 'no', 
'column': 'pizza_fan', 'filter': '='}]) - serialized_attachments = AttachmentSerializer( - attachments, - many=True, context={'request': request}).data self.assertEqual( - serialized_attachments, - response.data) + self.data_view.query, + [{"value": "no", "column": "pizza_fan", "filter": "="}], + ) + serialized_attachments = AttachmentSerializer( + attachments, many=True, context={"request": request} + ).data + self.assertEqual(serialized_attachments, response.data) # create profile for alice - alice_data = {'username': 'alice', 'email': 'alice@localhost.com', - 'password1': 'alice', 'password2': 'alice', - 'first_name': 'Alice', 'last_name': 'A', - 'city': 'Nairobi', 'country': 'KE'} + alice_data = { + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", + "first_name": "Alice", + "last_name": "A", + "city": "Nairobi", + "country": "KE", + } alice_profile = self._create_user_profile(extra_post_data=alice_data) self.extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} # check that user with no permisisons can not list attachment objects request = self.factory.get("/?dataview=" + str(self.data_view.pk), **self.extra) response = attachment_list_view(request) - attachments = Attachment.objects.filter( - instance__xform=self.data_view.xform) + attachments = Attachment.objects.filter(instance__xform=self.data_view.xform) self.assertEqual(0, len(response.data)) - self.assertEqual(self.data_view.query, - [{'value': 'no', 'column': 'pizza_fan', 'filter': '='}]) self.assertEqual( - [], - response.data) + self.data_view.query, + [{"value": "no", "column": "pizza_fan", "filter": "="}], + ) + self.assertEqual([], response.data) # check that user with no permisisons can not view a specific attachment object attachment_list_view = AttachmentViewSet.as_view({"get": "retrieve"}) request = self.factory.get("/?dataview=" + str(self.data_view.pk), **self.extra) - response = attachment_list_view( - request, 
pk=attachments.first().pk) - self.assertEqual(self.data_view.query, - [{'value': 'no', 'column': 'pizza_fan', 'filter': '='}]) + response = attachment_list_view(request, pk=attachments.first().pk) + self.assertEqual( + self.data_view.query, + [{"value": "no", "column": "pizza_fan", "filter": "="}], + ) self.assertEqual(response.status_code, 404) response_data = json.loads(json.dumps(response.data)) - self.assertEqual(response_data, {'detail': 'Not found.'}) + self.assertEqual(response_data, {"detail": "Not found."}) # a user with permissions can view a specific attachment object attachment_list_view = AttachmentViewSet.as_view({"get": "retrieve"}) self.extra = {"HTTP_AUTHORIZATION": f"Token {self.user.auth_token}"} request = self.factory.get("/?dataview=" + str(self.data_view.pk), **self.extra) - response = attachment_list_view( - request, pk=attachments.first().pk) - self.assertEqual(self.data_view.query, - [{'value': 'no', 'column': 'pizza_fan', 'filter': '='}]) + response = attachment_list_view(request, pk=attachments.first().pk) + self.assertEqual( + self.data_view.query, + [{"value": "no", "column": "pizza_fan", "filter": "="}], + ) self.assertEqual(response.status_code, 200) serialized_attachment = AttachmentSerializer( - attachments.first(), - context={'request': request}).data + attachments.first(), context={"request": request} + ).data self.assertEqual(response.data, serialized_attachment) # pylint: disable=invalid-name @@ -309,7 +301,9 @@ def test_get_dataview(self): response.data["url"], f"http://testserver/api/v1/dataviews/{self.data_view.pk}", ) - self.assertEqual(response.data["last_submission_time"], "2015-03-09T13:34:05") + self.assertEqual( + response.data["last_submission_time"], "2015-03-09T13:34:05.537766+00:00" + ) # Public self.project.shared = True @@ -1011,9 +1005,7 @@ def test_xlsx_export_with_choice_labels(self, async_result): "xform": f"http://testserver/api/v1/forms/{xform.pk}", "project": 
f"http://testserver/api/v1/projects/{project.pk}", "columns": '["name", "age", "gender", "pizza_type"]', - "query": ( - '[{"column":"age","filter":"=","value":"28"}]' - ), + "query": ('[{"column":"age","filter":"=","value":"28"}]'), } self._create_dataview(data=data) @@ -1023,10 +1015,7 @@ def test_xlsx_export_with_choice_labels(self, async_result): } ) - data = { - "format": "xlsx", - "show_choice_labels": "true" - } + data = {"format": "xlsx", "show_choice_labels": "true"} request = self.factory.get("/", data=data, **self.extra) response = view(request, pk=self.data_view.pk) @@ -1060,27 +1049,27 @@ def test_xlsx_export_with_choice_labels(self, async_result): sheet_data = list(main_sheet.values)[1] inst = self.xform.instances.get(id=sheet_data[4]) expected_headers = ( - 'name', - 'age', - 'gender', - 'pizza_type', - '_id', - '_uuid', - '_submission_time', - '_index', - '_parent_table_name', - '_parent_index', - '_tags', - '_notes', - '_version', - '_duration', - '_submitted_by', + "name", + "age", + "gender", + "pizza_type", + "_id", + "_uuid", + "_submission_time", + "_index", + "_parent_table_name", + "_parent_index", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", ) expected_data = ( - 'Dennis Wambua', + "Dennis Wambua", 28, - 'Male', - 'New York think crust!', + "Male", + "New York think crust!", inst.id, inst.uuid, inst.date_created.replace(microsecond=0, tzinfo=None), @@ -1089,7 +1078,7 @@ def test_xlsx_export_with_choice_labels(self, async_result): -1, None, None, - '4444', + "4444", 50, inst.user.username, ) @@ -1111,9 +1100,7 @@ def test_csv_export_with_choice_labels(self, async_result): "xform": f"http://testserver/api/v1/forms/{xform.pk}", "project": f"http://testserver/api/v1/projects/{project.pk}", "columns": '["name", "age", "gender", "pizza_type"]', - "query": ( - '[{"column":"age","filter":"=","value":"28"}]' - ), + "query": ('[{"column":"age","filter":"=","value":"28"}]'), } self._create_dataview(data=data) @@ -1123,10 
+1110,7 @@ def test_csv_export_with_choice_labels(self, async_result): } ) - data = { - "format": "csv", - "show_choice_labels": "true" - } + data = {"format": "csv", "show_choice_labels": "true"} request = self.factory.get("/", data=data, **self.extra) response = view(request, pk=self.data_view.pk) @@ -1153,13 +1137,8 @@ def test_csv_export_with_choice_labels(self, async_result): export = Export.objects.get(task_id=task_id) self.assertTrue(export.is_successful) with default_storage.open(export.filepath, "r") as f: - expected_data = [ - 'Dennis Wambua', - '28', - 'Male', - 'New York think crust!' - ] - expected_headers = ['name', 'age', 'gender', 'pizza_type'] + expected_data = ["Dennis Wambua", "28", "Male", "New York think crust!"] + expected_headers = ["name", "age", "gender", "pizza_type"] csv_reader = csv.reader(f) headers = next(csv_reader) self.assertEqual(expected_headers, headers) @@ -1285,7 +1264,6 @@ def test_get_charts_data_for_grouped_field(self): # pylint: disable=invalid-name def test_get_charts_data_field_not_in_dataview_columns(self): - self._create_dataview() self.view = DataViewViewSet.as_view( { @@ -1426,54 +1404,59 @@ def test_geopoint_submission_dataview(self): # geojson pagination, fields and geofield params works ok request = self.factory.get( - "/?geofield=_geolocation&page=1&page_size=1&fields=name", - **self.extra) - response = view(request, pk=self.data_view.pk, format='geojson') + "/?geofield=_geolocation&page=1&page_size=1&fields=name", **self.extra + ) + response = view(request, pk=self.data_view.pk, format="geojson") # we get correct content type headers = dict(response.items()) self.assertEqual(headers["Content-Type"], "application/geo+json") self.assertEqual(response.status_code, 200) - del response.data['features'][0]['properties']['xform'] - del response.data['features'][0]['properties']['id'] + del response.data["features"][0]["properties"]["xform"] + del response.data["features"][0]["properties"]["id"] self.assertEqual( - 
{'type': 'FeatureCollection', - 'features': [ - {'type': 'Feature', - 'geometry': None, - 'properties': {'name': 'Kameli'}}]}, - response.data + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": None, + "properties": {"name": "Kameli"}, + } + ], + }, + response.data, ) request = self.factory.get( - "/?geofield=_geolocation&page=9&page_size=1&fields=name", - **self.extra) - response = view(request, pk=self.data_view.pk, format='geojson') + "/?geofield=_geolocation&page=9&page_size=1&fields=name", **self.extra + ) + response = view(request, pk=self.data_view.pk, format="geojson") self.assertEqual(response.status_code, 200) - del response.data['features'][0]['properties']['xform'] - del response.data['features'][0]['properties']['id'] + del response.data["features"][0]["properties"]["xform"] + del response.data["features"][0]["properties"]["id"] self.assertEqual( - {'type': 'FeatureCollection', - 'features': - [ - {'type': 'Feature', - 'geometry': - {'type': - 'GeometryCollection', - 'geometries': - [ - {'type': 'Point', - 'coordinates': [36.8304, -1.2655]}]}, - 'properties': {'name': 'Kameli'}}]}, - response.data + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "GeometryCollection", + "geometries": [ + {"type": "Point", "coordinates": [36.8304, -1.2655]} + ], + }, + "properties": {"name": "Kameli"}, + } + ], + }, + response.data, ) request = self.factory.get( - "/?geofield=_geolocation&page=10&page_size=1&fields=name", - **self.extra) - response = view(request, pk=self.data_view.pk, format='geojson') - self.assertEqual(response.status_code, 404) - self.assertEqual( - {'detail': 'Invalid page.'}, - response.data + "/?geofield=_geolocation&page=10&page_size=1&fields=name", **self.extra ) + response = view(request, pk=self.data_view.pk, format="geojson") + self.assertEqual(response.status_code, 404) + self.assertEqual({"detail": "Invalid page."}, response.data) # pylint: 
disable=invalid-name def test_dataview_project_cache_cleared(self): @@ -1536,10 +1519,12 @@ def test_dataview_update_refreshes_cached_data(self): response = self.view(request, pk=self.data_view.pk) expected_count = 3 - expected_last_submission_time = "2015-03-09T13:34:05" + expected_last_submission_time = "2015-03-09T13:34:05.537766+00:00" self.assertEqual(response.data["count"], expected_count) - self.assertEqual(response.data["last_submission_time"], "2015-03-09T13:34:05") + self.assertEqual( + response.data["last_submission_time"], "2015-03-09T13:34:05.537766+00:00" + ) cache_dict = cache.get(f"{DATAVIEW_COUNT}{self.data_view.xform.pk}") self.assertEqual(cache_dict.get(self.data_view.pk), expected_count) @@ -1878,11 +1863,11 @@ def test_export_xls_dataview_with_date_filter(self, async_result): first_datetime = start_date.strftime(MONGO_STRFTIME) second_datetime = start_date + timedelta(days=1, hours=20) query_str = ( - '{"_submission_time": {"$gte": "' + - first_datetime + - '", "$lte": "' + - second_datetime.strftime(MONGO_STRFTIME) + - '"}}' + '{"_submission_time": {"$gte": "' + + first_datetime + + '", "$lte": "' + + second_datetime.strftime(MONGO_STRFTIME) + + '"}}' ) view = DataViewViewSet.as_view( @@ -1941,11 +1926,11 @@ def test_csv_export_dataview_date_filter(self): first_datetime = start_date.strftime(MONGO_STRFTIME) second_datetime = start_date + timedelta(days=1, hours=20) query_str = ( - '{"_submission_time": {"$gte": "' + - first_datetime + - '", "$lte": "' + - second_datetime.strftime(MONGO_STRFTIME) + - '"}}' + '{"_submission_time": {"$gte": "' + + first_datetime + + '", "$lte": "' + + second_datetime.strftime(MONGO_STRFTIME) + + '"}}' ) count = Export.objects.all().count() @@ -1984,11 +1969,11 @@ def test_csv_export_async_dataview_date_filter(self, async_result): first_datetime = start_date.strftime(MONGO_STRFTIME) second_datetime = start_date + timedelta(days=1, hours=20) query_str = ( - '{"_submission_time": {"$gte": "' + - first_datetime + - 
'", "$lte": "' + - second_datetime.strftime(MONGO_STRFTIME) + - '"}}' + '{"_submission_time": {"$gte": "' + + first_datetime + + '", "$lte": "' + + second_datetime.strftime(MONGO_STRFTIME) + + '"}}' ) count = Export.objects.all().count() diff --git a/onadata/apps/api/tests/viewsets/test_stats_viewset.py b/onadata/apps/api/tests/viewsets/test_stats_viewset.py index 83e395102d..1c21c2bbdc 100644 --- a/onadata/apps/api/tests/viewsets/test_stats_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_stats_viewset.py @@ -4,7 +4,6 @@ from django.core.files.base import ContentFile from django.test import RequestFactory from builtins import open -from mock import patch from onadata.apps.main.tests.test_base import TestBase from onadata.apps.api.viewsets.stats_viewset import StatsViewSet @@ -21,9 +20,7 @@ def setUp(self): self.factory = RequestFactory() self.extra = {"HTTP_AUTHORIZATION": "Token %s" % self.user.auth_token} - @patch("onadata.apps.logger.models.instance.submission_time") - def test_submissions_stats(self, mock_time): - self._set_mock_time(mock_time) + def test_submissions_stats(self): self._publish_transportation_form() self._make_submissions() view = SubmissionStatsViewSet.as_view({"get": "list"}) @@ -62,9 +59,7 @@ def test_submissions_stats(self, mock_time): self.assertDictContainsSubset(data, response.data[0]) - @patch("onadata.apps.logger.models.instance.submission_time") - def test_submissions_stats_with_xform_in_delete_async_queue(self, mock_time): - self._set_mock_time(mock_time) + def test_submissions_stats_with_xform_in_delete_async_queue(self): self._publish_transportation_form() self._make_submissions() view = SubmissionStatsViewSet.as_view({"get": "list"}) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 4c36f6cb38..3345631f4b 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -8,6 +8,7 @@ 
import csv import json import os +import pytz import re from builtins import open from collections import OrderedDict @@ -2240,6 +2241,7 @@ def test_xform_serializer_none(self): "instances_with_geopoints": False, "has_hxl_support": False, "hash": "", + "is_instance_json_regenerated": False, } self.assertEqual(data, XFormSerializer(None).data) @@ -3837,18 +3839,24 @@ def test_csv_export_with_win_excel_utf8(self): self._publish_xls_form_to_project(xlsform_path=xlsform_path) # submit one hxl instance _submission_time = parse_datetime("2013-02-18 15:54:01Z") - self._make_submission( - os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "hxl_test", - "hxl_example_2.xml", - ), - forced_submission_time=_submission_time, - ) + mock_date_modified = datetime(2023, 9, 20, 11, 41, 0, tzinfo=pytz.utc) + + with patch( + "django.utils.timezone.now", Mock(return_value=mock_date_modified) + ): + self._make_submission( + os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "hxl_test", + "hxl_example_2.xml", + ), + forced_submission_time=_submission_time, + ) + self.assertTrue(self.xform.has_hxl_support) view = XFormViewSet.as_view({"get": "retrieve"}) @@ -3866,7 +3874,7 @@ def test_csv_export_with_win_excel_utf8(self): instance = self.xform.instances.first() data_id, date_modified = ( instance.pk, - instance.date_modified.strftime(MONGO_STRFTIME), + mock_date_modified.isoformat(), ) content = get_response_content(response) @@ -3877,7 +3885,7 @@ def test_csv_export_with_win_excel_utf8(self): "_total_media,_media_count,_media_all_received\n\ufeff#age" ",,,,,,,,,,,,,,\n\ufeff" "38,CR7,uuid:74ee8b73-48aa-4ced-9089-862f93d49c16," - "%s,74ee8b73-48aa-4ced-9089-862f93d49c16,2013-02-18T15:54:01," + "%s,74ee8b73-48aa-4ced-9089-862f93d49c16,2013-02-18T15:54:01+00:00," "%s,,,201604121155,,bob,0,0,True\n" % (data_id, date_modified) ) self.assertEqual(content, expected_content) @@ -3885,7 +3893,7 @@ def 
test_csv_export_with_win_excel_utf8(self): self.assertEqual(headers["Content-Type"], "application/csv") content_disposition = headers["Content-Disposition"] filename = filename_from_disposition(content_disposition) - basename, ext = os.path.splitext(filename) + _, ext = os.path.splitext(filename) self.assertEqual(ext, ".csv") # sort csv data in ascending order data = {"win_excel_utf8": False} @@ -3900,7 +3908,7 @@ def test_csv_export_with_win_excel_utf8(self): "tted_by,_total_media,_media_count,_media_all_received\n" "#age,,,,,,,,,,,,,,\n" "38,CR7,uuid:74ee8b73-48aa-4ced-9089-862f93d49c16" - ",%s,74ee8b73-48aa-4ced-9089-862f93d49c16,2013-02-18T15:54:01," + ",%s,74ee8b73-48aa-4ced-9089-862f93d49c16,2013-02-18T15:54:01+00:00," "%s,,,201604121155,,bob,0,0,True\n" % (data_id, date_modified) ) @@ -3967,7 +3975,7 @@ def test_csv_export_with_and_without_include_hxl(self): "_date_modified,_tags,_notes,_version,_duration,_submitted_by," "_total_media,_media_count,_media_all_received\n" "29,Lionel Messi,uuid:74ee8b73-48aa-4ced-9072-862f93d49c16," - f"{data_id},74ee8b73-48aa-4ced-9072-862f93d49c16,2013-02-18T15:54:01," + f"{data_id},74ee8b73-48aa-4ced-9072-862f93d49c16,2013-02-18T15:54:01+00:00," f"{date_modified},,,201604121155,,bob,0,0,True\n" ) self.assertEqual(expected_content, content) @@ -3990,7 +3998,7 @@ def test_csv_export_with_and_without_include_hxl(self): "_media_all_received\n" "#age,,,,,,,,,,,,,,\n" "29,Lionel Messi,uuid:74ee8b73-48aa-4ced-9072-862f93d49c16," - "%s,74ee8b73-48aa-4ced-9072-862f93d49c16,2013-02-18T15:54:01" + "%s,74ee8b73-48aa-4ced-9072-862f93d49c16,2013-02-18T15:54:01+00:00" ",%s,,,201604121155,,bob,0,0,True\n" % (data_id, date_modified) ) self.assertEqual(expected_content, content) @@ -4436,7 +4444,10 @@ def test_csv_export_filtered_by_date(self): "transportation_filtered_date.csv", ) - expected_submission = ["2015-12-02T00:00:00", "2015-12-03T00:00:00"] + expected_submission = [ + "2015-12-02T00:00:00+00:00", + "2015-12-03T00:00:00+00:00", + ] 
self._validate_csv_export( response, test_file_path, "_submission_time", expected_submission ) @@ -5009,9 +5020,9 @@ def test_xlsx_import(self): self.assertEqual( self.xform.instances.values("json___submission_time")[::1], [ - {"json___submission_time": "2023-02-03T10:27:41"}, - {"json___submission_time": "2023-02-03T10:27:42"}, - {"json___submission_time": "2023-03-13T08:42:57"}, + {"json___submission_time": "2023-02-03T10:27:41+00:00"}, + {"json___submission_time": "2023-02-03T10:27:42+00:00"}, + {"json___submission_time": "2023-03-13T08:42:57+00:00"}, ], ) self.assertEqual(response.status_code, 200) diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 8ed0d70765..693f9ef86a 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -43,6 +43,7 @@ get_where_clause, query_data, query_fields_data, + query_count, _get_sort_fields, ParsedInstance, ) @@ -497,7 +498,8 @@ def _set_pagination_headers( query = self.request.query_params.get("query") base_url = url.split("?")[0] if query: - num_of_records = self.object_list.count() + query = self._parse_query(query) + num_of_records = query_count(xform, query=query) else: num_of_records = xform.num_of_submissions next_page_url = None @@ -660,6 +662,12 @@ def list(self, request, *args, **kwargs): return custom_response_handler(request, xform, query, export_type) + def _parse_query(self, query): + """Parse `query` query parameter""" + return filter_queryset_xform_meta_perms_sql( + self.get_object(), self.request.user, query + ) + # pylint: disable=too-many-arguments def set_object_list(self, query, fields, sort, start, limit, is_public_request): """ @@ -694,9 +702,7 @@ def set_object_list(self, query, fields, sort, start, limit, is_public_request): self.object_list = self.object_list[start_index:end_index] elif (sort or limit or start or fields) and not is_public_request: try: - query = filter_queryset_xform_meta_perms_sql( - 
self.get_object(), self.request.user, query - ) + query = self._parse_query(query) # pylint: disable=protected-access has_json_fields = sort and ParsedInstance._has_json_fields( _get_sort_fields(sort) @@ -800,22 +806,23 @@ def _get_data(self, query, fields, sort, start, limit, is_public_request): xform = self.get_object() num_of_submissions = xform.num_of_submissions should_paginate = num_of_submissions > retrieval_threshold + if should_paginate: self.paginator.page_size = retrieval_threshold - if not isinstance(self.object_list, types.GeneratorType) and should_paginate: + if should_paginate: query_param_keys = self.request.query_params current_page = query_param_keys.get(self.paginator.page_query_param, 1) current_page_size = query_param_keys.get( self.paginator.page_size_query_param, retrieval_threshold ) - self._set_pagination_headers( self.get_object(), current_page=current_page, current_page_size=current_page_size, ) + if not isinstance(self.object_list, types.GeneratorType) and should_paginate: try: # pylint: disable=attribute-defined-outside-init self.object_list = self.paginate_queryset(self.object_list) @@ -824,6 +831,7 @@ def _get_data(self, query, fields, sort, start, limit, is_public_request): self.object_list = self.paginate_queryset(self.object_list) stream_data = getattr(settings, "STREAM_DATA", False) + if stream_data: response = self._get_streaming_response() else: diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index 6620fd2f91..cfeff8c041 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -83,7 +83,10 @@ response_for_format, _get_export_type, ) -from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_delete +from onadata.libs.utils.cache_tools import ( + PROJ_OWNER_CACHE, + safe_delete, +) from onadata.libs.utils.common_tools import json_stream from onadata.libs.utils.csv_import import ( get_async_csv_submission_status, diff 
--git a/onadata/apps/logger/management/commands/recover_deleted_attachments.py b/onadata/apps/logger/management/commands/recover_deleted_attachments.py index 3199f68ec1..bf1f332e66 100644 --- a/onadata/apps/logger/management/commands/recover_deleted_attachments.py +++ b/onadata/apps/logger/management/commands/recover_deleted_attachments.py @@ -35,7 +35,7 @@ def recover_deleted_attachments(form_id: str, stdout=None): if stdout: stdout.write(f"Recovered {attachment.name} ID: {attachment.id}") # Regenerate instance JSON - instance.json = instance.get_full_dict(load_existing=False) + instance.json = instance.get_full_dict() instance.save() diff --git a/onadata/apps/logger/management/commands/regenerate_instance_json.py b/onadata/apps/logger/management/commands/regenerate_instance_json.py new file mode 100644 index 0000000000..a0f5f6c089 --- /dev/null +++ b/onadata/apps/logger/management/commands/regenerate_instance_json.py @@ -0,0 +1,78 @@ +""" +Management command python manage.py regenerate_instance_json + +Regenerates a form's instances json asynchronously +""" + +from celery.result import AsyncResult + +from django.core.management.base import BaseCommand, CommandError +from django.core.cache import cache + +from onadata.apps.api.tasks import regenerate_form_instance_json +from onadata.apps.logger.models import XForm +from onadata.libs.utils.cache_tools import ( + XFORM_REGENERATE_INSTANCE_JSON_TASK, + XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL, +) + + +class Command(BaseCommand): + """Regenerate a form's instances json + + Json data recreated afresh and any existing json data is overridden + + Usage: + python manage.py regenerate_instance_json e.g. + python manage.py regenerate_instance_json 689 567 453 + """ + + help = "Regenerate a form's instances json" + + def add_arguments(self, parser): + parser.add_argument("form_ids", nargs="+", type=int) + + def handle(self, *args, **options): + for form_id in options["form_ids"]: + try: + xform: XForm =
XForm.objects.get(pk=form_id) + + except XForm.DoesNotExist: + raise CommandError( # pylint: disable=raise-missing-from + f"Form {form_id} does not exist" + ) + + self._regenerate_instance_json(xform) + + def _regenerate_instance_json(self, xform: XForm): + if xform.is_instance_json_regenerated: + # Async task completed successfully + self.stdout.write( + self.style.SUCCESS(f"Regeneration for {xform.pk} COMPLETE") + ) + return + + cache_key = f"{XFORM_REGENERATE_INSTANCE_JSON_TASK}{xform.pk}" + cached_task_id: str | None = cache.get(cache_key) + + if cached_task_id and AsyncResult(cached_task_id).state.upper() != "FAILURE": + # FAILURE is the only state that should trigger regeneration if + # a regeneration had earlier been triggered + self.stdout.write( + self.style.WARNING(f"Regeneration for {xform.pk} IN PROGRESS") + ) + return + + # Task has either failed or does not exist in cache, we create a new async task + # Celery backend expires the result after 1 day (24hrs) as outlined in the docs, + # https://docs.celeryq.dev/en/latest/userguide/configuration.html#result-expires + # If after 1 day you create an AsyncResult, the status will be PENDING. 
+ # We therefore set the cache timeout to 1 day same as the Celery backend result + # expiry timeout + result: AsyncResult = regenerate_form_instance_json.apply_async(args=[xform.pk]) + cache.set( + cache_key, + result.task_id, + XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL, + ) + self.stdout.write(f"Regeneration for {xform.pk} STARTED") diff --git a/onadata/apps/logger/management/commands/tests/__init__.py b/onadata/apps/logger/management/commands/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/onadata/apps/logger/management/commands/tests/test_regenerate_instance_json.py b/onadata/apps/logger/management/commands/tests/test_regenerate_instance_json.py new file mode 100644 index 0000000000..eb643d7c0b --- /dev/null +++ b/onadata/apps/logger/management/commands/tests/test_regenerate_instance_json.py @@ -0,0 +1,122 @@ +"""Tests for management command regenerate_instance_json""" +from io import StringIO + +from unittest.mock import patch, call + +from celery.result import AsyncResult + +from django.core.management import call_command +from django.core.cache import cache + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.logger.models import XForm + +# pylint: disable=line-too-long + + +class RegenerateInstanceJsonTestCase(TestBase): + """Tests for management command regenerate_instance_json""" + + def setUp(self): + super().setUp() + + self._publish_transportation_form() + self._make_submissions() + self.cache_key = f"xfm-regenerate_instance_json_task-{self.xform.pk}" + self.out = StringIO() + + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_regenerates_instance_json(self, mock_regenerate): + """Json data for form submissions is regenerated + + Regeneration should be asynchronous + """ + task_id = "f78ef7bb-873f-4a28-bc8a-865da43a741f" + mock_async_result = AsyncResult(task_id) + mock_regenerate.apply_async.return_value = mock_async_result + 
call_command("regenerate_instance_json", (self.xform.pk), stdout=self.out) + self.assertIn(f"Regeneration for {self.xform.pk} STARTED", self.out.getvalue()) + mock_regenerate.apply_async.assert_called_once_with(args=[self.xform.pk]) + self.assertEqual(cache.get(self.cache_key), task_id) + + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_multiple_form_ids(self, mock_regenerate): + """Command supports multiple forms""" + self._publish_xlsx_file_with_external_choices() + form2 = XForm.objects.all()[1] + mock_regenerate.apply_async.side_effect = [ + AsyncResult("f78ef7bb-873f-4a28-bc8a-865da43a741f"), + AsyncResult("ca760839-d2d9-4244-938f-e884880ac0b4"), + ] + call_command( + "regenerate_instance_json", (self.xform.pk, form2.pk), stdout=self.out + ) + self.assertIn(f"Regeneration for {self.xform.pk} STARTED", self.out.getvalue()) + self.assertIn(f"Regeneration for {form2.pk} STARTED", self.out.getvalue()) + mock_regenerate.apply_async.assert_has_calls( + [call(args=[self.xform.pk]), call(args=[form2.pk])] + ) + self.assertEqual( + cache.get(self.cache_key), "f78ef7bb-873f-4a28-bc8a-865da43a741f" + ) + form2_cache = f"xfm-regenerate_instance_json_task-{form2.pk}" + self.assertEqual(cache.get(form2_cache), "ca760839-d2d9-4244-938f-e884880ac0b4") + + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_no_duplicate_work(self, mock_regenerate): + """If a regeneration finished successfully, we do not run it again""" + self.xform.is_instance_json_regenerated = True + self.xform.save() + call_command("regenerate_instance_json", (self.xform.pk), stdout=self.out) + self.assertIn(f"Regeneration for {self.xform.pk} COMPLETE", self.out.getvalue()) + mock_regenerate.apply_async.assert_not_called() + self.assertFalse(cache.get(self.cache_key)) + + def _mock_get_task_meta_failure(self) -> dict[str, str]: + return {"status": "FAILURE"} + + 
@patch.object(AsyncResult, "_get_task_meta", _mock_get_task_meta_failure) + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_task_state_failed(self, mock_regenerate): + """We regenerate if old celery task failed""" + old_task_id = "796dc413-e6ea-42b8-b658-e4ac9e22b02b" + cache.set(self.cache_key, old_task_id) + new_task_id = "f78ef7bb-873f-4a28-bc8a-865da43a741f" + mock_async_result = AsyncResult(new_task_id) + mock_regenerate.apply_async.return_value = mock_async_result + call_command("regenerate_instance_json", (self.xform.pk), stdout=self.out) + self.assertIn(f"Regeneration for {self.xform.pk} STARTED", self.out.getvalue()) + mock_regenerate.apply_async.assert_called_once_with(args=[self.xform.pk]) + self.assertEqual(cache.get(self.cache_key), new_task_id) + + def _mock_get_task_meta_non_failure(self) -> dict[str, str]: + return {"status": "FOO"} + + @patch.object(AsyncResult, "_get_task_meta", _mock_get_task_meta_non_failure) + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_task_state_not_failed(self, mock_regenerate): + """We do not regenerate if last celery task is in a state other than FAILURE + + FAILURE is the only state that should trigger regeneration if a regeneration + had earlier been triggered + """ + old_task_id = "796dc413-e6ea-42b8-b658-e4ac9e22b02b" + cache.set(self.cache_key, old_task_id) + mock_async_result = AsyncResult(old_task_id) + mock_regenerate.apply_async.return_value = mock_async_result + call_command("regenerate_instance_json", (self.xform.pk), stdout=self.out) + self.assertIn( + f"Regeneration for {self.xform.pk} IN PROGRESS", self.out.getvalue() + ) + mock_regenerate.apply_async.assert_not_called() + self.assertEqual(cache.get(self.cache_key), old_task_id) diff --git a/onadata/apps/logger/migrations/0001_pre-django-3-upgrade.py 
b/onadata/apps/logger/migrations/0001_pre-django-3-upgrade.py index 74bf18b414..567325de78 100644 --- a/onadata/apps/logger/migrations/0001_pre-django-3-upgrade.py +++ b/onadata/apps/logger/migrations/0001_pre-django-3-upgrade.py @@ -59,7 +59,7 @@ def regenerate_instance_json(apps, schema_editor): xform__downloadable=True, xform__deleted_at__isnull=True, ): - inst.json = inst.get_full_dict(load_existing=False) + inst.json = inst.get_full_dict() inst.save() @@ -1003,7 +1003,7 @@ class Migration(migrations.Migration): to=settings.AUTH_USER_MODEL, ), ), - migrations.RunPython(recalculate_xform_hash), + # migrations.RunPython(recalculate_xform_hash), migrations.AddField( model_name="instance", name="deleted_by", @@ -1223,8 +1223,8 @@ class Migration(migrations.Migration): name="uuid", field=models.CharField(db_index=True, default="", max_length=36), ), - migrations.RunPython(generate_uuid_if_missing), - migrations.RunPython(regenerate_instance_json), + # migrations.RunPython(generate_uuid_if_missing), + # migrations.RunPython(regenerate_instance_json), migrations.CreateModel( name="XFormVersion", fields=[ @@ -1264,5 +1264,5 @@ class Migration(migrations.Migration): "unique_together": {("xform", "version")}, }, ), - migrations.RunPython(create_initial_xform_version), + # migrations.RunPython(create_initial_xform_version), ] diff --git a/onadata/apps/logger/migrations/0009_auto_20230914_0927.py b/onadata/apps/logger/migrations/0009_auto_20230914_0927.py new file mode 100644 index 0000000000..a6e95ad0d1 --- /dev/null +++ b/onadata/apps/logger/migrations/0009_auto_20230914_0927.py @@ -0,0 +1,17 @@ +# Generated by Django 3.2.20 on 2023-09-14 13:27 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("logger", "0008_add_date_fields_indexing"), + ] + + operations = [ + migrations.AddField( + model_name="xform", + name="is_instance_json_regenerated", + field=models.BooleanField(default=False), + ), + ] diff --git 
a/onadata/apps/logger/migrations/0010_auto_20230921_0346.py b/onadata/apps/logger/migrations/0010_auto_20230921_0346.py new file mode 100644 index 0000000000..2dadd0a4ff --- /dev/null +++ b/onadata/apps/logger/migrations/0010_auto_20230921_0346.py @@ -0,0 +1,27 @@ +# Generated by Django 3.2.20 on 2023-09-21 07:46 + +from django.db import migrations, models +import django.utils.timezone + + +class Migration(migrations.Migration): + dependencies = [ + ("logger", "0009_auto_20230914_0927"), + ] + + operations = [ + migrations.AlterField( + model_name="instance", + name="date_created", + field=models.DateTimeField( + blank=True, default=django.utils.timezone.now, editable=False + ), + ), + migrations.AlterField( + model_name="instance", + name="date_modified", + field=models.DateTimeField( + blank=True, default=django.utils.timezone.now, editable=False + ), + ), + ] diff --git a/onadata/apps/logger/migrations/0062_auto_20210202_0248.py b/onadata/apps/logger/migrations/0062_auto_20210202_0248.py index 0dcfa1989d..01a08b6c8f 100644 --- a/onadata/apps/logger/migrations/0062_auto_20210202_0248.py +++ b/onadata/apps/logger/migrations/0062_auto_20210202_0248.py @@ -14,12 +14,11 @@ def regenerate_instance_json(apps, schema_editor): xform__downloadable=True, xform__deleted_at__isnull=True, ): - inst.json = inst.get_full_dict(load_existing=False) + inst.json = inst.get_full_dict() inst.save() class Migration(migrations.Migration): - dependencies = [ ("logger", "0061_auto_20200713_0814"), ] diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 840d5e6aaf..c28b50ea61 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -60,7 +60,6 @@ LAST_EDITED, MEDIA_ALL_RECEIVED, MEDIA_COUNT, - MONGO_STRFTIME, NOTES, REVIEW_COMMENT, REVIEW_DATE, @@ -183,7 +182,7 @@ def get_id_string_from_xml_str(xml_str): return id_string -def submission_time(): +def now(): """Returns current timestamp via 
timezone.now().""" return timezone.now() @@ -237,7 +236,6 @@ def update_xform_submission_count_async(self, instance_id, created): def update_xform_submission_count(instance_id, created): """Updates the XForm submissions count on a new submission being created.""" if created: - # pylint: disable=import-outside-toplevel from multidb.pinning import use_master @@ -321,12 +319,12 @@ def _update_xform_submission_count_delete(instance): # update xform if no instance has geoms if ( - instance.xform.instances.filter( - deleted_at__isnull=True - ).exclude(geom=None).count() < - 1 + instance.xform.instances.filter(deleted_at__isnull=True) + .exclude(geom=None) + .count() + < 1 ): - if (instance.xform.polygon_xpaths() or instance.xform.geotrace_xpaths()): + if instance.xform.polygon_xpaths() or instance.xform.geotrace_xpaths(): instance.xform.instances_with_geopoints = True else: instance.xform.instances_with_geopoints = False @@ -341,32 +339,28 @@ def update_xform_submission_count_delete(sender, instance, **kwargs): @app.task(bind=True, max_retries=3) -def save_full_json_async(self, instance_id, created): +def save_full_json_async(self, instance_id): """ Celery task to asynchrounously generate and save an Instances JSON once a submission has been made """ try: - save_full_json(instance_id, created) + instance = Instance.objects.get(pk=instance_id) except Instance.DoesNotExist as e: if self.request.retries > 2: msg = f"Failed to save full JSON for Instance {instance_id}" report_exception(msg, e, sys.exc_info()) self.retry(exc=e, countdown=60 * self.request.retries) + else: + save_full_json(instance) -def save_full_json(instance_id, created): - """set json data, ensure the primary key is part of the json data""" - if created: - try: - instance = Instance.objects.get(pk=instance_id) - except Instance.DoesNotExist as e: - # Retry if run asynchrounously - if current_task.request.id: - raise e - else: - instance.json = instance.get_full_dict() - 
instance.save(update_fields=["json"]) +def save_full_json(instance: "Instance"): + """Save full json dict""" + # Queryset.update ensures the model's save is not called and + # the pre_save and post_save signals aren't sent + json = instance.get_full_dict() + Instance.objects.filter(pk=instance.pk).update(json=json, version=json.get(VERSION)) @app.task(bind=True, max_retries=3) @@ -474,16 +468,12 @@ def _set_geom(self): else: self.geom = None - def _set_json(self): - # pylint: disable=attribute-defined-outside-init - self.json = self.get_full_dict() - - def get_full_dict(self, load_existing=True): + def get_full_dict(self): """Returns the submission XML as a python dictionary object.""" - doc = self.json or {} if load_existing else {} # Get latest dict doc = self.get_dict() # pylint: disable=no-member + if self.id: geopoint = [self.point.y, self.point.x] if self.point else [None, None] doc.update( @@ -495,7 +485,7 @@ def get_full_dict(self, load_existing=True): STATUS: self.status, TAGS: list(self.tags.names()), NOTES: self.get_notes(), - VERSION: self.version, + VERSION: doc.get(VERSION, self.xform.version), DURATION: self.get_duration(), XFORM_ID_STRING: self._parser.get_xform_id_string(), XFORM_ID: self.xform.pk, @@ -508,30 +498,19 @@ def get_full_dict(self, load_existing=True): doc.update(osm.get_tags_with_prefix()) if isinstance(self.deleted_at, datetime): - doc[DELETEDAT] = self.deleted_at.strftime(MONGO_STRFTIME) + doc[DELETEDAT] = self.deleted_at.isoformat() # pylint: disable=no-member if self.has_a_review: review = self.get_latest_review() if review: doc[REVIEW_STATUS] = review.status - doc[REVIEW_DATE] = review.date_created.strftime(MONGO_STRFTIME) + doc[REVIEW_DATE] = review.date_created.isoformat() if review.get_note_text(): doc[REVIEW_COMMENT] = review.get_note_text() - # pylint: disable=attribute-defined-outside-init - # pylint: disable=access-member-before-definition - if not self.date_created: - self.date_created = submission_time() - - # pylint: 
disable=access-member-before-definition - if not self.date_modified: - self.date_modified = self.date_created - - doc[DATE_MODIFIED] = self.date_modified.strftime(MONGO_STRFTIME) - - doc[SUBMISSION_TIME] = self.date_created.strftime(MONGO_STRFTIME) - + doc[DATE_MODIFIED] = self.date_modified.isoformat() + doc[SUBMISSION_TIME] = self.date_created.isoformat() doc[TOTAL_MEDIA] = self.total_media doc[MEDIA_COUNT] = self.media_count doc[MEDIA_ALL_RECEIVED] = self.media_all_received @@ -650,13 +629,18 @@ class Instance(models.Model, InstanceBaseClass): "logger.XForm", null=False, related_name="instances", on_delete=models.CASCADE ) survey_type = models.ForeignKey("logger.SurveyType", on_delete=models.PROTECT) - # shows when we first received this instance - date_created = models.DateTimeField(auto_now_add=True) - + date_created = models.DateTimeField( + default=now, + editable=False, + blank=True, + ) # this will end up representing "date last parsed" - date_modified = models.DateTimeField(auto_now=True) - + date_modified = models.DateTimeField( + default=now, + editable=False, + blank=True, + ) # this will end up representing "date instance was deleted" deleted_at = models.DateTimeField(null=True, default=None) deleted_by = models.ForeignKey( @@ -698,9 +682,9 @@ class Meta: app_label = "logger" unique_together = ("xform", "uuid") indexes = [ - models.Index(fields=['date_created']), - models.Index(fields=['date_modified']), - models.Index(fields=['deleted_at']), + models.Index(fields=["date_created"]), + models.Index(fields=["date_modified"]), + models.Index(fields=["deleted_at"]), ] @classmethod @@ -746,8 +730,8 @@ def get_expected_media(self): media_list.extend([i["media/file"] for i in data["media"]]) else: media_xpaths = ( - self.xform.get_media_survey_xpaths() + - self.xform.get_osm_survey_xpaths() + self.xform.get_media_survey_xpaths() + + self.xform.get_osm_survey_xpaths() ) for media_xpath in media_xpaths: media_list.extend(get_values_matching_key(data, 
media_xpath)) @@ -780,6 +764,7 @@ def attachments_count(self): # pylint: disable=arguments-differ def save(self, *args, **kwargs): force = kwargs.get("force") + self.date_modified = now() if force: del kwargs["force"] @@ -787,11 +772,8 @@ def save(self, *args, **kwargs): self._check_is_merged_dataset() self._check_active(force) self._set_geom() - self._set_json() self._set_survey_type() self._set_uuid() - # pylint: disable=no-member - self.version = self.json.get(VERSION, self.xform.version) super().save(*args, **kwargs) @@ -823,7 +805,9 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): - XForm submission count & instances_with_geopoints field - Project date modified - - Update the submission JSON field data + - Update the submission JSON field data. We save the full_json in + post_save signal because some implementations in get_full_dict + require the id to be available """ if instance.deleted_at is not None: _update_xform_submission_count_delete(instance) @@ -835,7 +819,7 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): ) ) transaction.on_commit( - lambda: save_full_json_async.apply_async(args=[instance.pk, created]) + lambda: save_full_json_async.apply_async(args=[instance.pk]) ) transaction.on_commit( lambda: update_project_date_modified_async.apply_async( @@ -845,7 +829,7 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): else: update_xform_submission_count(instance.pk, created) - save_full_json(instance.pk, created) + save_full_json(instance) update_project_date_modified(instance.pk, created) @@ -873,7 +857,7 @@ def permanently_delete_attachments(sender, instance=None, created=False, **kwarg pre_delete.connect( permanently_delete_attachments, sender=Instance, - dispatch_uid="permanently_delete_attachments" + dispatch_uid="permanently_delete_attachments", ) @@ -911,7 +895,7 @@ def attachments(self): @property def json(self): """Returns the XML submission as a python dictionary 
object.""" - return self.get_full_dict(load_existing=False) + return self.get_full_dict() @property def status(self): diff --git a/onadata/apps/logger/models/xform.py b/onadata/apps/logger/models/xform.py index e69b6a4af2..c0743e3222 100644 --- a/onadata/apps/logger/models/xform.py +++ b/onadata/apps/logger/models/xform.py @@ -132,14 +132,14 @@ def _build_obs_from_dict( "_parent_index": parent_index, } ) - for (k, v) in iteritems(dict_item): + for k, v in iteritems(dict_item): if isinstance(v, dict) and isinstance(v, list): if k in obs[table_name][-1]: raise AssertionError() obs[table_name][-1][k] = v obs[table_name][-1]["_index"] = this_index - for (k, v) in iteritems(dict_item): + for k, v in iteritems(dict_item): if isinstance(v, dict): kwargs = { "dict_item": v, @@ -893,7 +893,7 @@ class XForm(XFormMixin, BaseModel): ) # XForm was created as a merged dataset is_merged_dataset = models.BooleanField(default=False) - + is_instance_json_regenerated = models.BooleanField(default=False) tags = TaggableManager() class Meta: diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index cae12061fc..3d826abd28 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -1,11 +1,12 @@ import os +import pytz from datetime import datetime from datetime import timedelta from django.http.request import HttpRequest from django.utils.timezone import utc from django_digest.test import DigestAuth -from mock import patch +from mock import patch, Mock from onadata.apps.logger.models import XForm, Instance, SubmissionReview from onadata.apps.logger.models.instance import ( @@ -23,8 +24,6 @@ ) from onadata.libs.utils.common_tags import ( MONGO_STRFTIME, - SUBMISSION_TIME, - XFORM_ID_STRING, SUBMITTED_BY, ) @@ -36,24 +35,46 @@ def setUp(self): def test_stores_json(self): self._publish_transportation_form_and_submit_instance() instances = Instance.objects.all() + 
xform_id_string = XForm.objects.all()[0].id_string for instance in instances: self.assertNotEqual(instance.json, {}) + self.assertEqual(instance.json.get("_id"), instance.pk) + self.assertEqual( + instance.json.get("_date_modified"), instance.date_modified.isoformat() + ) + self.assertEqual( + instance.json.get("_submission_time"), instance.date_created.isoformat() + ) + self.assertEqual(instance.json.get("_xform_id_string"), xform_id_string) - @patch("django.utils.timezone.now") - def test_json_assigns_attributes(self, mock_time): - mock_time.return_value = datetime.utcnow().replace(tzinfo=utc) - self._publish_transportation_form_and_submit_instance() + def test_updates_json_date_modified_on_save(self): + """_date_modified in `json` field is updated on save""" + old_mocked_now = datetime(2023, 9, 21, 8, 27, 0, tzinfo=pytz.utc) - xform_id_string = XForm.objects.all()[0].id_string - instances = Instance.objects.all() + with patch("django.utils.timezone.now", Mock(return_value=old_mocked_now)): + self._publish_transportation_form_and_submit_instance() - for instance in instances: - self.assertEqual( - instance.json[SUBMISSION_TIME], - mock_time.return_value.strftime(MONGO_STRFTIME), - ) - self.assertEqual(instance.json[XFORM_ID_STRING], xform_id_string) + instance = Instance.objects.first() + self.assertEqual(instance.date_modified, old_mocked_now) + self.assertEqual( + instance.json.get("_date_modified"), old_mocked_now.isoformat() + ) + + # After saving the date_modified in json should update + mocked_now = datetime(2023, 9, 21, 9, 3, 0, tzinfo=pytz.utc) + + with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): + instance.save() + + instance.refresh_from_db() + self.assertEqual(instance.date_modified, mocked_now) + self.assertEqual(instance.json.get("_date_modified"), mocked_now.isoformat()) + # date_created, _submission_time is not altered + self.assertEqual(instance.date_created, old_mocked_now) + self.assertEqual( + 
instance.json.get("_submission_time"), old_mocked_now.isoformat() + ) @patch("django.utils.timezone.now") def test_json_stores_user_attribute(self, mock_time): @@ -87,16 +108,6 @@ def test_json_stores_user_attribute(self, mock_time): pi = ParsedInstance.objects.get(instance=instance) self.assertEqual(pi.to_dict_for_mongo()[SUBMITTED_BY], "bob") - def test_json_time_match_submission_time(self): - self._publish_transportation_form_and_submit_instance() - instances = Instance.objects.all() - - for instance in instances: - self.assertEqual( - instance.json[SUBMISSION_TIME], - instance.date_created.strftime(MONGO_STRFTIME), - ) - def test_set_instances_with_geopoints_on_submission_false(self): self._publish_transportation_form() @@ -211,8 +222,7 @@ def test_query_filter_by_integer(self): self.assertEqual(self.xform.instances.count(), 4) self.assertEqual(len(data), 3) - @patch("onadata.apps.logger.models.instance.submission_time") - def test_query_filter_by_datetime_field(self, mock_time): + def test_query_filter_by_datetime_field(self): self._publish_transportation_form() now = datetime(2014, 1, 1, tzinfo=utc) times = [ @@ -221,7 +231,6 @@ def test_query_filter_by_datetime_field(self, mock_time): now + timedelta(seconds=2), now + timedelta(seconds=3), ] - mock_time.side_effect = times self._make_submissions() atime = None diff --git a/onadata/apps/main/tests/fixtures/csv_export/export.csv b/onadata/apps/main/tests/fixtures/csv_export/export.csv index acd9a44cae..b58c110365 100644 --- a/onadata/apps/main/tests/fixtures/csv_export/export.csv +++ b/onadata/apps/main/tests/fixtures/csv_export/export.csv @@ -1,2 +1,2 @@ bed_net[1]/member[1]/name,bed_net[1]/member[2]/name,bed_net[2]/member[1]/name,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -Andrew,Bob,Carl,uuid:4e274a99-f7a9-467c-bb44-9f9a8ceee9a7,4e274a99-f7a9-467c-bb44-9f9a8ceee9a7,2013-02-18T15:54:01,,,2014111,,bob,0,0,True 
+Andrew,Bob,Carl,uuid:4e274a99-f7a9-467c-bb44-9f9a8ceee9a7,4e274a99-f7a9-467c-bb44-9f9a8ceee9a7,2013-02-18T15:54:01+00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats.csv b/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats.csv index 770d27ef4b..f0ebe11424 100644 --- a/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats.csv +++ b/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats.csv @@ -1,2 +1,2 @@ name,age,picture,has_children,children[1]/childs_name,children[1]/childs_age,children[2]/childs_name,children[2]/childs_age,gps,_gps_latitude,_gps_longitude,_gps_altitude,_gps_precision,web_browsers/firefox,web_browsers/chrome,web_browsers/ie,web_browsers/safari,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -Bob,25,n/a,1,Tom,12,Dick,5,-1.2625621 36.7921711 0.0 20.0,-1.2625621,36.7921711,0.0,20.0,n/a,n/a,n/a,n/a,uuid:b31c6ac2-b8ca-4180-914f-c844fa10ed3b,b31c6ac2-b8ca-4180-914f-c844fa10ed3b,2013-02-18T15:54:01,,,2014111,,bob,0,0,True +Bob,25,n/a,1,Tom,12,Dick,5,-1.2625621 36.7921711 0.0 20.0,-1.2625621,36.7921711,0.0,20.0,n/a,n/a,n/a,n/a,uuid:b31c6ac2-b8ca-4180-914f-c844fa10ed3b,b31c6ac2-b8ca-4180-914f-c844fa10ed3b,2013-02-18T15:54:01+00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats_truncate_titles.csv b/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats_truncate_titles.csv index 62b7f5b004..49601028b2 100644 --- a/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats_truncate_titles.csv +++ b/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats_truncate_titles.csv @@ -1,2 +1,2 @@ 
name,age,picture,has_children,childs_name,childs_age,childs_name,childs_age,gps,_gps_latitude,_gps_longitude,_gps_altitude,_gps_precision,web_browsers/firefox,web_browsers/chrome,web_browsers/ie,web_browsers/safari,instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -Bob,25,n/a,1,Tom,12,Dick,5,-1.2625621 36.7921711 0.0 20.0,-1.2625621,36.7921711,0.0,20.0,n/a,n/a,n/a,n/a,uuid:b31c6ac2-b8ca-4180-914f-c844fa10ed3b,b31c6ac2-b8ca-4180-914f-c844fa10ed3b,2013-02-18T15:54:01,,,2014111,,bob,0,0,True +Bob,25,n/a,1,Tom,12,Dick,5,-1.2625621 36.7921711 0.0 20.0,-1.2625621,36.7921711,0.0,20.0,n/a,n/a,n/a,n/a,uuid:b31c6ac2-b8ca-4180-914f-c844fa10ed3b,b31c6ac2-b8ca-4180-914f-c844fa10ed3b,2013-02-18T15:54:01+00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/main/tests/fixtures/transportation/transportation.csv b/onadata/apps/main/tests/fixtures/transportation/transportation.csv index cc2dc8c8b7..9edd6f926e 100644 --- a/onadata/apps/main/tests/fixtures/transportation/transportation.csv +++ b/onadata/apps/main/tests/fixtures/transportation/transportation.csv @@ -1,5 +1,5 @@ 
-"transport/available_transportation_types_to_referral_facility/ambulance","transport/available_transportation_types_to_referral_facility/bicycle","transport/available_transportation_types_to_referral_facility/boat_canoe","transport/available_transportation_types_to_referral_facility/bus","transport/available_transportation_types_to_referral_facility/donkey_mule_cart","transport/available_transportation_types_to_referral_facility/keke_pepe","transport/available_transportation_types_to_referral_facility/lorry","transport/available_transportation_types_to_referral_facility/motorbike","transport/available_transportation_types_to_referral_facility/taxi","transport/available_transportation_types_to_referral_facility/other","transport/available_transportation_types_to_referral_facility_other","transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility","meta/instanceID","_uuid","_submission_time" -"False","False","False","False","False","False","False","False","False","False","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","uuid:5b2cc313-fc09-437e-8149-fcd32f695d41","5b2cc313-fc09-437e-8149-fcd32f695d41","2013-02-14T15:37:21", 
-"True","True","False","False","False","False","False","False","False","False","n/a","daily","weekly","n/a","n/a","n/a","n/a","n/a","n/a","n/a","uuid:f3d8dc65-91a6-4d0f-9e97-802128083390","f3d8dc65-91a6-4d0f-9e97-802128083390","2013-02-14T15:37:22", -"True","False","False","False","False","False","False","False","False","False","n/a","weekly","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","uuid:9c6f3468-cfda-46e8-84c1-75458e72805d","9c6f3468-cfda-46e8-84c1-75458e72805d","2013-02-14T15:37:23", -"False","False","False","False","False","False","False","False","True","True","camel","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","daily","uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf","9f0a1508-c3b7-4c99-be00-9b237c26bcbf","2013-02-14T15:37:24", +transport/available_transportation_types_to_referral_facility/ambulance,transport/available_transportation_types_to_referral_facility/bicycle,transport/available_transportation_types_to_referral_facility/boat_canoe,transport/available_transportation_types_to_referral_facility/bus,transport/available_transportation_types_to_referral_facility/donkey_mule_cart,transport/available_transportation_types_to_referral_facility/keke_pepe,transport/available_transportation_types_to_referral_facility/lorry,transport/available_transportation_types_to_referral_facility/motorbike,transport/available_transportation_types_to_referral_facility/taxi,transport/available_transportation_types_to_referral_facility/other,transport/available_transportation_types_to_referral_facility_other,transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility,transport/loop_over_transport_types_frequ
ency/keke_pepe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility,meta/instanceID,_uuid,_submission_time +False,False,False,False,False,False,False,False,False,False,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-14T15:37:21+00:00 +True,True,False,False,False,False,False,False,False,False,n/a,daily,weekly,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:f3d8dc65-91a6-4d0f-9e97-802128083390,f3d8dc65-91a6-4d0f-9e97-802128083390,2013-02-14T15:37:22+00:00 +True,False,False,False,False,False,False,False,False,False,n/a,weekly,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:9c6f3468-cfda-46e8-84c1-75458e72805d,9c6f3468-cfda-46e8-84c1-75458e72805d,2013-02-14T15:37:23+00:00 +False,False,False,False,False,False,False,False,True,True,camel,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,daily,uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf,9f0a1508-c3b7-4c99-be00-9b237c26bcbf,2013-02-14T15:37:24+00:00 diff --git a/onadata/apps/main/tests/fixtures/userone/userone_with_dot_name_fields.csv b/onadata/apps/main/tests/fixtures/userone/userone_with_dot_name_fields.csv index b80057dfd6..d2dcecb664 100644 --- a/onadata/apps/main/tests/fixtures/userone/userone_with_dot_name_fields.csv +++ b/onadata/apps/main/tests/fixtures/userone/userone_with_dot_name_fields.csv @@ -1,2 +1,2 @@ Q1.1,Q1.2,Q1.3,Q1.4,_Q1.4_latitude,_Q1.4_longitude,_Q1.4_altitude,_Q1.4_precision,rQ6.4[1]/Q6.4,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -Office,4,n/a,-1.2624975 36.7923384 0.0 25.0,-1.2624975,36.7923384,0.0,25.0,Cool,uuid:a32f232c-77cb-4468-b55b-6495d5e5de7,a32f232c-77cb-4468-b55b-6495d5e5de7,2013-02-18T15:54:01,,,2014111,,bob,0,0,True +Office,4,n/a,-1.2624975 
36.7923384 0.0 25.0,-1.2624975,36.7923384,0.0,25.0,Cool,uuid:a32f232c-77cb-4468-b55b-6495d5e5de7,a32f232c-77cb-4468-b55b-6495d5e5de7,2013-02-18T15:54:01+00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/main/tests/test_process.py b/onadata/apps/main/tests/test_process.py index 549ffbcec9..27b6848e77 100644 --- a/onadata/apps/main/tests/test_process.py +++ b/onadata/apps/main/tests/test_process.py @@ -75,7 +75,7 @@ def _update_dynamic_data(self): """ Update stuff like submission time so we can compare within out fixtures """ - for (uuid, submission_time) in iteritems(self.uuid_to_submission_times): + for uuid, submission_time in iteritems(self.uuid_to_submission_times): i = self.xform.instances.get(uuid=uuid) i.date_created = pytz.timezone("UTC").localize( datetime.strptime(submission_time, MONGO_STRFTIME) @@ -368,7 +368,6 @@ def _check_data_dictionary(self): self.assertEqual(sorted(next(actual_csv)), sorted(expected_list)) def _check_data_for_csv_export(self): - data = [ { "available_transportation_types_to_referral_facility/ambulance": True, @@ -391,7 +390,7 @@ def _check_data_for_csv_export(self): ] for d_from_db in self.data_dictionary.get_data_for_excel(): test_dict = {} - for (k, v) in iteritems(d_from_db): + for k, v in iteritems(d_from_db): if ( k not in [ @@ -476,7 +475,7 @@ def _check_csv_export_second_pass(self): "image1": "1335783522563.jpg", "meta/instanceID": "uuid:5b2cc313-fc09-437e-8149-fcd32f695d41", "_uuid": "5b2cc313-fc09-437e-8149-fcd32f695d41", - "_submission_time": "2013-02-14T15:37:21", + "_submission_time": "2013-02-14T15:37:21+00:00", "_tags": "", "_notes": "", "_version": "2014111", @@ -492,7 +491,7 @@ def _check_csv_export_second_pass(self): self.bicycle_key: "weekly", "meta/instanceID": "uuid:f3d8dc65-91a6-4d0f-9e97-802128083390", "_uuid": "f3d8dc65-91a6-4d0f-9e97-802128083390", - "_submission_time": "2013-02-14T15:37:22", + "_submission_time": "2013-02-14T15:37:22+00:00", "_tags": "", "_notes": "", "_version": "2014111", @@ -507,7 
+506,7 @@ def _check_csv_export_second_pass(self): self.ambulance_key: "weekly", "meta/instanceID": "uuid:9c6f3468-cfda-46e8-84c1-75458e72805d", "_uuid": "9c6f3468-cfda-46e8-84c1-75458e72805d", - "_submission_time": "2013-02-14T15:37:23", + "_submission_time": "2013-02-14T15:37:23+00:00", "_tags": "", "_notes": "", "_version": "2014111", @@ -524,7 +523,7 @@ def _check_csv_export_second_pass(self): self.taxi_key: "daily", "meta/instanceID": "uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf", "_uuid": "9f0a1508-c3b7-4c99-be00-9b237c26bcbf", - "_submission_time": "2013-02-14T15:37:24", + "_submission_time": "2013-02-14T15:37:24+00:00", "_tags": "", "_notes": "", "_version": "2014111", @@ -543,7 +542,7 @@ def _check_csv_export_second_pass(self): for row, expected_dict in zip(actual_csv, data): test_dict = {} row_dict = dict(zip(headers, row)) - for (k, v) in iteritems(row_dict): + for k, v in iteritems(row_dict): if not (v in ["n/a", "False"] or k in additional_headers): test_dict[k] = v this_list = [] diff --git a/onadata/apps/viewer/tasks.py b/onadata/apps/viewer/tasks.py index 90cf235a71..2e812ae6d5 100644 --- a/onadata/apps/viewer/tasks.py +++ b/onadata/apps/viewer/tasks.py @@ -37,7 +37,6 @@ def _get_export_object(export_id): return Export.objects.get(id=export_id) except Export.DoesNotExist: if getattr(settings, "SLAVE_DATABASES", []): - with use_master: return Export.objects.get(id=export_id) diff --git a/onadata/apps/viewer/tests/fixtures/transportation.csv b/onadata/apps/viewer/tests/fixtures/transportation.csv index 484ebb1a49..acaa119d95 100644 --- a/onadata/apps/viewer/tests/fixtures/transportation.csv +++ b/onadata/apps/viewer/tests/fixtures/transportation.csv @@ -1,2 +1,2 @@ 
transport/available_transportation_types_to_referral_facility/ambulance,transport/available_transportation_types_to_referral_facility/bicycle,transport/available_transportation_types_to_referral_facility/boat_canoe,transport/available_transportation_types_to_referral_facility/bus,transport/available_transportation_types_to_referral_facility/donkey_mule_cart,transport/available_transportation_types_to_referral_facility/keke_pepe,transport/available_transportation_types_to_referral_facility/lorry,transport/available_transportation_types_to_referral_facility/motorbike,transport/available_transportation_types_to_referral_facility/taxi,transport/available_transportation_types_to_referral_facility/other,transport/available_transportation_types_to_referral_facility_other,transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -False,False,False,False,False,False,False,False,False,False,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-18T15:54:01,,,2014111,,bob,1,0,False 
+False,False,False,False,False,False,False,False,False,False,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-18T15:54:01+00:00,,,2014111,,bob,1,0,False diff --git a/onadata/apps/viewer/tests/fixtures/transportation_without_na.csv b/onadata/apps/viewer/tests/fixtures/transportation_without_na.csv index 922d5efa68..990d1d37a5 100644 --- a/onadata/apps/viewer/tests/fixtures/transportation_without_na.csv +++ b/onadata/apps/viewer/tests/fixtures/transportation_without_na.csv @@ -1,2 +1,2 @@ transport/available_transportation_types_to_referral_facility/ambulance,transport/available_transportation_types_to_referral_facility/bicycle,transport/available_transportation_types_to_referral_facility/boat_canoe,transport/available_transportation_types_to_referral_facility/bus,transport/available_transportation_types_to_referral_facility/donkey_mule_cart,transport/available_transportation_types_to_referral_facility/keke_pepe,transport/available_transportation_types_to_referral_facility/lorry,transport/available_transportation_types_to_referral_facility/motorbike,transport/available_transportation_types_to_referral_facility/taxi,transport/available_transportation_types_to_referral_facility/other,transport/available_transportation_types_to_referral_facility_other,transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/moto
rbike/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -False,False,False,False,False,False,False,False,False,False,,,,,,,,,,,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-18T15:54:01,,,2014111,,bob,1,0,False +False,False,False,False,False,False,False,False,False,False,,,,,,,,,,,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-18T15:54:01+00:00,,,2014111,,bob,1,0,False diff --git a/onadata/libs/renderers/renderers.py b/onadata/libs/renderers/renderers.py index 1e828027bd..46fb86a932 100644 --- a/onadata/libs/renderers/renderers.py +++ b/onadata/libs/renderers/renderers.py @@ -52,11 +52,16 @@ def floip_rows_list(data): """ Yields a row of FLOIP results data from dict data. """ - _submission_time = ( - pytz.timezone("UTC") - .localize(parse_datetime(data["_submission_time"])) - .isoformat() - ) + try: + _submission_time = ( + pytz.timezone("UTC") + .localize(parse_datetime(data["_submission_time"])) + .isoformat() + ) + + except ValueError: + _submission_time = data["_submission_time"] + for i, key in enumerate(data, 1): if not (key.startswith("_") or key in IGNORE_FIELDS): instance_id = data["_id"] @@ -296,7 +301,7 @@ def _to_xml(self, xml, data): xml.endElement(self.element_node) elif isinstance(data, dict): - for (key, value) in iteritems(data): + for key, value in iteritems(data): if key not in FORMLIST_MANDATORY_FIELDS and value is None: continue xml.startElement(key, {}) diff --git a/onadata/libs/tests/data/test_tools.py b/onadata/libs/tests/data/test_tools.py index a7acf2b2b4..652d7e8523 100644 --- a/onadata/libs/tests/data/test_tools.py +++ b/onadata/libs/tests/data/test_tools.py @@ -35,8 +35,10 @@ def test_get_form_submissions_grouped_by_field(self, mock_time): self.assertEqual([field, 
count_key], sorted(list(result))) self.assertEqual(result[count_key], count) - @patch("onadata.apps.logger.models.instance.submission_time") - def test_get_form_submissions_grouped_by_field_datetime_to_date(self, mock_time): + def test_get_form_submissions_grouped_by_field_datetime( + self, + ): # pylint: disable=invalid-name + """Test get_form_submissions_grouped_by_field datetime""" now = datetime(2014, 1, 1, tzinfo=utc) times = [ now, @@ -44,12 +46,12 @@ def test_get_form_submissions_grouped_by_field_datetime_to_date(self, mock_time) now + timedelta(seconds=2), now + timedelta(seconds=3), ] - mock_time.side_effect = times self._make_submissions() for i in self.xform.instances.all().order_by("-pk"): i.date_created = times.pop() i.save() + count_key = "count" fields = ["_submission_time"] diff --git a/onadata/libs/tests/utils/fixtures/nested_repeats/nested_repeats.csv b/onadata/libs/tests/utils/fixtures/nested_repeats/nested_repeats.csv index 3a22b86d86..44fcc51f07 100644 --- a/onadata/libs/tests/utils/fixtures/nested_repeats/nested_repeats.csv +++ b/onadata/libs/tests/utils/fixtures/nested_repeats/nested_repeats.csv @@ -1,3 +1,3 @@ 
kids/has_kids,kids/kids_details[1]/kids_name,kids/kids_details[1]/kids_age,kids/kids_details[2]/kids_name,kids/kids_details[2]/kids_age,kids/kids_details[3]/kids_name,kids/kids_details[3]/kids_age,kids/kids_details[1]/kids_immunization[1]/immunization_info,kids/kids_details[1]/kids_immunization[2]/immunization_info,kids/kids_details[1]/kids_immunization[3]/immunization_info,kids/kids_details[2]/kids_immunization[1]/immunization_info,kids/kids_details[2]/kids_immunization[2]/immunization_info,kids/kids_details[2]/kids_immunization[3]/immunization_info,kids/kids_details[3]/kids_immunization[1]/immunization_info,kids/nested_group/nested_name,kids/nested_group/nested_age,address,gps,_gps_latitude,_gps_longitude,_gps_altitude,_gps_precision,web_browsers/firefox,web_browsers/chrome,web_browsers/ie,web_browsers/safari,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -1,Hansel,2,Gretel,1,n/a,n/a,Polio,Measles,Malaria,TB,Rickets,n/a,n/a,The Witch,45,1234,-1.2627557 36.7926442 0.0 30.0,-1.2627557,36.7926442,0.0,30.0,False,False,True,True,uuid:0a2a8ef5-5d82-4e43-a267-74037b0e9ea4,0a2a8ef5-5d82-4e43-a267-74037b0e9ea4,2013-02-18T15:54:01,,,201211121234,,bob,0,0,True -1,Tom,8,Dick,5,Harry,3,Berry Berry,Scurvy,n/a,Measles,Flu,Anemia,Polio 2,John,38,5678,-1.2627427 36.7925298 0.0 39.0,-1.2627427,36.7925298,0.0,39.0,True,True,True,False,uuid:8742fb1c-50a5-4c54-80c6-4420129d14ce,8742fb1c-50a5-4c54-80c6-4420129d14ce,2013-02-18T15:54:01,,,201211121234,,bob,0,0,True +1,Hansel,2,Gretel,1,n/a,n/a,Polio,Measles,Malaria,TB,Rickets,n/a,n/a,The Witch,45,1234,-1.2627557 36.7926442 0.0 30.0,-1.2627557,36.7926442,0.0,30.0,False,False,True,True,uuid:0a2a8ef5-5d82-4e43-a267-74037b0e9ea4,0a2a8ef5-5d82-4e43-a267-74037b0e9ea4,2013-02-18T15:54:01+00:00,,,201211121234,,bob,0,0,True +1,Tom,8,Dick,5,Harry,3,Berry Berry,Scurvy,n/a,Measles,Flu,Anemia,Polio 2,John,38,5678,-1.2627427 36.7925298 0.0 
39.0,-1.2627427,36.7925298,0.0,39.0,True,True,True,False,uuid:8742fb1c-50a5-4c54-80c6-4420129d14ce,8742fb1c-50a5-4c54-80c6-4420129d14ce,2013-02-18T15:54:01+00:00,,,201211121234,,bob,0,0,True diff --git a/onadata/libs/tests/utils/test_logger_tools.py b/onadata/libs/tests/utils/test_logger_tools.py index 5e498d15e8..03516b7e66 100644 --- a/onadata/libs/tests/utils/test_logger_tools.py +++ b/onadata/libs/tests/utils/test_logger_tools.py @@ -97,6 +97,7 @@ def test_attachment_tracking(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) self.assertEqual(instance.json[MEDIA_COUNT], 1) @@ -183,6 +184,7 @@ def test_attachment_tracking_for_repeats(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) self.assertEqual(instance.json[MEDIA_COUNT], 1) @@ -254,6 +256,7 @@ def test_attachment_tracking_for_nested_repeats(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) self.assertEqual(instance.json[MEDIA_COUNT], 1) @@ -326,6 +329,7 @@ def test_replaced_attachments_not_tracked(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[file_media, image_media], ) + instance.refresh_from_db() self.assertTrue(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual( instance.attachments.filter(deleted_at__isnull=True).count(), 2 @@ -408,6 +412,7 @@ def test_attachment_tracking_duplicate(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) 
self.assertEqual(instance.json[MEDIA_COUNT], 1) @@ -473,6 +478,7 @@ def test_attachment_tracking_not_in_submission(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file, media2_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) self.assertEqual(instance.json[MEDIA_COUNT], 1) diff --git a/onadata/libs/utils/cache_tools.py b/onadata/libs/utils/cache_tools.py index 033b37c6f5..1b0ec079b8 100644 --- a/onadata/libs/utils/cache_tools.py +++ b/onadata/libs/utils/cache_tools.py @@ -55,6 +55,10 @@ XFORM_SUBMISSION_COUNT_FOR_DAY_DATE = "xfm-get_submission_count_date-" XFORM_SUBMISSION_STAT = "xfm-get_form_submissions_grouped_by_field-" XFORM_CHARTS = "xfm-get_form_charts-" +XFORM_REGENERATE_INSTANCE_JSON_TASK = "xfm-regenerate_instance_json_task-" + +# Cache timeouts used in XForm model +XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL = 24 * 60 * 60 # 24 hrs converted to seconds def safe_delete(key):