Skip to content

Commit

Permalink
šŸ› [BUG] Fix Aggregator does not retrieve unpublished Tour Steps (refs #ā€¦
Browse files Browse the repository at this point in the history
  • Loading branch information
Chatewgne committed Sep 13, 2024
1 parent 34e82c4 commit fa8ede4
Show file tree
Hide file tree
Showing 11 changed files with 659 additions and 75 deletions.
1 change: 1 addition & 0 deletions docs/changelog.rst
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ CHANGELOG
- ApidaeTrekParser duration import is fixed for multiple-days treks
- Apidae tourism parser now handles missing contact properties
- ApidaeTrekParser now handles missing source website
- Fix Aggregator does not retrieve unpublished Tour Steps (#3569)

**Documentation**

Expand Down
56 changes: 45 additions & 11 deletions geotrek/common/tests/test_parsers.py
Original file line number Diff line number Diff line change
Expand Up @@ -668,24 +668,35 @@ class GeotrekAggregatorTestParser(GeotrekAggregatorParser):
pass


class GeotrekParserTest(TestCase):
class GeotrekParserTest(GeotrekParserTestMixin, TestCase):
def setUp(self, *args, **kwargs):
self.filetype = FileType.objects.create(type="Photographie")

def test_improperly_configurated_categories(self):
with self.assertRaisesRegex(ImproperlyConfigured, 'foo_field is not configured in categories_keys_api_v2'):
call_command('import', 'geotrek.common.tests.test_parsers.GeotrekTrekTestParser', verbosity=2)

def mock_json(self):
    """Return the canned trek listing fixture, parsed from JSON.

    Used to stub out remote Geotrek API v2 responses in tests: assigned
    to ``mocked_get.return_value.json`` so every mocked GET returns the
    contents of ``treks.json``.
    """
    # Fixture lives under the test data directory for the v2 parser tests.
    filename = os.path.join('geotrek', 'common', 'tests', 'data', 'geotrek_parser_v2', 'treks.json')
    with open(filename, 'r') as f:
        return json.load(f)

@mock.patch('requests.get')
def test_delete_according_to_provider(self, mocked_get):
@mock.patch('requests.head')
def test_delete_according_to_provider(self, mocked_head, mocked_get):
self.mock_time = 0
self.mock_json_order = [
('common', 'treks.json'),
('common', 'treks.json'),
('trekking', 'trek_no_children.json'),
('common', 'treks.json'),
('common', 'treks.json'),
('trekking', 'trek_no_children.json'),
('common', 'treks.json'),
('common', 'treks.json'),
('trekking', 'trek_no_children.json')]

# Mock GET
mocked_get.return_value.status_code = 200
mocked_get.return_value.json = self.mock_json
self.assertEqual(Trek.objects.count(), 0)
mocked_get.return_value.content = b''
mocked_head.return_value.status_code = 200

call_command('import', 'geotrek.common.tests.test_parsers.GeotrekTrekTestProviderParser', verbosity=0)
self.assertEqual(Trek.objects.count(), 1)
t = Trek.objects.first()
Expand All @@ -704,9 +715,26 @@ def test_delete_according_to_provider(self, mocked_get):
self.assertEqual(set([t.pk, t2.pk, t3.pk]), set(Trek.objects.values_list('pk', flat=True)))

@mock.patch('requests.get')
def test_delete_according_to_no_provider(self, mocked_get):
@mock.patch('requests.head')
def test_delete_according_to_no_provider(self, mocked_head, mocked_get):
self.mock_time = 0
self.mock_json_order = [
('common', 'treks.json'),
('common', 'treks.json'),
('trekking', 'trek_no_children.json'),
('common', 'treks.json'),
('common', 'treks.json'),
('trekking', 'trek_no_children.json'),
('common', 'treks.json'),
('common', 'treks.json'),
('trekking', 'trek_no_children.json')]

# Mock GET
mocked_get.return_value.status_code = 200
mocked_get.return_value.json = self.mock_json
mocked_get.return_value.content = b''
mocked_head.return_value.status_code = 200

self.assertEqual(Trek.objects.count(), 0)
call_command('import', 'geotrek.common.tests.test_parsers.GeotrekTrekTestNoProviderParser', verbosity=0)
self.assertEqual(Trek.objects.count(), 1)
Expand Down Expand Up @@ -838,6 +866,8 @@ def test_geotrek_aggregator_parser(self, mocked_head, mocked_get):
('trekking', 'trek_ids.json'),
('trekking', 'trek.json'),
('trekking', 'trek_children.json'),
('trekking', 'trek_published_step.json'),
('trekking', 'trek_unpublished_step.json'),
('trekking', 'poi_ids.json'),
('trekking', 'poi.json'),
('tourism', 'informationdesk_ids.json'),
Expand All @@ -863,6 +893,8 @@ def test_geotrek_aggregator_parser(self, mocked_head, mocked_get):
('trekking', 'trek_ids.json'),
('trekking', 'trek.json'),
('trekking', 'trek_children.json'),
('trekking', 'trek_published_step.json'),
('trekking', 'trek_unpublished_step.json'),
('trekking', 'poi_ids.json'),
('trekking', 'poi.json'),
('tourism', 'informationdesk_ids.json'),
Expand All @@ -885,9 +917,11 @@ def test_geotrek_aggregator_parser(self, mocked_head, mocked_get):
string_parser = output.getvalue()
self.assertIn('0000: Trek (URL_1) (00%)', string_parser)
self.assertIn('0000: Poi (URL_1) (00%)', string_parser)
self.assertIn('5/5 lignes importées.', string_parser)
# Published Tour steps are imported twice, but created once
self.assertIn('7/7 lignes importées.', string_parser)
self.assertIn('6 enregistrements créés.', string_parser)
self.assertIn('2/2 lignes importées.', string_parser)
self.assertEqual(Trek.objects.count(), 5)
self.assertEqual(Trek.objects.count(), 6)
self.assertEqual(POI.objects.count(), 2)
self.assertEqual(1, Trek.objects.get(name="Foo").information_desks.count())
self.assertEqual("Office de Tourisme de Seix",
Expand Down
17 changes: 8 additions & 9 deletions geotrek/trekking/parsers.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,18 +197,17 @@ def filter_points_reference(self, src, val):

def end(self):
"""Add children after all treks imported are created in database."""
super().end()
self.next_url = f"{self.url}/api/v2/tour"
try:
params = {
'in_bbox': ','.join([str(coord) for coord in self.bbox.extent]),
'fields': 'steps,uuid'
'fields': 'steps,id'
}
response = self.request_or_retry(f"{self.next_url}", params=params)
results = response.json()['results']
final_children = {}
for result in results:
final_children[result['uuid']] = [step['uuid'] for step in result['steps']]
final_children[result['uuid']] = [step['id'] for step in result['steps']]

for key, value in final_children.items():
if value:
Expand All @@ -217,18 +216,18 @@ def end(self):
self.add_warning(_(f"Trying to retrieve children for missing trek : could not find trek with UUID {key}"))
return
order = 0
for child in value:
try:
trek_child_instance = Trek.objects.get(eid=child)
except Trek.DoesNotExist:
self.add_warning(_(f"One trek has not be generated for {trek_parent_instance[0].name} : could not find trek with UUID {child}"))
continue
for child_id in value:
response = self.request_or_retry(f"{self.url}/api/v2/trek/{child_id}")
child_trek = response.json()
self.parse_row(child_trek)
trek_child_instance = self.obj
OrderedTrekChild.objects.update_or_create(parent=trek_parent_instance[0],
child=trek_child_instance,
defaults={'order': order})
order += 1
except Exception as e:
self.add_warning(_(f"An error occured in children generation : {getattr(e, 'message', repr(e))}"))
super().end()


class GeotrekServiceParser(GeotrekParser):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,10 @@
"uuid": "9e70b294-1134-4c50-9c56-d722720cacf1",
"steps": [
{
"uuid": "c9567576-2934-43ab-979e-e13d02c671a9"
"id": 10439
},
{
"id": 10442
}
]
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@
"uuid": "9e70b294-1134-4c50-9c56-d722720cacf1",
"steps": [
{
"uuid": "c9567576-2934-43ab-979e-e13d02c671a9"
"id": 1234
},
{
"uuid": "c9567576-2934-43ab-666e-e13d02c671a9"
"id": 1235
}
]
},
Expand All @@ -34,7 +34,8 @@
"uuid": "b2aea666-5e6e-4daa-a750-7d2ee52d3fe1",
"steps": [
{
"uuid": "c9567576-2934-43ab-979e-e13d02c671a9"
"uuid": "c9567576-2934-43ab-979e-e13d02c671a9",
"id": 457
}
]
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
{
"count": 5,
"next": null,
"previous": null,
"results": [
{
"uuid": "9e70b294-1134-4c50-9c56-d722720cacf1",
"steps": []
},
{
"uuid": "1ba24605-aff2-4b16-bf30-6de1ebfb2a12",
"steps": []
},
{
"uuid": "6761143f-9244-41d0-b1af-21114408f769",
"steps": []
},
{
"uuid": "c9567576-2934-43ab-979e-e13d02c671a9",
"steps": []
},
{
"uuid": "b2aea892-5e6e-4daa-a750-7d2ee52d3fe1",
"steps": []
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"detail": "No Trek matches the given query."
}
Loading

0 comments on commit fa8ede4

Please sign in to comment.