Track dedupe load for each result
gherceg committed Oct 1, 2024
1 parent f3d778a commit 86b7d99
Showing 1 changed file with 4 additions and 4 deletions.
corehq/apps/data_interfaces/models.py
@@ -72,7 +72,6 @@
 )
 from corehq import toggles
 from corehq.util.log import with_progress_bar
-from corehq.util.metrics import metrics_counter
 from corehq.util.metrics.load_counters import dedupe_load_counter
 from corehq.util.quickcache import quickcache
 from corehq.util.test_utils import unit_testing_only
@@ -1164,8 +1163,6 @@ def _handle_case_duplicate(self, case, rule):
         if is_copied_case(case):
             return CaseRuleActionResult()
 
-        dedupe_load_counter('unknown', case.domain)()
-
         if not case_matching_rule_criteria_exists_in_es(case, rule):
             ALLOWED_ES_DELAY = timedelta(hours=1)
             if datetime.utcnow() - case.server_modified_on > ALLOWED_ES_DELAY:
@@ -1179,7 +1176,7 @@ def _handle_case_duplicate(self, case, rule):
                 # but disabling this to avoid further quota issues.
                 # raise ValueError(f'Unable to find current ElasticSearch data for: {case.case_id}')
                 # Ignore this result for now
-                metrics_counter('commcare.dedupe.no_matching_case', tags={'domain': case.domain})
+                dedupe_load_counter('unknown', case.domain, {'result': 'errored'})()
                 return CaseRuleActionResult(num_errors=1)
             else:
                 # Normal processing can involve latency between when a case is written to the database and when
@@ -1191,9 +1188,12 @@ def _handle_case_duplicate(self, case, rule):
                 # inserts into ElasticSearch are asynchronous, we can receive cases here that will not yet be
                 # present in ElasticSearch but will never be processed later. In the short-term, we're avoiding
                 # this by resaving the case, with the intention to use a more stable approach in the future
+                dedupe_load_counter('unknown', case.domain, {'result': 'retried'})()
                 resave_case(rule.domain, case, send_post_save_signal=False)
                 return CaseRuleActionResult(num_updates=0)
 
+        dedupe_load_counter('unknown', case.domain, {'result': 'processed'})()
+
         try:
             existing_duplicate = CaseDuplicateNew.objects.get(case_id=case.case_id, action=self)
         except CaseDuplicateNew.DoesNotExist:
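The new calls all follow one factory pattern: dedupe_load_counter(...) returns a counting function, and the trailing () invokes it immediately, recording a dedupe load event tagged with its outcome ('errored', 'retried', or 'processed'). Below is a minimal sketch of that pattern. It reuses the metrics_counter helper that the removed import shows exists, but the load_counter body, the partial wiring, and the metric name are illustrative assumptions, not the actual corehq.util.metrics.load_counters implementation.

from functools import partial

from corehq.util.metrics import metrics_counter  # real helper, per the removed import


def load_counter(load_type, action_type, domain_name, extra_tags=None):
    # Illustrative sketch only; the real factory lives in
    # corehq.util.metrics.load_counters and may differ in detail.
    tags = {'type': action_type, 'domain': domain_name}
    if extra_tags:
        tags.update(extra_tags)

    def _count():
        # Hypothetical metric name, chosen for illustration.
        metrics_counter('commcare.load.{}'.format(load_type), tags=tags)

    return _count


# Hypothetical wiring that would match the call sites in the diff:
dedupe_load_counter = partial(load_counter, 'dedupe')

# Usage, mirroring the commit: build the counter, then fire it at once.
dedupe_load_counter('unknown', 'example-domain', {'result': 'processed'})()

Compared with the removed commcare.dedupe.no_matching_case counter, folding the outcome into a 'result' tag on a single dedupe load metric lets every branch of _handle_case_duplicate report through one counter, with per-result breakdowns available at query time.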
