Merge branch 'master' into ctsims/end_to_end_dev_guide
ctsims authored Aug 30, 2024
2 parents b314857 + 0784740 commit 72bc766
Showing 3 changed files with 27 additions and 35 deletions.
38 changes: 11 additions & 27 deletions corehq/apps/export/models/new.py
@@ -1771,6 +1771,7 @@ def generate_schema_from_builds(
         only_process_current_builds=False,
         task=None,
         for_new_export_instance=False,
+        is_identifier_case_type=False,
     ):
         """
         Builds a schema from Application builds for a given identifier
@@ -1787,6 +1788,7 @@ def generate_schema_from_builds(
             processed.
         :param task: A celery task to update the progress of the build
         :param for_new_export_instance: Flag to be set if generating schema for a new export instance
+        :param is_identifier_case_type: boolean, if True, some optimizations are applied specific to case type
         :returns: Returns a ExportDataSchema instance
         """

@@ -1809,7 +1811,8 @@ def generate_schema_from_builds(
             )
             app_build_ids.extend(app_ids_for_domain)
         current_schema = cls._process_apps_for_export(domain, current_schema, identifier, app_build_ids, task,
-                                                      for_new_export_instance=for_new_export_instance)
+                                                      for_new_export_instance=for_new_export_instance,
+                                                      is_identifier_case_type=is_identifier_case_type)
 
         inferred_schema = cls._get_inferred_schema(domain, app_id, identifier)
         if inferred_schema:
@@ -1955,7 +1958,7 @@ def _save_export_schema(current_schema, original_id, original_rev):
 
     @classmethod
     def _process_apps_for_export(cls, domain, schema, identifier, app_build_ids, task,
-                                 for_new_export_instance=False):
+                                 for_new_export_instance=False, is_identifier_case_type=False):
         apps_processed = 0
         for app_doc in iter_docs(Application.get_db(), app_build_ids, chunksize=10):
             doc_type = app_doc.get('doc_type', '')
@@ -1974,6 +1977,9 @@ def _process_apps_for_export(cls, domain, schema, identifier, app_build_ids, task,
                 )
                 continue
 
+            if is_identifier_case_type and not app.case_type_exists(identifier):
+                continue
+
             try:
                 schema = cls._process_app_build(
                     schema,
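
The guard above is the optimization the new flag enables: when the identifier is known to be a case type, app builds that never define that case type are skipped before any schema processing happens. A minimal sketch of the pattern, assuming app builds are wrapped Application objects exposing the case_type_exists helper used in the diff:

```python
def iter_relevant_builds(app_builds, identifier, is_identifier_case_type=False):
    # Simplified stand-in for the filtering step in _process_apps_for_export.
    for app in app_builds:
        if is_identifier_case_type and not app.case_type_exists(identifier):
            # This build never defines the case type, so it cannot contribute
            # properties to the schema; skip it entirely.
            continue
        yield app
```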
@@ -2313,18 +2319,6 @@ def _get_stock_items_from_question(question, app_id, app_version, repeats):
 
         return items
 
-    @classmethod
-    def _process_apps_for_export(cls, domain, schema, identifier, app_build_ids, task,
-                                 for_new_export_instance=False):
-        return super(FormExportDataSchema, cls)._process_apps_for_export(
-            domain,
-            schema,
-            identifier,
-            app_build_ids,
-            task,
-            for_new_export_instance=for_new_export_instance
-        )
-
 
 class CaseExportDataSchema(ExportDataSchema):

@@ -2544,7 +2538,7 @@ def _add_to_group_schema(group_schema, path_start, prop, app_id, app_version):
 
     @classmethod
     def _process_apps_for_export(cls, domain, schema, identifier, app_build_ids, task,
-                                 for_new_export_instance=False):
+                                 for_new_export_instance=False, is_identifier_case_type=False):
         if identifier == ALL_CASE_TYPE_EXPORT:
             return cls._process_apps_for_bulk_export(domain, schema, app_build_ids, task)
         else:
@@ -2554,7 +2548,8 @@ def _process_apps_for_export(cls, domain, schema, identifier, app_build_ids, task,
                 identifier,
                 app_build_ids,
                 task,
-                for_new_export_instance=for_new_export_instance
+                for_new_export_instance=for_new_export_instance,
+                is_identifier_case_type=is_identifier_case_type
             )
 
     @classmethod
@@ -2623,17 +2618,6 @@ def schema_version(cls):
     def get_latest_export_schema(domain, include_metadata, identifier=None):
         return SMSExportDataSchema(domain=domain, include_metadata=include_metadata)
 
-    def _process_apps_for_export(cls, domain, schema, identifier, app_build_ids, task,
-                                 for_new_export_instance=False):
-        return super(SMSExportDataSchema, cls)._process_apps_for_export(
-            domain,
-            schema,
-            identifier,
-            app_build_ids,
-            task,
-            for_new_export_instance=for_new_export_instance
-        )
-
 
 def _string_path_to_list(path):
     return path if path is None else path[1:].split('/')
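
Both deleted overrides of _process_apps_for_export (on FormExportDataSchema above and SMSExportDataSchema here) were pure pass-throughs that forwarded every argument to the parent implementation unchanged, so removing them is behavior-preserving and spares the new is_identifier_case_type keyword from being threaded through three signatures; note the SMS copy was not even decorated with @classmethod. A toy illustration of why a pass-through override is dead weight (stand-in classes, not the HQ models):

```python
class Base:
    @classmethod
    def process(cls, identifier, for_new_export_instance=False):
        return (cls.__name__, identifier, for_new_export_instance)


class PassThrough(Base):
    # Redundant: without this method, attribute lookup finds Base.process
    # anyway, and cls is still bound to the subclass.
    @classmethod
    def process(cls, identifier, for_new_export_instance=False):
        return super().process(identifier, for_new_export_instance=for_new_export_instance)


class Lean(Base):
    pass


assert PassThrough.process('case') == ('PassThrough', 'case', False)
assert Lean.process('case') == ('Lean', 'case', False)  # identical behavior, no override
```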
21 changes: 14 additions & 7 deletions corehq/apps/hqadmin/service_checks.py
@@ -19,6 +19,9 @@
 
 import attr
 import gevent
+
+from dimagi.utils.logging import notify_exception
+
 from corehq.apps.app_manager.models import Application
 from corehq.apps.change_feed.connection import (
     get_kafka_client,
@@ -78,9 +81,7 @@ def check_all_rabbitmq():
         return ServiceStatus(True, 'RabbitMQ OK')
 
     else:
-        return ServiceStatus(False, '; '.join(['{}:{}'.format(rabbit[0], rabbit[1])
-                                               for rabbit in unwell_rabbits])
-                             )
+        return ServiceStatus(False, "; ".join(["{}:{}".format(rabbit[0], rabbit[1]) for rabbit in unwell_rabbits]))
 
 
 def check_rabbitmq(broker_url):
@@ -123,7 +124,12 @@ def check_kafka():
 
 @change_log_level('urllib3.connectionpool', logging.WARNING)
 def check_elasticsearch():
-    cluster_health = check_es_cluster_health()
+    try:
+        cluster_health = check_es_cluster_health()
+    except Exception:
+        notify_exception(None, message="Error while checking elasticsearch cluster health")
+        return ServiceStatus(False, "Something went wrong checking cluster health")
+
     if cluster_health == 'red':
         return ServiceStatus(False, "Cluster health at %s" % cluster_health)
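
The new try/except turns an unexpected failure inside check_es_cluster_health() into a failed ServiceStatus plus a notify_exception report, rather than letting the whole service-check view raise. The same shape, sketched generically; probe and report are placeholder names, and ServiceStatus is approximated with a namedtuple rather than the module's attr class:

```python
from collections import namedtuple

ServiceStatus = namedtuple('ServiceStatus', ['success', 'msg'])  # stand-in


def run_check(probe, report, name):
    # Never let a broken dependency raise out of a health check: report the
    # exception and mark the service down instead.
    try:
        health = probe()
    except Exception:
        report(None, message="Error while checking %s health" % name)
        return ServiceStatus(False, "Something went wrong checking %s health" % name)
    return ServiceStatus(health != 'red', "%s health at %s" % (name, health))
```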

@@ -176,8 +182,9 @@ def check_celery():
             bad_queues.append(
                 f"{queue} has been blocked for {blockage_duration} (max allowed is {threshold})"
             )
-        elif (heartbeat_time_to_start is not None and
-              heartbeat_time_to_start > max(threshold, datetime.timedelta(minutes=5))):
+        elif heartbeat_time_to_start is not None and heartbeat_time_to_start > max(
+            threshold, datetime.timedelta(minutes=5)
+        ):
             bad_queues.append(
                 f"{queue} is delayed for {heartbeat_time_to_start} (max allowed is {threshold})"
             )
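
The reflowed elif is behavior-identical: a queue is flagged as delayed only when its heartbeat lag exceeds the larger of its configured threshold and a five-minute floor. A quick worked check of that floor, with invented values:

```python
import datetime

FLOOR = datetime.timedelta(minutes=5)


def is_delayed(heartbeat_time_to_start, threshold):
    # Same shape as the condition in check_celery above.
    return (heartbeat_time_to_start is not None
            and heartbeat_time_to_start > max(threshold, FLOOR))


assert not is_delayed(datetime.timedelta(minutes=4), datetime.timedelta(minutes=2))  # under the floor
assert is_delayed(datetime.timedelta(minutes=6), datetime.timedelta(minutes=2))      # over the floor
assert not is_delayed(None, datetime.timedelta(minutes=2))                           # no heartbeat data
```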
@@ -194,7 +201,7 @@ def check_postgres():
     for db in settings.DATABASES:
         db_conn = connections[db]
         try:
-            c = db_conn.cursor()
+            db_conn.cursor()
             c_status = 'OK'
         except OperationalError:
             c_status = 'FAIL'
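
The dropped c = binding was never read; opening a cursor is used purely as a connectivity probe. A standalone sketch of the same loop, assuming a configured Django project and mirroring the imports the real module relies on:

```python
from django.conf import settings
from django.db import connections
from django.db.utils import OperationalError


def postgres_statuses():
    statuses = {}
    for db in settings.DATABASES:
        try:
            connections[db].cursor()  # opening a cursor is enough to prove connectivity
            statuses[db] = 'OK'
        except OperationalError:
            statuses[db] = 'FAIL'
    return statuses
```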
3 changes: 2 additions & 1 deletion corehq/apps/reports/standard/cases/case_data.py
@@ -450,7 +450,8 @@ def case_property_names(request, domain, case_id):
     # We need to look at the export schema in order to remove any case properties that
     # have been deleted from the app. When the data dictionary is fully public, we can use that
     # so that users may deprecate those properties manually
-    export_schema = CaseExportDataSchema.generate_schema_from_builds(domain, None, case.type)
+    export_schema = CaseExportDataSchema.generate_schema_from_builds(domain, None, case.type,
+                                                                     is_identifier_case_type=True)
     property_schema = export_schema.group_schemas[0]
     last_app_ids = get_latest_app_ids_and_versions(domain)
     all_property_names = {
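
This is the lone call site that opts in: case_property_names asks for the schema of exactly one case type, so is_identifier_case_type=True is safe here, while form export schemas (whose identifier is a form xmlns, not a case type) keep the default False. The updated call, restated as a usage sketch with the argument roles annotated; the comments are editorial, not from the source:

```python
export_schema = CaseExportDataSchema.generate_schema_from_builds(
    domain,                        # the domain whose app builds are scanned
    None,                          # app_id: not pinned to a single app
    case.type,                     # identifier: a single case type
    is_identifier_case_type=True,  # allows builds without this case type to be skipped
)
```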
Expand Down
