From adf61b78fed7f09f64238d1922c4600150c6c8e2 Mon Sep 17 00:00:00 2001 From: Jay Zeng Date: Tue, 10 Sep 2024 16:33:08 -0400 Subject: [PATCH 1/3] Process user events Call BOP directly to check user's data --- rbac/management/principal/cleaner.py | 154 +++++++++++++-------- rbac/management/principal/utils.py | 32 +++++ rbac/management/tasks.py | 4 +- rbac/migration_tool/migrate.py | 14 +- tests/management/principal/test_cleaner.py | 139 ++++++++++++++++--- tests/migration_tool/tests_migrate.py | 3 +- 6 files changed, 264 insertions(+), 82 deletions(-) create mode 100644 rbac/management/principal/utils.py diff --git a/rbac/management/principal/cleaner.py b/rbac/management/principal/cleaner.py index f27fc932..46de8b0e 100644 --- a/rbac/management/principal/cleaner.py +++ b/rbac/management/principal/cleaner.py @@ -19,12 +19,16 @@ import logging import os import ssl -from collections import defaultdict import xmltodict from django.conf import settings from management.principal.model import Principal from management.principal.proxy import PrincipalProxy +from management.principal.utils import ( + create_tenant_relationships, + create_user_relationships, + remove_user_relationships, +) from rest_framework import status from stompest.config import StompConfig from stompest.error import StompConnectionError @@ -36,7 +40,7 @@ logger = logging.getLogger(__name__) # pylint: disable=invalid-name -proxy = PrincipalProxy() # pylint: disable=invalid-name +PROXY = PrincipalProxy() # pylint: disable=invalid-name CERT_LOC = "/opt/rbac/rbac/management/principal/umb_certs/cert.pem" KEY_LOC = "/opt/rbac/rbac/management/principal/umb_certs/key.pem" @@ -54,7 +58,7 @@ def clean_tenant_principals(tenant): continue logger.debug("clean_tenant_principals: Checking for username %s for tenant %s.", principal.username, tenant_id) org_id = tenant.org_id - resp = proxy.request_filtered_principals([principal.username], org_id=org_id) + resp = PROXY.request_filtered_principals([principal.username], org_id=org_id) status_code = resp.get("status_code") data = resp.get("data") logger.info("clean_tenant_principals: Response code: %s Data: %s", str(status_code), str(data)) @@ -119,45 +123,98 @@ def clean_tenants_principals(): UMB_CLIENT = Stomp(CONFIG) -def is_umb_deactivate_msg(data_dict): - """Check if the message is a user deactivation message from UMB.""" - if not data_dict.get("CanonicalMessage"): # Skip if it is not CanonicalMessage - return False - # We only care about disabled user, operation == update and status == Inactive - operation = data_dict["CanonicalMessage"].get("Header", {}).get("Operation") - if operation != "update": - return False - status = data_dict["CanonicalMessage"].get("Payload", {}).get("Sync").get("User", {}).get("Status", {}) - if status.get("@primary") != "true" or status.get("State") != "Inactive": - return False - - return True - - -def clean_principal_umb(data_dict): - """Delete the principal if it exists.""" - user_principal_login = data_dict["CanonicalMessage"]["Payload"]["Sync"]["User"]["Person"]["Credentials"]["Login"] - # In case the user is under multiple account - principals = ( - Principal.objects.filter(username=user_principal_login) - .exclude(cross_account=True) - .exclude(type=Principal.Types.SERVICE_ACCOUNT) +def process_principal_deletion(user_data): + """Process the principal deletion.""" + # TODO: cleanup the relationships in spicedb + user_id = user_data["user_id"] + groups = [] + tenant = Tenant.objects.get(org_id=user_data["org_id"]) + principal = 
Principal.objects.filter(username=user_data["username"], tenant=tenant).first() + if not principal: # User not in RBAC + return + + # Log the group info in case it is needed + for group in principal.group.all(): + groups.append(group) + # We have to do the removal explicitly in order to clear the cache, + # or the console will still show the cached number of members + group.principals.remove(principal) + principal.delete() + remove_user_relationships(tenant, groups, principal, user_data["is_org_admin"]) + if not groups: + logger.info(f"Principal {user_id} was not under any groups.") + for group in groups: + logger.info(f"Principal {user_id} was in group with uuid: {group.uuid}") + + +def process_principal_edit(user_data): + """Process the principal update.""" + org_id = user_data["org_id"] + tenant_name = f"org{org_id}" + tenant, created = Tenant.objects.get_or_create(org_id=org_id, defaults={"ready": True, "tenant_name": tenant_name}) + if created: + create_tenant_relationships(tenant) + principal, created = Principal.objects.get_or_create( + username=user_data["username"], + tenant=tenant, + defaults={"user_id": user_data["user_id"]}, ) - groups = defaultdict(list) - for principal in principals: - # Log the group info in case it is needed - for group in principal.group.all(): - groups[principal.tenant.tenant_name].append(group.name) - # We have to trigger the removal in order to clear the cache, or the console will still show the cached - # number of members - group.principals.remove(principal) - principal.delete() - return user_principal_login, groups - - -def clean_principals_via_umb(): - """Check which principals are eligible for clean up via UMB.""" - logger.info("clean_tenant_principals: Start principal clean up via umb.") + if created: + create_user_relationships(principal, user_data["is_org_admin"]) + + +def retrieve_user_info(message): + """ + Retrieve user info from the message. 
+ + returns: + user_data + is_deleted # Has the user been deleted on IT's side + """ + user = message["Payload"]["Sync"]["User"] + identifiers = user["Identifiers"] + user_id = identifiers["Identifier"]["#text"] + + bop_resp = PROXY.request_filtered_principals([user_id], options={"return_id": True}) + if not bop_resp["data"]: # User has been deleted + is_org_admin = user.get("UserMembership") == {"Name": "admin:org:all"} + user_name = user["Person"]["Credentials"]["Login"] + for ref in identifiers["Reference"]: + if ref["@entity-name"] == "Customer": + org_id = ref["#text"] + break + return {"user_id": user_id, "is_org_admin": is_org_admin, "username": user_name, "org_id": org_id}, True + return bop_resp["data"][0], False + + +def process_principal_data(user_data, is_deleted): + """Process the principal data.""" + if is_deleted: + process_principal_deletion(user_data) + else: + process_principal_edit(user_data) + + +def process_umb_event(frame, umb_client): + """Process each umb frame.""" + data_dict = xmltodict.parse(frame.body) + canonical_message = data_dict.get("CanonicalMessage") + if not canonical_message: + return + try: + user_data, is_deleted = retrieve_user_info(canonical_message) + except Exception as e: # Skip processing and leave the it to be processed later + logger.error("process_umb_event: Error retrieving user info: %s", str(e)) + return + + process_principal_data(user_data, is_deleted) + + umb_client.ack(frame) + + +def process_principal_events_from_umb(): + """Process principals events from UMB.""" + logger.info("process_tenant_principal_events: Start processing principal events from umb.") try: UMB_CLIENT.connect() UMB_CLIENT.subscribe(QUEUE, {StompSpec.ACK_HEADER: StompSpec.ACK_CLIENT_INDIVIDUAL}) @@ -168,17 +225,6 @@ def clean_principals_via_umb(): while UMB_CLIENT.canRead(2): # Check if queue is empty, two sec timeout frame = UMB_CLIENT.receiveFrame() - data_dict = xmltodict.parse(frame.body) - is_deactivate = is_umb_deactivate_msg(data_dict) - if not is_deactivate: - # Drop the message cause it is useless for us - UMB_CLIENT.ack(frame) - continue - principal_name, groups = clean_principal_umb(data_dict) - if not groups: - logger.info(f"Principal {principal_name} was not under any groups.") - for tenant, group_names in groups.items(): - logger.info(f"Principal {principal_name} was under tenant {tenant} in groups: {group_names}") - UMB_CLIENT.ack(frame) # This will remove the message from the queue + process_umb_event(frame, UMB_CLIENT) UMB_CLIENT.disconnect() - logger.info("clean_tenant_principals: Principal clean up finished.") + logger.info("process_tenant_principal_events: Principal event processing finished.") diff --git a/rbac/management/principal/utils.py b/rbac/management/principal/utils.py new file mode 100644 index 00000000..549adf39 --- /dev/null +++ b/rbac/management/principal/utils.py @@ -0,0 +1,32 @@ +""" +Copyright 2019 Red Hat, Inc. + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU Affero General Public License as +published by the Free Software Foundation, either version 3 of the +License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Affero General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with this program. If not, see . 
+""" + + +def create_tenant_relationships(tenant): + """Create relationships for tenant.""" + pass + + +def create_user_relationships(principal, is_org_admin): + """Create relationships for user.""" + pass + + +def remove_user_relationships(tenant, groups, principal, is_org_admin): + """Remove relationships for user.""" + # TODO: consider (admin) default groups + pass diff --git a/rbac/management/tasks.py b/rbac/management/tasks.py index c15111a5..2c5015b5 100644 --- a/rbac/management/tasks.py +++ b/rbac/management/tasks.py @@ -21,8 +21,8 @@ from django.core.management import call_command from management.health.healthcheck import redis_health from management.principal.cleaner import ( - clean_principals_via_umb, clean_tenants_principals, + process_principal_events_from_umb, ) from migration_tool.migrate import migrate_data @@ -36,7 +36,7 @@ def principal_cleanup(): @shared_task def principal_cleanup_via_umb(): """Celery task to clean up principals no longer existing.""" - clean_principals_via_umb() + process_principal_events_from_umb() @shared_task diff --git a/rbac/migration_tool/migrate.py b/rbac/migration_tool/migrate.py index 33f8db18..1b50e7be 100644 --- a/rbac/migration_tool/migrate.py +++ b/rbac/migration_tool/migrate.py @@ -118,13 +118,17 @@ def migrate_users(tenant: Tenant, write_db: bool): def migrate_users_for_groups(tenant: Tenant, write_db: bool): """Write users relationship to groups.""" relationships = [] - for group in tenant.group_set.all(): - # Explicitly create relationships for platform default group - user_set = ( - tenant.principal_set.filter(cross_account=False) if group.platform_default else group.principals.all() - ) + for group in tenant.group_set.exclude(platform_default=True): + user_set = group.principals.all() for user in user_set: relationships.append(create_relationship("group", str(group.uuid), "user", str(user.uuid), "member")) + # Explicitly create relationships for platform default group + group_default = tenant.group_set.filter(platform_default=True).first() + if not group_default: # Means it is not custom platform_default + group_default = Tenant.objects.get(tenant_name="public").group_set.get(platform_default=True) + user_set = tenant.principal_set.filter(cross_account=False) + for user in user_set: + relationships.append(create_relationship("group", str(group_default.uuid), "user", str(user.uuid), "member")) output_relationships(relationships, write_db) diff --git a/tests/management/principal/test_cleaner.py b/tests/management/principal/test_cleaner.py index e1fbc805..a2a98046 100644 --- a/tests/management/principal/test_cleaner.py +++ b/tests/management/principal/test_cleaner.py @@ -24,7 +24,8 @@ from management.group.model import Group from management.principal.cleaner import clean_tenant_principals from management.principal.model import Principal -from management.principal.cleaner import clean_principals_via_umb +from management.principal.cleaner import process_principal_events_from_umb +from management.workspace.model import Workspace from api.models import Tenant from tests.identity_request import IdentityRequest @@ -191,31 +192,80 @@ def test_principal_cleanup_principal_error(self, mock_request): b"\n \n \n \n\n" ) +FRAME_BODY_CREATION = ( + b'\n\n ' + b"
\n WEB\n insert\n User\n " + b"660a018a6d336076b5b57fff\n 2024-03-31T20:36:27.820\n
\n " + b"\n \n \n 2024-02-16T02:57:51.738\n " + b"2024-02-21T06:47:24.672\n \n " + b'56780000\n ' + b'17685860\n ' + b'11111111\n ' + b'\n \n Active\n ' + b"\n \n Test\n " + b"Principal\n Mr.\n QE\n " + b"\n principal-test\n \n " + b"\n \n Shakespeare Birthplace Trust\n " + b"\n
\n \n \n" + b"103\n \n " + b'33535807_SITE\n ' + b'\n \n Inactive\n ' + b'\n 100 E. Davie St.\n Raleigh\n ' + b'Wake\n NC\n ' + b"US\n 27601\n
\n " + b'\n \n ' + b'56780000_IPHONE\n ' + b"\n 1234567890\n 1234567890\n " + b'\n \n \n ' + b'56780000_IEMAIL\n ' + b"\n test@email.com\n \n " + b"\n admin:org:all\n \n " + b"\n \n " + b"Customer Portal: System Management\n Y\n " + b"\n \n \n " + b"Customer Portal: Download Software and Updates\n Y\n " + b"\n \n \n " + b"Customer Portal: Manage Subscriptions\n Y\n " + b"\n \n \n " + b"Customer Portal: Manage Support Cases\n Y\n " + b"\n
\n
\n
\n
\n" +) -class PrincipalCleanerUMBTests(IdentityRequest): - """Test the principal cleaner functions.""" + +class PrincipalUMBTests(IdentityRequest): + """Test the principal processor functions.""" def setUp(self): - """Set up the principal cleaner tests.""" + """Set up the principal processor tests.""" super().setUp() + self.principal_name = "principal-test" self.group = Group(name="groupA", tenant=self.tenant) self.group.save() + self.tenant.org_id = "17685860" + self.tenant.save() @patch("management.principal.cleaner.UMB_CLIENT") def test_principal_cleanup_none(self, client_mock): """Test that we can run a principal clean up with no messages.""" client_mock.canRead.return_value = False - clean_principals_via_umb() + process_principal_events_from_umb() client_mock.receiveFrame.assert_not_called() client_mock.disconnect.assert_called_once() + @patch( + "management.principal.proxy.PrincipalProxy._request_principals", + return_value={ + "status_code": status.HTTP_200_OK, + "data": [], + }, + ) @patch("management.group.model.AccessCache") @patch("management.principal.cleaner.UMB_CLIENT") - def test_cleanup_principal_in_or_not_in_group(self, client_mock, cache_class): + def test_cleanup_principal_in_or_not_in_group(self, client_mock, cache_class, proxy_mock): """Test that we can run a principal clean up on a tenant with a principal in a group.""" principal_name = "principal-test" - self.principal = Principal(username=principal_name, tenant=self.tenant) + self.principal = Principal(username=principal_name, tenant=self.tenant, user_id="56780000") self.principal.save() self.group.principals.add(self.principal) self.group.save() @@ -224,7 +274,7 @@ def test_cleanup_principal_in_or_not_in_group(self, client_mock, cache_class): client_mock.receiveFrame.return_value = MagicMock(body=FRAME_BODY) cache_mock = MagicMock() cache_class.return_value = cache_mock - clean_principals_via_umb() + process_principal_events_from_umb() client_mock.receiveFrame.assert_called_once() client_mock.disconnect.assert_called_once() @@ -235,16 +285,23 @@ def test_cleanup_principal_in_or_not_in_group(self, client_mock, cache_class): cache_mock.delete_policy.assert_called_once_with(self.principal.uuid) # When principal not in group - self.principal = Principal(username=principal_name, tenant=self.tenant) + self.principal = Principal(username=principal_name, tenant=self.tenant, user_id="56780000") self.principal.save() client_mock.canRead.side_effect = [True, False] client_mock.ack.reset_mock() - clean_principals_via_umb() + process_principal_events_from_umb() self.assertFalse(Principal.objects.filter(username=principal_name).exists()) client_mock.ack.assert_called_once() + @patch( + "management.principal.proxy.PrincipalProxy.request_filtered_principals", + return_value={ + "status_code": 200, + "data": [], + }, + ) @patch("management.principal.cleaner.UMB_CLIENT") - def test_cleanup_principal_does_not_exist(self, client_mock): + def test_cleanup_principal_does_not_exist(self, client_mock, proxy_mock): """Test that can run a principal clean up with a principal does not exist.""" principal_name = "principal-keep" self.principal = Principal(username=principal_name, tenant=self.tenant) @@ -252,25 +309,67 @@ def test_cleanup_principal_does_not_exist(self, client_mock): client_mock.canRead.side_effect = [True, False] client_mock.receiveFrame.return_value = MagicMock(body=FRAME_BODY) - clean_principals_via_umb() + process_principal_events_from_umb() client_mock.ack.assert_called_once() 
self.assertTrue(Principal.objects.filter(username=principal_name).exists()) + @patch( + "management.principal.proxy.PrincipalProxy.request_filtered_principals", + return_value={ + "status_code": 200, + "data": [], + }, + ) @patch("management.principal.cleaner.UMB_CLIENT") - def test_cleanup_principal_with_multiple_tenants(self, client_mock): + def test_cleanup_same_principal_name_in_multiple_tenants(self, client_mock, proxy_mock): """Test that can run a principal clean up with a principal that have multiple tenants.""" - another_tenant = Tenant.objects.create(tenant_name="another", account_id="11111", org_id="22222", ready=True) - principal_name = "principal-test" - self.principal = Principal.objects.create(username=principal_name, tenant=self.tenant) - Principal.objects.create(username=principal_name, tenant=another_tenant) - self.assertEqual(Principal.objects.filter(username=principal_name).count(), 2) + another_tenant = Tenant.objects.create( + tenant_name="another", account_id="11111112", org_id="17685861", ready=True + ) + self.principal = Principal.objects.create(username=self.principal_name, user_id="56780000", tenant=self.tenant) + Principal.objects.create(username=self.principal_name, user_id="12340000", tenant=another_tenant) + self.assertEqual(Principal.objects.filter(username=self.principal_name).count(), 2) client_mock.canRead.side_effect = [True, False] client_mock.receiveFrame.return_value = MagicMock(body=FRAME_BODY) - clean_principals_via_umb() + process_principal_events_from_umb() client_mock.receiveFrame.assert_called_once() client_mock.disconnect.assert_called_once() client_mock.ack.assert_called_once() - self.assertFalse(Principal.objects.filter(username=principal_name).exists()) + self.assertFalse(Principal.objects.filter(username=self.principal_name, tenant=self.tenant).exists()) + self.assertTrue(Principal.objects.filter(username=self.principal_name, tenant=another_tenant).exists()) + + @patch( + "management.principal.proxy.PrincipalProxy.request_filtered_principals", + return_value={ + "status_code": 200, + "data": [ + { + "user_id": 56780000, + "org_id": "17685860", + "username": "principal-test", + "email": "test_user@email.com", + "first_name": "user", + "last_name": "test", + "is_org_admin": False, + } + ], + }, + ) + @patch("management.principal.cleaner.UMB_CLIENT") + def test_principal_creation_event(self, client_mock, proxy_mock): + """Test that we can run principal creation event.""" + public_tenant = Tenant.objects.get(tenant_name="public") + Group.objects.create(name="default", platform_default=True, tenant=public_tenant) + client_mock.canRead.side_effect = [True, False] + client_mock.receiveFrame.return_value = MagicMock(body=FRAME_BODY_CREATION) + Tenant.objects.get(org_id="17685860").delete() + process_principal_events_from_umb() + + client_mock.receiveFrame.assert_called_once() + client_mock.disconnect.assert_called_once() + client_mock.ack.assert_called_once() + self.assertTrue(Tenant.objects.filter(org_id="17685860").exists()) + self.assertTrue(Principal.objects.filter(username=self.principal_name).exists()) diff --git a/tests/migration_tool/tests_migrate.py b/tests/migration_tool/tests_migrate.py index fae74507..184ce1a8 100644 --- a/tests/migration_tool/tests_migrate.py +++ b/tests/migration_tool/tests_migrate.py @@ -31,7 +31,8 @@ class MigrateTests(TestCase): def setUp(self): """Set up the utils tests.""" super().setUp() - public_tenant = Tenant.objects.create(tenant_name="public") + public_tenant = Tenant.objects.get(tenant_name="public") + 
Group.objects.create(name="default", tenant=public_tenant, platform_default=True) # This would be skipped permission1 = Permission.objects.create(permission="app1:hosts:read", tenant=public_tenant) permission2 = Permission.objects.create(permission="inventory:hosts:write", tenant=public_tenant) From 3a336c3dde7e20bcf6b71cb080f5ebb851994301 Mon Sep 17 00:00:00 2001 From: Jay Zeng Date: Tue, 1 Oct 2024 11:33:02 -0400 Subject: [PATCH 2/3] Update principal model --- .../migrations/0051_alter_principal_user_id.py | 18 ++++++++++++++++++ rbac/management/principal/model.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 rbac/management/migrations/0051_alter_principal_user_id.py diff --git a/rbac/management/migrations/0051_alter_principal_user_id.py b/rbac/management/migrations/0051_alter_principal_user_id.py new file mode 100644 index 00000000..59b83203 --- /dev/null +++ b/rbac/management/migrations/0051_alter_principal_user_id.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.10 on 2024-10-01 15:32 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("management", "0050_principal_user_id_alter_principal_type"), + ] + + operations = [ + migrations.AlterField( + model_name="principal", + name="user_id", + field=models.CharField(db_index=True, max_length=256, null=True), + ), + ] diff --git a/rbac/management/principal/model.py b/rbac/management/principal/model.py index 8d7bfb61..c217b578 100644 --- a/rbac/management/principal/model.py +++ b/rbac/management/principal/model.py @@ -35,7 +35,7 @@ class Types(models.TextChoices): cross_account = models.BooleanField(default=False) type = models.CharField(null=False, default=Types.USER, choices=Types.choices, max_length=20) service_account_id = models.TextField(null=True) - user_id = models.CharField(max_length=36, null=True) + user_id = models.CharField(max_length=256, null=True, db_index=True) class Meta: ordering = ["username"] From 42f974d096cbfbcc06b13348e86053cf585ff6f2 Mon Sep 17 00:00:00 2001 From: Jay Zeng Date: Tue, 1 Oct 2024 11:48:36 -0400 Subject: [PATCH 3/3] Add a flag to disable the UMB JOB processing --- deploy/rbac-clowdapp.yml | 5 +++++ rbac/rbac/celery.py | 11 ++++++----- rbac/rbac/settings.py | 1 + 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/deploy/rbac-clowdapp.yml b/deploy/rbac-clowdapp.yml index d5e46c35..646e21c0 100644 --- a/deploy/rbac-clowdapp.yml +++ b/deploy/rbac-clowdapp.yml @@ -293,6 +293,8 @@ objects: optional: true - name: PRINCIPAL_CLEANUP_DELETION_ENABLED_UMB value: ${PRINCIPAL_CLEANUP_DELETION_ENABLED_UMB} + - name: UMB_JOB_ENABLED + value: ${UMB_JOB_ENABLED} - name: service minReplicas: ${{MIN_REPLICAS}} @@ -895,6 +897,9 @@ parameters: - name: PRINCIPAL_CLEANUP_DELETION_ENABLED_UMB description: Allow cleanup job to delete principals via messages from UMB value: 'False' +- name: UMB_JOB_ENABLE + description: Temp env to enable the UMB job + value: 'True' - name: UMB_HOST description: Host of the UMB service value: 'localhost' diff --git a/rbac/rbac/celery.py b/rbac/rbac/celery.py index 87a28d36..e55f0f88 100644 --- a/rbac/rbac/celery.py +++ b/rbac/rbac/celery.py @@ -48,11 +48,12 @@ } if settings.PRINCIPAL_CLEANUP_DELETION_ENABLED_UMB: - app.conf.beat_schedule["principal-cleanup-every-minute"] = { - "task": "management.tasks.principal_cleanup_via_umb", - "schedule": 60, # Every 60 second - "args": [], - } + if settings.UMB_JOB_ENABLED: # TODO: This is temp flag, remove it after populating user_id + 
app.conf.beat_schedule["principal-cleanup-every-minute"] = { + "task": "management.tasks.principal_cleanup_via_umb", + "schedule": 60, # Every 60 second + "args": [], + } else: app.conf.beat_schedule["principal-cleanup-every-sevenish-days"] = { "task": "management.tasks.principal_cleanup", diff --git a/rbac/rbac/settings.py b/rbac/rbac/settings.py index a07e3938..89ce4c0d 100644 --- a/rbac/rbac/settings.py +++ b/rbac/rbac/settings.py @@ -475,6 +475,7 @@ # Settings for enabling/disabling deletion in principal cleanup job via UMB PRINCIPAL_CLEANUP_DELETION_ENABLED_UMB = ENVIRONMENT.bool("PRINCIPAL_CLEANUP_DELETION_ENABLED_UMB", default=False) +UMB_JOB_ENABLED = ENVIRONMENT.bool("UMB_JOB_ENABLED", default=True) UMB_HOST = ENVIRONMENT.get_value("UMB_HOST", default="localhost") UMB_PORT = ENVIRONMENT.get_value("UMB_PORT", default="61612") # Service account name
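
For context on the new retrieve_user_info() parsing path, below is a minimal, illustrative sketch (not part of the patch) of how xmltodict.parse() turns a UMB CanonicalMessage frame into the nested dict the cleaner indexes. The payload is a trimmed, hypothetical example: the element and attribute names mirror only the keys the new code reads, the sample values are taken from the test fixture above, and the real messages carry many more fields (see FRAME_BODY_CREATION in tests/management/principal/test_cleaner.py). The entity-name attribute on Identifier is there only so xmltodict yields the "#text" form the code indexes.

# Illustrative sketch only; a trimmed, hypothetical CanonicalMessage payload.
import xmltodict

sample_frame_body = """
<CanonicalMessage>
  <Payload>
    <Sync>
      <User>
        <Identifiers>
          <Identifier entity-name="User">56780000</Identifier>
          <Reference entity-name="Customer">17685860</Reference>
          <Reference entity-name="Site">33535807_SITE</Reference>
        </Identifiers>
        <Person>
          <Credentials>
            <Login>principal-test</Login>
          </Credentials>
        </Person>
      </User>
    </Sync>
  </Payload>
</CanonicalMessage>
"""

data_dict = xmltodict.parse(sample_frame_body)
# process_umb_event() drops any frame that has no CanonicalMessage root.
canonical_message = data_dict.get("CanonicalMessage")

user = canonical_message["Payload"]["Sync"]["User"]
# xmltodict exposes attributes with an "@" prefix and element text as "#text",
# and repeated siblings (the two Reference elements) become a list.
user_id = user["Identifiers"]["Identifier"]["#text"]    # "56780000"
for ref in user["Identifiers"]["Reference"]:
    if ref["@entity-name"] == "Customer":
        org_id = ref["#text"]                           # "17685860"
        break
username = user["Person"]["Credentials"]["Login"]       # "principal-test"
print(user_id, org_id, username)

In the actual flow, retrieve_user_info() then calls BOP via PROXY.request_filtered_principals([user_id], options={"return_id": True}); an empty "data" list is treated as a deleted user (falling back to the fields parsed from the message), while a non-empty response is treated as a create/update. Frames that fail user-info retrieval are intentionally not acked, so with ACK_CLIENT_INDIVIDUAL they remain on the queue to be reprocessed later.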