diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/exclusion_constraint/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/exclusion_constraint/__init__.py
index 0a06ce473..33a6d7f14 100644
--- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/exclusion_constraint/__init__.py
+++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/exclusion_constraint/__init__.py
@@ -462,6 +462,30 @@ class ExclusionConstraintView(PGChildNodeView):
                 ))
         return res
 
+    @staticmethod
+    def parse_input_data(data):
+        for k, v in data.items():
+            try:
+                # comments should be taken as is because if user enters a
+                # json comment it is parsed by loads which should not happen
+                if k in ('comment',):
+                    data[k] = v
+                else:
+                    data[k] = json.loads(v, encoding='utf-8')
+            except (ValueError, TypeError, KeyError):
+                data[k] = v
+
+        return data
+
+    @staticmethod
+    def check_required_args(data, required_args):
+        for arg in required_args:
+            if arg not in data or \
+                    (isinstance(data[arg], list) and len(data[arg]) < 1):
+                return arg
+
+        return None
+
     @check_precondition
     def create(self, gid, sid, did, scid, tid, exid=None):
         """
@@ -480,36 +504,22 @@ class ExclusionConstraintView(PGChildNodeView):
         """
         required_args = ['columns']
 
-        data = request.form if request.form else json.loads(
-            request.data, encoding='utf-8'
-        )
-
-        for k, v in data.items():
-            try:
-                # comments should be taken as is because if user enters a
-                # json comment it is parsed by loads which should not happen
-                if k in ('comment',):
-                    data[k] = v
-                else:
-                    data[k] = json.loads(v, encoding='utf-8')
-            except (ValueError, TypeError, KeyError):
-                data[k] = v
-
-        for arg in required_args:
-            if arg not in data or \
-                    (isinstance(data[arg], list) and len(data[arg]) < 1):
-                return make_json_response(
-                    status=400,
-                    success=0,
-                    errormsg=_(
-                        "Could not find required parameter ({})."
-                    ).format(arg)
-                )
+        data = json.loads(request.data, encoding='utf-8')
+        data = self.parse_input_data(data)
+        arg_missing = self.check_required_args(data, required_args)
+        if arg_missing is not None:
+            return make_json_response(
+                status=400,
+                success=0,
+                errormsg=_(
+                    "Could not find required parameter ({})."
+                ).format(arg_missing)
+            )
 
         data['schema'] = self.schema
         data['table'] = self.table
         try:
-            if 'name' not in data or data['name'] == "":
+            if data.get('name', '') == "":
                 SQL = render_template(
                     "/".join([self.template_path, 'begin.sql']))
                 # Start transaction.
@@ -528,7 +538,7 @@ class ExclusionConstraintView(PGChildNodeView):
                 self.end_transaction()
                 return internal_server_error(errormsg=res)
 
-            if 'name' not in data or data['name'] == "":
+            if data.get('name', '') == "":
                 sql = render_template(
                     "/".join([self.template_path,
                               'get_oid_with_transaction.sql']),
@@ -784,13 +794,8 @@ class ExclusionConstraintView(PGChildNodeView):
 
             columns = []
             for row in res['rows']:
-                if row['options'] & 1:
-                    order = False
-                    nulls_order = True if (row['options'] & 2) else False
-                else:
-                    order = True
-                    nulls_order = True if (row['options'] & 2) else False
-
+                nulls_order = True if (row['options'] & 2) else False
+                order = False if row['options'] & 1 else True
                 columns.append({"column": row['coldef'].strip('"'),
                                 "oper_class": row['opcname'],
                                 "order": order,
@@ -814,7 +819,7 @@ class ExclusionConstraintView(PGChildNodeView):
 
                 data['include'] = [col['colname'] for col in res['rows']]
 
-            if not data['amname'] or data['amname'] == '':
+            if not data.get('amname'):
                 data['amname'] = 'btree'
 
             SQL = render_template(
diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/utils.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/utils.py
index 3fd55868d..af8166681 100644
--- a/web/pgadmin/browser/server_groups/servers/databases/schemas/utils.py
+++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/utils.py
@@ -583,16 +583,11 @@ class VacuumSettings:
             row_name = row['name']
             if type == 'toast':
                 row_name = 'toast_{0}'.format(row['name'])
-            if row_name in result and result[row_name] is not None:
-                if row['column_type'] == 'number':
-                    value = float(result[row_name])
-                    value = int(value) if value % 1 == 0 else value
-                else:
-                    value = int(result[row_name])
-                row['value'] = value
+            if result.get(row_name, None) is not None:
+                value = float(result[row_name])
+                row['value'] = int(value) if value % 1 == 0 else value
             else:
-                if 'value' in row:
-                    row.pop('value')
+                row.pop('value', None)
 
         return vacuum_settings_tmp
 
diff --git a/web/pgadmin/tools/debugger/__init__.py b/web/pgadmin/tools/debugger/__init__.py
index d3f458466..25f177721 100644
--- a/web/pgadmin/tools/debugger/__init__.py
+++ b/web/pgadmin/tools/debugger/__init__.py
@@ -607,6 +607,75 @@ def direct_new(trans_id):
     )
 
 
+def get_debugger_version(conn):
+    debugger_version = 0
+    status, rid = conn.execute_scalar(
+        "SELECT COUNT(*) FROM pg_catalog.pg_proc p"
+        " LEFT JOIN pg_catalog.pg_namespace n ON p.pronamespace = n.oid"
+        " WHERE n.nspname = ANY(current_schemas(false)) AND"
+        " p.proname = 'pldbg_get_proxy_info';"
+    )
+
+    if not status:
+        return False, internal_server_error(errormsg=rid)
+
+    if rid == 0:
+        debugger_version = 1
+
+    status, rid = conn.execute_scalar(
+        "SELECT proxyapiver FROM pldbg_get_proxy_info();")
+
+    if status and rid in (2, 3):
+        debugger_version = rid
+
+    return True, debugger_version
+
+
+def validate_debug(conn, debug_type, is_superuser):
+    if debug_type == 'indirect' and not is_superuser:
+        # If user is super user then we should check debugger library is
+        # loaded or not
+        msg = gettext("You must be a superuser to set a global breakpoint"
+                      " and perform indirect debugging.")
+        return False, internal_server_error(errormsg=msg)
+
+    status, rid_pre = conn.execute_scalar(
+        "SHOW shared_preload_libraries"
+    )
+    if not status:
+        return False, internal_server_error(
+            gettext("Could not fetch debugger plugin information.")
+        )
+
+    # Need to check if plugin is really loaded or not with
+    # "plugin_debugger" string
+    if debug_type == 'indirect' and "plugin_debugger" not in rid_pre:
+        msg = gettext(
+            "The debugger plugin is not enabled. "
+            "Please add the plugin to the shared_preload_libraries "
+            "setting in the postgresql.conf file and restart the "
+            "database server for indirect debugging."
+        )
+        current_app.logger.debug(msg)
+        return False, internal_server_error(msg)
+
+    # Check debugger extension version for EPAS 11 and above.
+    # If it is 1.0 then return error to upgrade the extension.
+    status, ext_version = conn.execute_scalar(
+        "SELECT installed_version FROM pg_catalog.pg_available_extensions "
+        "WHERE name = 'pldbgapi'"
+    )
+    if not status:
+        return False, internal_server_error(errormsg=ext_version)
+    if conn.manager.server_type == 'ppas' and conn.manager.sversion >= 110000 \
+            and ext_version is not None and float(ext_version) < 1.1:
+        return False, internal_server_error(
+            errormsg=gettext("Please upgrade the pldbgapi extension "
+                             "to 1.1 or above and try again."))
+
+    return True, None
+
+
 @blueprint.route(
     '/initialize_target/<debug_type>/<int:trans_id>/<int:sid>/<int:did>/'
     '<int:scid>/<int:func_id>',
@@ -644,11 +713,8 @@ def initialize_target(debug_type, trans_id, sid, did,
 
     # Create asynchronous connection using random connection id.
     conn_id = str(random.randint(1, 9999999))
-    try:
-        manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
-        conn = manager.connection(did=did, conn_id=conn_id)
-    except Exception as e:
-        return internal_server_error(errormsg=str(e))
+    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
+    conn = manager.connection(did=did, conn_id=conn_id)
 
     # Connect the Server
     status, msg = conn.connect()
@@ -656,49 +722,9 @@ def initialize_target(debug_type, trans_id, sid, did,
         return internal_server_error(errormsg=str(msg))
 
     user = manager.user_info
-    if debug_type == 'indirect':
-        # If user is super user then we should check debugger library is
-        # loaded or not
-        if not user['is_superuser']:
-            msg = gettext("You must be a superuser to set a global breakpoint"
-                          " and perform indirect debugging.")
-            return internal_server_error(errormsg=msg)
-        else:
-            status_in, rid_pre = conn.execute_scalar(
-                "SHOW shared_preload_libraries"
-            )
-            if not status_in:
-                return internal_server_error(
-                    gettext("Could not fetch debugger plugin information.")
-                )
-
-            # Need to check if plugin is really loaded or not with
-            # "plugin_debugger" string
-            if "plugin_debugger" not in rid_pre:
-                msg = gettext(
-                    "The debugger plugin is not enabled. "
-                    "Please add the plugin to the shared_preload_libraries "
-                    "setting in the postgresql.conf file and restart the "
-                    "database server for indirect debugging."
-                )
-                current_app.logger.debug(msg)
-                return internal_server_error(msg)
-
-    # Check debugger extension version for EPAS 11 and above.
-    # If it is 1.0 then return error to upgrade the extension.
-    if manager.server_type == 'ppas' and manager.sversion >= 110000:
-        status, ext_version = conn.execute_scalar(
-            "SELECT installed_version FROM pg_catalog.pg_available_extensions "
-            "WHERE name = 'pldbgapi'"
-        )
-
-        if not status:
-            return internal_server_error(errormsg=ext_version)
-        else:
-            if float(ext_version) < 1.1:
-                return internal_server_error(
-                    errormsg=gettext("Please upgrade the pldbgapi extension "
-                                     "to 1.1 or above and try again."))
+    status, error = validate_debug(conn, debug_type, user['is_superuser'])
+    if not status:
+        return error
 
     # Set the template path required to read the sql files
     template_path = 'debugger/sql'
@@ -718,30 +744,10 @@ def initialize_target(debug_type, trans_id, sid, did,
 
         func_id = tr_set['rows'][0]['tgfoid']
 
-    status = True
-
     # Find out the debugger version and store it in session variables
-    status, rid = conn.execute_scalar(
-        "SELECT COUNT(*) FROM pg_catalog.pg_proc p"
-        " LEFT JOIN pg_catalog.pg_namespace n ON p.pronamespace = n.oid"
-        " WHERE n.nspname = ANY(current_schemas(false)) AND"
-        " p.proname = 'pldbg_get_proxy_info';"
-    )
-
+    status, debugger_version = get_debugger_version(conn)
     if not status:
-        return internal_server_error(errormsg=rid)
-    else:
-        if rid == 0:
-            debugger_version = 1
-
-        status, rid = conn.execute_scalar(
-            "SELECT proxyapiver FROM pldbg_get_proxy_info();")
-
-        if status:
-            if rid == 2 or rid == 3:
-                debugger_version = rid
-            else:
-                status = False
+        return debugger_version
 
     # Add the debugger version information to pgadmin4 log file
     current_app.logger.debug("Debugger version is: %d", debugger_version)
@@ -753,9 +759,8 @@ def initialize_target(debug_type, trans_id, sid, did,
     # provide the data from another session so below condition will
     # be be required
     if request.method == 'POST':
-        data = json.loads(request.values['data'], encoding='utf-8')
-        if data:
-            de_inst.function_data['args_value'] = data
+        de_inst.function_data['args_value'] = \
+            json.loads(request.values['data'], encoding='utf-8')
 
     # Update the debugger data session variable
     # Here frame_id is required when user debug the multilevel function.
@@ -1143,51 +1148,43 @@ def execute_debugger_query(trans_id, query_type):
         conn_id=de_inst.debugger_data['exe_conn_id'])
 
     # find the debugger version and execute the query accordingly
-    dbg_version = de_inst.debugger_data['debugger_version']
-    if dbg_version <= 2:
-        template_path = 'debugger/sql/v1'
-    else:
-        template_path = 'debugger/sql/v2'
+    template_path = 'debugger/sql/v1' \
+        if de_inst.debugger_data['debugger_version'] <= 2 \
+        else 'debugger/sql/v2'
 
-    if conn.connected():
-        sql = render_template(
-            "/".join([template_path, query_type + ".sql"]),
-            session_id=de_inst.debugger_data['session_id']
-        )
-        # As the query type is continue or step_into or step_over then we
-        # may get result after some time so poll the result.
-        # We need to update the frame id variable when user move the next
-        # step for debugging.
-        if query_type == 'continue' or query_type == 'step_into' or \
-                query_type == 'step_over':
-            # We should set the frame_id to 0 when execution starts.
-            if de_inst.debugger_data['frame_id'] != 0:
-                de_inst.debugger_data['frame_id'] = 0
-                de_inst.update_session()
+    if not conn.connected():
+        result = gettext('Not connected to server or connection '
+                         'with the server has been closed.')
+        return internal_server_error(errormsg=result)
 
-            status, result = conn.execute_async(sql)
-            if not status:
-                internal_server_error(errormsg=result)
-            return make_json_response(
-                data={'status': status, 'result': result}
-            )
-        elif query_type == 'abort_target':
-            status, result = conn.execute_dict(sql)
-            if not status:
-                return internal_server_error(errormsg=result)
-            else:
-                return make_json_response(
-                    info=gettext('Debugging aborted successfully.'),
-                    data={'status': 'Success', 'result': result}
-                )
-        else:
-            status, result = conn.execute_dict(sql)
+    sql = render_template(
+        "/".join([template_path, query_type + ".sql"]),
+        session_id=de_inst.debugger_data['session_id']
+    )
+    # As the query type is continue or step_into or step_over then we
+    # may get result after some time so poll the result.
+    # We need to update the frame id variable when user move the next
+    # step for debugging.
+    if query_type in ('continue', 'step_into', 'step_over'):
+        # We should set the frame_id to 0 when execution starts.
+        de_inst.debugger_data['frame_id'] = 0
+        de_inst.update_session()
+
+        status, result = conn.execute_async(sql)
         if not status:
             return internal_server_error(errormsg=result)
-    else:
-        result = gettext('Not connected to server or connection '
-                         'with the server has been closed.')
+        return make_json_response(
+            data={'status': status, 'result': result}
+        )
+
+    status, result = conn.execute_dict(sql)
+    if not status:
         return internal_server_error(errormsg=result)
+    if query_type == 'abort_target':
+        return make_json_response(
+            info=gettext('Debugging aborted successfully.'),
+            data={'status': 'Success', 'result': result}
+        )
 
     return make_json_response(
         data={'status': 'Success', 'result': result['rows']}
@@ -1230,7 +1227,8 @@ def messages(trans_id):
     port_number = ''
 
     if conn.connected():
-        status, result = conn.poll()
+        status = 'Busy'
+        _, result = conn.poll()
         notify = conn.messages()
         if notify:
             # In notice message we need to find "PLDBGBREAK" string to find
@@ -1240,19 +1238,12 @@ def messages(trans_id):
             # From the above message we need to find out port number
             # as "7" so below logic will find 7 as port number
             # and attach listened to that port number
-            port_found = False
             tmp_list = list(filter(lambda x: 'PLDBGBREAK' in x, notify))
             if len(tmp_list) > 0:
                 port_number = re.search(r'\d+', tmp_list[0])
                 if port_number is not None:
                     status = 'Success'
                     port_number = port_number.group(0)
-                    port_found = True
-
-            if not port_found:
-                status = 'Busy'
-        else:
-            status = 'Busy'
 
         return make_json_response(
             data={'status': status, 'result': port_number}
