Hi Dave and Hackers,

Please find attached a work-in-progress patch with the following
modifications to the query history:

1- Queries generated by pgAdmin in Save Data operations are now recorded in
query history. This includes transaction control commands such as COMMIT,
ROLLBACK, and SAVEPOINT.

2- Queries are now recorded in a correct (mogrified) form after parameters
injection - as opposed to older versions < 4.10. They also appear with the
correct start time (they used to appear with the start time of the
previously executed query - a bug I found).

3- Save Data Queries are visually distinguishable in the query history.

4- Save Data Queries can be shown/hidden from history using a button.

5- Query Tool and View Data now share a common history - this makes more
sense now as data changes can be done from both modes. I had a thought to
remove the Query History from View Data mode entirely, but I thought it
might be useful for some users? I don't know.

I am done with all the functionality code, what is left is the design of
the toggling button/checkbox. For now, I am using an empty button at the
end of the toolbar (next to download button) for experimental purposes.

Do you think a button or a checkbox is more appropriate? If a button, I
would need a design to use. If a checkbox, I am going to need more help, as
I am not so good with the design parts. Where should it be placed (I am
thinking above the list of history entries)? How should it be styled?

For now, I will start working on tests and documentation updates for this.
Looking forward to your feedback and comments!

P.S. I have done a lot of refactoring, especially in save_data_changes.py;
I would really appreciate it if someone reviewed these changes.

Thanks!
-- 
*Yosry Muhammad Yosry*

Computer Engineering student,
The Faculty of Engineering,
Cairo University (2021).
Class representative of CMP 2021.
https://www.linkedin.com/in/yosrym93/
diff --git a/web/pgadmin/static/js/sqleditor/history/history_collection.js b/web/pgadmin/static/js/sqleditor/history/history_collection.js
index a1654fdc..f8380598 100644
--- a/web/pgadmin/static/js/sqleditor/history/history_collection.js
+++ b/web/pgadmin/static/js/sqleditor/history/history_collection.js
@@ -30,6 +30,10 @@ export default class HistoryCollection {
     this.onResetHandler(this.historyList);
   }
 
+  toggleSaveDataEntries() {
+    this.toggleSaveDataEntriesHandler();
+  }
+
   onAdd(onAddHandler) {
     this.onAddHandler = onAddHandler;
   }
@@ -37,4 +41,8 @@ export default class HistoryCollection {
   onReset(onResetHandler) {
     this.onResetHandler = onResetHandler;
   }
+
+  onToggleSaveDataEntries(toggleSaveDataEntriesHandler) {
+    this.toggleSaveDataEntriesHandler = toggleSaveDataEntriesHandler;
+  }
 }
diff --git a/web/pgadmin/static/js/sqleditor/history/query_history.js b/web/pgadmin/static/js/sqleditor/history/query_history.js
index d0091596..525cfc39 100644
--- a/web/pgadmin/static/js/sqleditor/history/query_history.js
+++ b/web/pgadmin/static/js/sqleditor/history/query_history.js
@@ -13,6 +13,7 @@ export default class QueryHistory {
     this.onCopyToEditorHandler = ()=>{};
     this.histCollection.onAdd(this.onAddEntry.bind(this));
     this.histCollection.onReset(this.onResetEntries.bind(this));
+    this.histCollection.onToggleSaveDataEntries(this.onToggleSaveDataEntries.bind(this));
   }
 
   focus() {
@@ -54,6 +55,10 @@ export default class QueryHistory {
     }
   }
 
+  onToggleSaveDataEntries() {
+    this.queryHistEntries.toggleSaveDataEntries();
+  }
+
   render() {
     if (this.histCollection.length() == 0) {
       this.parentNode.empty()
diff --git a/web/pgadmin/static/js/sqleditor/history/query_history_details.js b/web/pgadmin/static/js/sqleditor/history/query_history_details.js
index afd75101..626e89b5 100644
--- a/web/pgadmin/static/js/sqleditor/history/query_history_details.js
+++ b/web/pgadmin/static/js/sqleditor/history/query_history_details.js
@@ -96,10 +96,13 @@ export default class QueryHistoryDetails {
 
   updateQueryMetaData() {
     let itemTemplate = (data, description) => {
-      return `<div class='item'>
-                <span class='value'>${data}</span>
-                <span class='description'>${description}</span>
-            </div>`;
+      if(data)
+        return `<div class='item'>
+                  <span class='value'>${data}</span>
+                  <span class='description'>${description}</span>
+              </div>`;
+      else
+        return '';
     };
 
     this.$metaData.empty().append(
@@ -134,8 +137,23 @@ export default class QueryHistoryDetails {
     }
   }
 
+  updateInfoMessage() {
+    if (this.entry.info) {
+      this.$infoMsgBlock.removeClass('d-none');
+      this.$infoMsgBlock.empty().append(
+        `<div class='history-info-text'>
+            ${this.entry.info}
+        </div>`
+      );
+    } else {
+      this.$infoMsgBlock.addClass('d-none');
+      this.$infoMsgBlock.empty();
+    }
+  }
+
   selectiveRender() {
     this.updateErrorMessage();
+    this.updateInfoMessage();
     this.updateCopyButton(false);
     this.updateQueryMetaData();
     this.query_codemirror.setValue(this.entry.query);
@@ -147,6 +165,7 @@ export default class QueryHistoryDetails {
       this.parentNode.empty().append(
         `<div id='query_detail' class='query-detail'>
             <div class='error-message-block'></div>
+            <div class='info-message-block'></div>
             <div class='metadata-block'></div>
             <div class='query-statement-block'>
               <div id='history-detail-query'>
@@ -168,6 +187,7 @@ export default class QueryHistoryDetails {
       );
 
       this.$errMsgBlock = this.parentNode.find('.error-message-block');
+      this.$infoMsgBlock = this.parentNode.find('.info-message-block');
       this.$copyBtn = this.parentNode.find('#history-detail-query .btn-copy');
       this.$copyBtn.off('click').on('click', this.copyAllHandler.bind(this));
       this.$copyToEditor = this.parentNode.find('#history-detail-query .btn-copy-editor');
diff --git a/web/pgadmin/static/js/sqleditor/history/query_history_entries.js b/web/pgadmin/static/js/sqleditor/history/query_history_entries.js
index 51c7847e..9707ae0c 100644
--- a/web/pgadmin/static/js/sqleditor/history/query_history_entries.js
+++ b/web/pgadmin/static/js/sqleditor/history/query_history_entries.js
@@ -69,7 +69,9 @@ export class QueryHistoryItem {
     this.$el = $(
       `<li class='list-item' tabindex='0' data-key='${this.dataKey()}'>
           <div class='entry ${this.entry.status ? '' : 'error'}'>
-              <div class='query'>${_.escape(this.entry.query)}</div>
+              <div class='query'>
+                  ${_.escape(this.entry.query)}
+              </div>
               <div class='other-info'>
               <div class='timestamp'>${this.formatDate(this.entry.start_time)}</div>
               </div>
@@ -80,6 +82,11 @@ export class QueryHistoryItem {
       .on('click', e => {
         this.onClickHandler($(e.currentTarget));
       });
+
+    if(this.entry.is_save_data_query) {
+      this.$el.find('.entry').addClass('save-data-query');
+      this.$el.find('.query').prepend('<i class="icon-save-data-changes sql-icon-lg"></i>');
+    }
   }
 }
 
@@ -221,6 +228,12 @@ export class QueryHistoryEntries {
     this.setSelectedListItem(newItem.$el);
   }
 
+  toggleSaveDataEntries() {
+    this.$el.find('.save-data-query').each(function() {
+      $(this).toggle();
+    });
+  }
+
   render() {
     let self = this;
     self.$el = $(`
diff --git a/web/pgadmin/tools/datagrid/templates/datagrid/index.html b/web/pgadmin/tools/datagrid/templates/datagrid/index.html
index b1be514d..a69305c5 100644
--- a/web/pgadmin/tools/datagrid/templates/datagrid/index.html
+++ b/web/pgadmin/tools/datagrid/templates/datagrid/index.html
@@ -348,6 +348,13 @@
                     <i class="fa fa-download sql-icon-lg" aria-hidden="true"></i>
                 </button>
             </div>
+            <div class="btn-group" role="group" aria-label="">
+                <button id="btn-history-toggle" type="button" class="btn btn-sm btn-secondary"
+                        title=""
+                        tabindex="0">
+                    <i class="fa sql-icon-lg" aria-hidden="true"></i>
+                </button>
+            </div>
         </div>
 
         <div class="connection_status_wrapper d-flex">
diff --git a/web/pgadmin/tools/sqleditor/__init__.py b/web/pgadmin/tools/sqleditor/__init__.py
index 5091c11d..b862f62c 100644
--- a/web/pgadmin/tools/sqleditor/__init__.py
+++ b/web/pgadmin/tools/sqleditor/__init__.py
@@ -696,7 +696,7 @@ def generate_client_primary_key_name(columns_info):
 @login_required
 def save(trans_id):
     """
-    This method is used to save the changes to the server
+    This method is used to save the data changes to the server
 
     Args:
         trans_id: unique transaction id
@@ -746,7 +746,7 @@ def save(trans_id):
                 return make_json_response(
                     data={'status': status, 'result': u"{}".format(msg)}
                 )
-        status, res, query_res, _rowid = trans_obj.save(
+        status, res, query_results, _rowid = trans_obj.save(
             changed_data,
             session_obj['columns_info'],
             session_obj['client_primary_key'],
@@ -754,7 +754,7 @@ def save(trans_id):
     else:
         status = False
         res = error_msg
-        query_res = None
+        query_results = None
         _rowid = None
 
     transaction_status = conn.transaction_status()
@@ -763,7 +763,7 @@ def save(trans_id):
         data={
             'status': status,
             'result': res,
-            'query_result': query_res,
+            'query_results': query_results,
             '_rowid': _rowid,
             'transaction_status': transaction_status
         }
diff --git a/web/pgadmin/tools/sqleditor/static/js/sqleditor.js b/web/pgadmin/tools/sqleditor/static/js/sqleditor.js
index 4e1302d7..8c1f57f1 100644
--- a/web/pgadmin/tools/sqleditor/static/js/sqleditor.js
+++ b/web/pgadmin/tools/sqleditor/static/js/sqleditor.js
@@ -112,6 +112,7 @@ define('tools.querytool', [
       'click #btn-flash-menu': 'on_flash',
       'click #btn-cancel-query': 'on_cancel_query',
       'click #btn-download': 'on_download',
+      'click #btn-history-toggle': 'on_history_toggle',
       'click #btn-clear': 'on_clear',
       'click #btn-auto-commit': 'on_auto_commit',
       'click #btn-auto-rollback': 'on_auto_rollback',
@@ -1361,26 +1362,26 @@ define('tools.querytool', [
         });
       }
 
-      // Make ajax call to get history data except view/edit data
-      if(self.handler.is_query_tool) {
-        $.ajax({
-          url: url_for('sqleditor.get_query_history', {
-            'trans_id': self.handler.transId,
-          }),
-          method: 'GET',
-          contentType: 'application/json',
-        })
-          .done(function(res) {
-            res.data.result.map((entry) => {
-              let newEntry = JSON.parse(entry);
-              newEntry.start_time = new Date(newEntry.start_time);
-              self.history_collection.add(newEntry);
-            });
-          })
-          .fail(function() {
-          /* history fetch fail should not affect query tool */
+      // Make ajax call to get history data
+      $.ajax({
+        url: url_for('sqleditor.get_query_history', {
+          'trans_id': self.handler.transId,
+        }),
+        method: 'GET',
+        contentType: 'application/json',
+      })
+        .done(function(res) {
+          res.data.result.map((entry) => {
+            let newEntry = JSON.parse(entry);
+            newEntry.start_time = new Date(newEntry.start_time);
+            self.history_collection.add(newEntry);
           });
-      } else {
+        })
+        .fail(function() {
+        /* history fetch fail should not affect query tool */
+        });
+
+      if(!self.is_query_tool) {
         self.historyComponent.setEditorPref({'copy_to_editor':false});
       }
     },
@@ -1886,6 +1887,16 @@ define('tools.querytool', [
       queryToolActions.download(this.handler);
     },
 
+    on_history_toggle: function() {
+      var self = this;
+
+      self.handler.trigger(
+        'pgadmin-sqleditor:button:toggle-history',
+        self,
+        self.handler
+      );
+    },
+
     keyAction: function(event) {
       var panel_type='';
 
@@ -2248,6 +2259,7 @@ define('tools.querytool', [
         self.on('pgadmin-sqleditor:button:explain-timing', self._explain_timing, self);
         self.on('pgadmin-sqleditor:button:explain-summary', self._explain_summary, self);
         self.on('pgadmin-sqleditor:button:explain-settings', self._explain_settings, self);
+        self.on('pgadmin-sqleditor:button:toggle-history', self.history_toggle, self);
         // Indentation related
         self.on('pgadmin-sqleditor:indent_selected_code', self._indent_selected_code, self);
         self.on('pgadmin-sqleditor:unindent_selected_code', self._unindent_selected_code, self);
@@ -2711,36 +2723,38 @@ define('tools.querytool', [
               new Date());
           }
 
-          let hist_entry = {
+          let history_entry = {
             'status': status,
             'start_time': self.query_start_time,
             'query': self.query,
             'row_affected': self.rows_affected,
             'total_time': self.total_time,
             'message': msg,
+            'is_save_data_query': false,
           };
 
-          /* Make ajax call to save the history data
-           * Do not bother query tool if failed to save
-           * Not applicable for view/edit data
-           */
-          if(self.is_query_tool) {
-            $.ajax({
-              url: url_for('sqleditor.add_query_history', {
-                'trans_id': self.transId,
-              }),
-              method: 'POST',
-              contentType: 'application/json',
-              data: JSON.stringify(hist_entry),
-            })
-              .done(function() {})
-              .fail(function() {});
-          }
-
-          self.gridView.history_collection.add(hist_entry);
+          self.add_to_history(history_entry);
         }
       },
 
+      /* Make ajax call to save the history data */
+      add_to_history: function(history_entry) {
+        var self = this;
+
+        $.ajax({
+          url: url_for('sqleditor.add_query_history', {
+            'trans_id': self.transId,
+          }),
+          method: 'POST',
+          contentType: 'application/json',
+          data: JSON.stringify(history_entry),
+        })
+          .done(function() {})
+          .fail(function() {});
+
+        self.gridView.history_collection.add(history_entry);
+      },
+
       /* This function is used to check whether cell
        * is editable or not depending on primary keys
        * and staged_rows flag
@@ -2901,7 +2915,7 @@ define('tools.querytool', [
         var req_data = self.data_store, view = self.gridView;
         req_data.columns = view ? view.handler.columns : self.columns;
 
-        var save_successful = false;
+        var save_successful = false, save_start_time = new Date();
 
         // Make ajax call to save the data
         $.ajax({
@@ -2950,7 +2964,7 @@ define('tools.querytool', [
               if(is_added) {
               // Update the rows in a grid after addition
                 dataView.beginUpdate();
-                _.each(res.data.query_result, function(r) {
+                _.each(res.data.query_results, function(r) {
                   if (!_.isNull(r.row_added)) {
                   // Fetch temp_id returned by server after addition
                     var row_id = Object.keys(r.row_added)[0];
@@ -3044,6 +3058,22 @@ define('tools.querytool', [
               grid.gotoCell(_row_index, 1);
             }
 
+            var query_history_info_msg = gettext('This query was generated by pgAdmin as part of a "Save Data" operation');
+
+            _.each(res.data.query_results, function(r) {
+              var history_entry = {
+                'status': r.status,
+                'start_time': save_start_time,
+                'query': r.sql,
+                'row_affected': r.rows_affected,
+                'total_time': null,
+                'message': r.result,
+                'is_save_data_query': true,
+                'info': query_history_info_msg,
+              };
+              self.add_to_history(history_entry);
+            });
+
             self.trigger('pgadmin-sqleditor:loading-icon:hide');
 
             grid.invalidate();
@@ -3636,7 +3666,6 @@ define('tools.querytool', [
           mode_disabled = true;
         }
 
-        $('#btn-clear-dropdown').prop('disabled', mode_disabled);
         $('#btn-explain').prop('disabled', mode_disabled);
         $('#btn-explain-analyze').prop('disabled', mode_disabled);
         $('#btn-explain-options-dropdown').prop('disabled', mode_disabled);
@@ -4040,6 +4069,10 @@ define('tools.querytool', [
         is_query_running = value;
       },
 
+      history_toggle: function() {
+        this.gridView.history_collection.toggleSaveDataEntries();
+      },
+
       /* Checks if there is any unsaved data changes, unsaved changes in the query
       or uncommitted transactions before closing a panel */
       check_needed_confirmations_before_closing_panel: function(is_close_event_call = false) {
diff --git a/web/pgadmin/tools/sqleditor/static/scss/_history.scss b/web/pgadmin/tools/sqleditor/static/scss/_history.scss
index 37ed8b0e..9005b83a 100644
--- a/web/pgadmin/tools/sqleditor/static/scss/_history.scss
+++ b/web/pgadmin/tools/sqleditor/static/scss/_history.scss
@@ -111,6 +111,17 @@
     }
   }
 
+  .info-message-block {
+    background: $sql-history-detail-bg;
+    flex: 0.3;
+    padding-left: 20px;
+
+    .history-info-text {
+      @extend .text-12;
+      padding: 7px 0;
+    }
+  }
+
   .metadata-block {
     flex: 0.4;
     padding: 10px 20px;
diff --git a/web/pgadmin/tools/sqleditor/utils/save_changed_data.py b/web/pgadmin/tools/sqleditor/utils/save_changed_data.py
index 935d6591..12df12e4 100644
--- a/web/pgadmin/tools/sqleditor/utils/save_changed_data.py
+++ b/web/pgadmin/tools/sqleditor/utils/save_changed_data.py
@@ -32,9 +32,7 @@ def save_changed_data(changed_data, columns_info, conn, command_obj,
     """
     status = False
     res = None
-    query_res = dict()
-    count = 0
-    list_of_rowid = []
+    query_results = []
     operations = ('added', 'updated', 'deleted')
     list_of_sql = {}
     _rowid = None
@@ -44,267 +42,279 @@ def save_changed_data(changed_data, columns_info, conn, command_obj,
         for col_name, col_info in columns_info.items()
     }
 
-    if conn.connected():
-        is_savepoint = False
-        # Start the transaction if the session is idle
-        if conn.transaction_status() == TX_STATUS_IDLE:
-            conn.execute_void('BEGIN;')
-        else:
-            conn.execute_void('SAVEPOINT save_data;')
-            is_savepoint = True
-
-        # Iterate total number of records to be updated/inserted
-        for of_type in changed_data:
-            # No need to go further if its not add/update/delete operation
-            if of_type not in operations:
-                continue
-            # if no data to be save then continue
-            if len(changed_data[of_type]) < 1:
-                continue
-
-            column_type = {}
-            column_data = {}
-            for each_col in columns_info:
-                if (
-                    columns_info[each_col]['not_null'] and
-                    not columns_info[each_col]['has_default_val']
-                ):
-                    column_data[each_col] = None
-                    column_type[each_col] = \
-                        columns_info[each_col]['type_name']
-                else:
-                    column_type[each_col] = \
-                        columns_info[each_col]['type_name']
-
-            # For newly added rows
-            if of_type == 'added':
-                # Python dict does not honour the inserted item order
-                # So to insert data in the order, we need to make ordered
-                # list of added index We don't need this mechanism in
-                # updated/deleted rows as it does not matter in
-                # those operations
-                added_index = OrderedDict(
-                    sorted(
-                        changed_data['added_index'].items(),
-                        key=lambda x: int(x[0])
-                    )
+    is_savepoint = False
+    # Start the transaction if the session is idle
+    if conn.transaction_status() == TX_STATUS_IDLE:
+        sql = 'BEGIN;'
+    else:
+        sql = 'SAVEPOINT save_data;'
+        is_savepoint = True
+
+    status, res = execute_void_wrapper(conn, sql, query_results)
+    if not status:
+        return status, res, query_results, None
+
+    # Iterate total number of records to be updated/inserted
+    for of_type in changed_data:
+        # No need to go further if its not add/update/delete operation
+        if of_type not in operations:
+            continue
+        # if no data to be save then continue
+        if len(changed_data[of_type]) < 1:
+            continue
+
+        column_type = {}
+        column_data = {}
+        for each_col in columns_info:
+            if (
+                columns_info[each_col]['not_null'] and
+                not columns_info[each_col]['has_default_val']
+            ):
+                column_data[each_col] = None
+                column_type[each_col] = \
+                    columns_info[each_col]['type_name']
+            else:
+                column_type[each_col] = \
+                    columns_info[each_col]['type_name']
+
+        # For newly added rows
+        if of_type == 'added':
+            # Python dict does not honour the inserted item order
+            # So to insert data in the order, we need to make ordered
+            # list of added index We don't need this mechanism in
+            # updated/deleted rows as it does not matter in
+            # those operations
+            added_index = OrderedDict(
+                sorted(
+                    changed_data['added_index'].items(),
+                    key=lambda x: int(x[0])
                 )
-                list_of_sql[of_type] = []
+            )
+            list_of_sql[of_type] = []
 
-                # When new rows are added, only changed columns data is
-                # sent from client side. But if column is not_null and has
-                # no_default_value, set column to blank, instead
-                # of not null which is set by default.
-                column_data = {}
-                pk_names, primary_keys = command_obj.get_primary_keys()
-                has_oids = 'oid' in column_type
-
-                for each_row in added_index:
-                    # Get the row index to match with the added rows
-                    # dict key
-                    tmp_row_index = added_index[each_row]
-                    data = changed_data[of_type][tmp_row_index]['data']
-                    # Remove our unique tracking key
-                    data.pop(client_primary_key, None)
-                    data.pop('is_row_copied', None)
-                    list_of_rowid.append(data.get(client_primary_key))
-
-                    # Update columns value with columns having
-                    # not_null=False and has no default value
-                    column_data.update(data)
-
-                    sql = render_template(
-                        "/".join([command_obj.sql_path, 'insert.sql']),
-                        data_to_be_saved=column_data,
-                        pgadmin_alias=pgadmin_alias,
-                        primary_keys=None,
-                        object_name=command_obj.object_name,
-                        nsp_name=command_obj.nsp_name,
-                        data_type=column_type,
-                        pk_names=pk_names,
-                        has_oids=has_oids
-                    )
+            # When new rows are added, only changed columns data is
+            # sent from client side. But if column is not_null and has
+            # no_default_value, set column to blank, instead
+            # of not null which is set by default.
+            column_data = {}
+            pk_names, primary_keys = command_obj.get_primary_keys()
+            has_oids = 'oid' in column_type
+
+            for each_row in added_index:
+                # Get the row index to match with the added rows
+                # dict key
+                tmp_row_index = added_index[each_row]
+                data = changed_data[of_type][tmp_row_index]['data']
+                # Remove our unique tracking key
+                data.pop(client_primary_key, None)
+                data.pop('is_row_copied', None)
+
+                # Update columns value with columns having
+                # not_null=False and has no default value
+                column_data.update(data)
 
-                    select_sql = render_template(
-                        "/".join([command_obj.sql_path, 'select.sql']),
-                        object_name=command_obj.object_name,
-                        nsp_name=command_obj.nsp_name,
-                        primary_keys=primary_keys,
-                        has_oids=has_oids
-                    )
+                sql = render_template(
+                    "/".join([command_obj.sql_path, 'insert.sql']),
+                    data_to_be_saved=column_data,
+                    pgadmin_alias=pgadmin_alias,
+                    primary_keys=None,
+                    object_name=command_obj.object_name,
+                    nsp_name=command_obj.nsp_name,
+                    data_type=column_type,
+                    pk_names=pk_names,
+                    has_oids=has_oids
+                )
 
-                    list_of_sql[of_type].append({
-                        'sql': sql, 'data': data,
-                        'client_row': tmp_row_index,
-                        'select_sql': select_sql
-                    })
-                    # Reset column data
-                    column_data = {}
-
-            # For updated rows
-            elif of_type == 'updated':
-                list_of_sql[of_type] = []
-                for each_row in changed_data[of_type]:
-                    data = changed_data[of_type][each_row]['data']
-                    pk_escaped = {
-                        pk: pk_val.replace('%', '%%') if hasattr(
-                            pk_val, 'replace') else pk_val
-                        for pk, pk_val in
-                        changed_data[of_type][each_row]['primary_keys'].items()
-                    }
-                    sql = render_template(
-                        "/".join([command_obj.sql_path, 'update.sql']),
-                        data_to_be_saved=data,
-                        pgadmin_alias=pgadmin_alias,
-                        primary_keys=pk_escaped,
-                        object_name=command_obj.object_name,
-                        nsp_name=command_obj.nsp_name,
-                        data_type=column_type
-                    )
-                    list_of_sql[of_type].append({'sql': sql, 'data': data})
-                    list_of_rowid.append(data.get(client_primary_key))
-
-            # For deleted rows
-            elif of_type == 'deleted':
-                list_of_sql[of_type] = []
-                is_first = True
-                rows_to_delete = []
-                keys = None
-                no_of_keys = None
-                for each_row in changed_data[of_type]:
-                    rows_to_delete.append(changed_data[of_type][each_row])
-                    # Fetch the keys for SQL generation
-                    if is_first:
-                        # We need to covert dict_keys to normal list in
-                        # Python3
-                        # In Python2, it's already a list & We will also
-                        # fetch column names using index
-                        keys = list(
-                            changed_data[of_type][each_row].keys()
-                        )
-                        no_of_keys = len(keys)
-                        is_first = False
-                # Map index with column name for each row
-                for row in rows_to_delete:
-                    for k, v in row.items():
-                        # Set primary key with label & delete index based
-                        # mapped key
-                        try:
-                            row[changed_data['columns']
-                                            [int(k)]['name']] = v
-                        except ValueError:
-                            continue
-                        del row[k]
+                select_sql = render_template(
+                    "/".join([command_obj.sql_path, 'select.sql']),
+                    object_name=command_obj.object_name,
+                    nsp_name=command_obj.nsp_name,
+                    primary_keys=primary_keys,
+                    has_oids=has_oids
+                )
+
+                list_of_sql[of_type].append({
+                    'sql': sql, 'data': data,
+                    'client_row': tmp_row_index,
+                    'select_sql': select_sql,
+                    'row_id': data.get(client_primary_key)
+                })
+                # Reset column data
+                column_data = {}
 
+        # For updated rows
+        elif of_type == 'updated':
+            list_of_sql[of_type] = []
+            for each_row in changed_data[of_type]:
+                data = changed_data[of_type][each_row]['data']
+                pk_escaped = {
+                    pk: pk_val.replace('%', '%%') if hasattr(
+                        pk_val, 'replace') else pk_val
+                    for pk, pk_val in
+                    changed_data[of_type][each_row]['primary_keys'].items()
+                }
                 sql = render_template(
-                    "/".join([command_obj.sql_path, 'delete.sql']),
-                    data=rows_to_delete,
-                    primary_key_labels=keys,
-                    no_of_keys=no_of_keys,
+                    "/".join([command_obj.sql_path, 'update.sql']),
+                    data_to_be_saved=data,
+                    pgadmin_alias=pgadmin_alias,
+                    primary_keys=pk_escaped,
                     object_name=command_obj.object_name,
-                    nsp_name=command_obj.nsp_name
+                    nsp_name=command_obj.nsp_name,
+                    data_type=column_type
                 )
-                list_of_sql[of_type].append({'sql': sql, 'data': {}})
-
-        for opr, sqls in list_of_sql.items():
-            for item in sqls:
-                if item['sql']:
-                    item['data'] = {
-                        pgadmin_alias[k] if k in pgadmin_alias else k: v
-                        for k, v in item['data'].items()
-                    }
-
-                    row_added = None
-
-                    def failure_handle(res):
-                        if is_savepoint:
-                            conn.execute_void('ROLLBACK TO SAVEPOINT '
-                                              'save_data;')
-                            msg = 'Query ROLLBACK, but the current ' \
-                                  'transaction is still ongoing.'
-                        else:
-                            conn.execute_void('ROLLBACK;')
-                            msg = 'Transaction ROLLBACK'
-                        # If we roll backed every thing then update the
-                        # message for each sql query.
-                        for val in query_res:
-                            if query_res[val]['status']:
-                                query_res[val]['result'] = msg
-
-                        # If list is empty set rowid to 1
-                        try:
-                            if list_of_rowid:
-                                _rowid = list_of_rowid[count]
-                            else:
-                                _rowid = 1
-                        except Exception:
-                            _rowid = 0
-
-                        return status, res, query_res, _rowid
-
+                list_of_sql[of_type].append({'sql': sql,
+                                             'data': data,
+                                             'row_id':
+                                                 data.get(client_primary_key)})
+
+        # For deleted rows
+        elif of_type == 'deleted':
+            list_of_sql[of_type] = []
+            is_first = True
+            rows_to_delete = []
+            keys = None
+            no_of_keys = None
+            for each_row in changed_data[of_type]:
+                rows_to_delete.append(changed_data[of_type][each_row])
+                # Fetch the keys for SQL generation
+                if is_first:
+                    # We need to convert dict_keys to a normal list in
+                    # Python3
+                    # In Python2, it's already a list, and we will also
+                    # fetch column names using the index
+                    keys = list(
+                        changed_data[of_type][each_row].keys()
+                    )
+                    no_of_keys = len(keys)
+                    is_first = False
+            # Map index with column name for each row
+            for row in rows_to_delete:
+                for k, v in row.items():
+                    # Set primary key with label and delete the
+                    # index-based mapped key
                     try:
-                        # Fetch oids/primary keys
-                        if 'select_sql' in item and item['select_sql']:
-                            status, res = conn.execute_dict(
-                                item['sql'], item['data'])
-                        else:
-                            status, res = conn.execute_void(
-                                item['sql'], item['data'])
-                    except Exception as _:
-                        failure_handle(res)
-                        raise
+                        row[changed_data['columns']
+                                        [int(k)]['name']] = v
+                    except ValueError:
+                        continue
+                    del row[k]
+
+            sql = render_template(
+                "/".join([command_obj.sql_path, 'delete.sql']),
+                data=rows_to_delete,
+                primary_key_labels=keys,
+                no_of_keys=no_of_keys,
+                object_name=command_obj.object_name,
+                nsp_name=command_obj.nsp_name
+            )
+            list_of_sql[of_type].append({'sql': sql, 'data': {}})
+
+    def failure_handle(res, row_id):
+        mogrified_sql = conn.mogrify(item['sql'], item['data'])
+        mogrified_sql = mogrified_sql if mogrified_sql is not None \
+            else item['sql']
+        query_results.append({
+            'status': False,
+            'result': res,
+            'sql': mogrified_sql,
+            'rows_affected': 0,
+            'row_added': None
+        })
+
+        if is_savepoint:
+            sql = 'ROLLBACK TO SAVEPOINT save_data;'
+            msg = 'A ROLLBACK was done for the save operation only. ' \
+                  'The active transaction is not affected.'
+        else:
+            sql = 'ROLLBACK;'
+            msg = 'A ROLLBACK was done for the save transaction.'
+
+        rollback_status, rollback_result = \
+            execute_void_wrapper(conn, sql, query_results)
+        if not rollback_status:
+            return rollback_status, rollback_result, query_results, None
+
+        # If we rolled everything back then update the
+        # message for each SQL query.
+        for query in query_results:
+            if query['status']:
+                query['result'] = msg
+
+        return False, res, query_results, row_id
+
+    for opr, sqls in list_of_sql.items():
+        for item in sqls:
+            if item['sql']:
+                item['data'] = {
+                    pgadmin_alias[k] if k in pgadmin_alias else k: v
+                    for k, v in item['data'].items()
+                }
+
+                row_added = None
+
+                try:
+                    # Fetch oids/primary keys
+                    if 'select_sql' in item and item['select_sql']:
+                        status, res = conn.execute_dict(
+                            item['sql'], item['data'])
+                    else:
+                        status, res = conn.execute_void(
+                            item['sql'], item['data'])
+                except Exception as _:
+                    failure_handle(res, item.get('row_id', 0))
+                    raise
+
+                if not status:
+                    return failure_handle(res, item.get('row_id', 0))
+
+                # Select added row from the table
+                if 'select_sql' in item:
+                    status, sel_res = conn.execute_dict(
+                        item['select_sql'], res['rows'][0])
 
                     if not status:
-                        return failure_handle(res)
-
-                    # Select added row from the table
-                    if 'select_sql' in item:
-                        status, sel_res = conn.execute_dict(
-                            item['select_sql'], res['rows'][0])
-
-                        if not status:
-                            if is_savepoint:
-                                conn.execute_void('ROLLBACK TO SAVEPOINT'
-                                                  ' save_data;')
-                                msg = 'Query ROLLBACK, the current' \
-                                      ' transaction is still ongoing.'
-                            else:
-                                conn.execute_void('ROLLBACK;')
-                                msg = 'Transaction ROLLBACK'
-                            # If we roll backed every thing then update
-                            # the message for each sql query.
-                            for val in query_res:
-                                if query_res[val]['status']:
-                                    query_res[val]['result'] = msg
-
-                            # If list is empty set rowid to 1
-                            try:
-                                if list_of_rowid:
-                                    _rowid = list_of_rowid[count]
-                                else:
-                                    _rowid = 1
-                            except Exception:
-                                _rowid = 0
-
-                            return status, sel_res, query_res, _rowid
-
-                        if 'rows' in sel_res and len(sel_res['rows']) > 0:
-                            row_added = {
-                                item['client_row']: sel_res['rows'][0]}
-
-                    rows_affected = conn.rows_affected()
-                    # store the result of each query in dictionary
-                    query_res[count] = {
-                        'status': status,
-                        'result': None if row_added else res,
-                        'sql': item['sql'], 'rows_affected': rows_affected,
-                        'row_added': row_added
-                    }
-
-                    count += 1
-
-        # Commit the transaction if no error is found & autocommit is activated
-        if auto_commit:
-            conn.execute_void('COMMIT;')
-
-    return status, res, query_res, _rowid
+                        return failure_handle(sel_res, item.get('row_id', 0))
+
+                    if 'rows' in sel_res and len(sel_res['rows']) > 0:
+                        row_added = {
+                            item['client_row']: sel_res['rows'][0]}
+
+                rows_affected = conn.rows_affected()
+                mogrified_sql = conn.mogrify(item['sql'], item['data'])
+                mogrified_sql = mogrified_sql if mogrified_sql is not None \
+                    else item['sql']
+                # store the result of each query in dictionary
+                query_results.append({
+                    'status': status,
+                    'result': None if row_added else res,
+                    'sql': mogrified_sql,
+                    'rows_affected': rows_affected,
+                    'row_added': row_added
+                })
+
+    # Commit the transaction if no error is found & autocommit is activated
+    if auto_commit:
+        sql = 'COMMIT;'
+        status, res = execute_void_wrapper(conn, sql, query_results)
+        if not status:
+            return status, res, query_results, None
+
+    return status, res, query_results, _rowid
+
+
+def execute_void_wrapper(conn, sql, query_results):
+    """
+    Executes an SQL query with no return value and adds it to query_results
+    :param sql: SQL query
+    :param query_results: A list of query results in the save operation
+    :return: status, result
+    """
+    status, res = conn.execute_void(sql)
+    if status:
+        query_results.append({
+            'status': status,
+            'result': res,
+            'sql': sql, 'rows_affected': 0,
+            'row_added': None
+        })
+    return status, res
diff --git a/web/pgadmin/tools/sqleditor/utils/tests/test_save_changed_data.py b/web/pgadmin/tools/sqleditor/utils/tests/test_save_changed_data.py
index 8a4a0bd8..f25f2247 100644
--- a/web/pgadmin/tools/sqleditor/utils/tests/test_save_changed_data.py
+++ b/web/pgadmin/tools/sqleditor/utils/tests/test_save_changed_data.py
@@ -116,8 +116,9 @@ class TestSaveChangedData(BaseTestGenerator):
                 ]
             },
             save_status=False,
-            check_sql=None,
-            check_result=None
+            check_sql=
+            "SELECT * FROM %s WHERE pk_col = 1 AND normal_col = 'four'",
+            check_result='SELECT 0'
         )),
         ('When updating a row in a valid way', dict(
             save_payload={
@@ -171,9 +172,9 @@ class TestSaveChangedData(BaseTestGenerator):
                 "updated": {
                     "1":
                         {"err": False,
-                         "data": {"pk_col": "2"},
+                         "data": {"pk_col": "1"},
                          "primary_keys":
-                             {"pk_col": 1}
+                             {"pk_col": 2}
                          }
                 },
                 "added": {},
@@ -210,8 +211,9 @@ class TestSaveChangedData(BaseTestGenerator):
                 ]
             },
             save_status=False,
-            check_sql=None,
-            check_result=None
+            check_sql=
+            "SELECT * FROM %s WHERE pk_col = 1 AND normal_col = 'two'",
+            check_result='SELECT 0'
         )),
         ('When deleting a row', dict(
             save_payload={
@@ -283,20 +285,19 @@ class TestSaveChangedData(BaseTestGenerator):
         save_status = response_data['data']['status']
         self.assertEquals(save_status, self.save_status)
 
-        if self.check_sql:
-            # Execute check sql
-            # Add test table name to the query
-            check_sql = self.check_sql % self.test_table_name
-            is_success, response_data = \
-                execute_query(tester=self.tester,
-                              query=check_sql,
-                              start_query_tool_url=self.start_query_tool_url,
-                              poll_url=self.poll_url)
-            self.assertEquals(is_success, True)
+        # Execute check sql
+        # Add test table name to the query
+        check_sql = self.check_sql % self.test_table_name
+        is_success, response_data = \
+            execute_query(tester=self.tester,
+                          query=check_sql,
+                          start_query_tool_url=self.start_query_tool_url,
+                          poll_url=self.poll_url)
+        self.assertEquals(is_success, True)
 
-            # Check table for updates
-            result = response_data['data']['result']
-            self.assertEquals(result, self.check_result)
+        # Check table for updates
+        result = response_data['data']['result']
+        self.assertEquals(result, self.check_result)
 
     def tearDown(self):
         # Disconnect the database
diff --git a/web/pgadmin/utils/driver/psycopg2/connection.py b/web/pgadmin/utils/driver/psycopg2/connection.py
index 230e15af..9e6aaa5d 100644
--- a/web/pgadmin/utils/driver/psycopg2/connection.py
+++ b/web/pgadmin/utils/driver/psycopg2/connection.py
@@ -1917,3 +1917,17 @@ Failed to reset the connection to the server due to following error:
             )
 
         return enc_password
+
+    def mogrify(self, query, parameters):
+        """
+        This function will return the sql query after parameters binding
+        :param query: sql query before parameters (variables) binding
+        :param parameters: query parameters / variables
+        :return:
+        """
+        status, cursor = self.__cursor()
+        if not status:
+            return None
+        else:
+            mogrified_sql = cursor.mogrify(query, parameters)
+            return mogrified_sql

Reply via email to