commit:     aead3c221debd25eba336987e4ad2cda67e870a1
Author:     Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Mon Jun 26 21:44:46 2023 +0000
Commit:     Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Mon Jun 26 21:44:46 2023 +0000
URL:        https://gitweb.gentoo.org/proj/tinderbox-cluster.git/commit/?id=aead3c22

Add deleteLogChunks patch for bb

Signed-off-by: Magnus Granberg <zorry <AT> gentoo.org>

 patches/bb-deleteLogChunks.patch | 83 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 83 insertions(+)

diff --git a/patches/bb-deleteLogChunks.patch b/patches/bb-deleteLogChunks.patch
new file mode 100644
index 0000000..6de332e
--- /dev/null
+++ b/patches/bb-deleteLogChunks.patch
@@ -0,0 +1,83 @@
+--- a/buildbot/db/logs.py      2022-04-02 11:10:34.892310594 +0200
++++ b/buildbot/db/logs.py      2023-06-26 23:06:24.611959431 +0200
+@@ -410,3 +410,80 @@
+         rv = dict(row)
+         rv['complete'] = bool(rv['complete'])
+         return rv
++
++    # returns a Deferred that fires with the number of deleted logchunk rows
++    def deleteLogChunks(self, buildid):
++        model = self.db.model
++        horizon_per_builder = False
++
++        def countLogchunks(conn):
++            res = conn.execute(sa.select([sa.func.count(model.logchunks.c.logid)]))
++            count = res.fetchone()[0]
++            res.close()
++            return count
++
++        # find the steps.id at the upper bound of steps
++        def getStepidMax(conn, buildid):
++            # N.B.: we utilize the fact that steps.id is auto-increment, thus steps.started_at
++            # times are effectively sorted and we only need to find the steps.id at the upper
++            # bound of steps to update.
++
++            # SELECT steps.id from steps WHERE steps.buildid = buildid ORDER BY
++            # steps.id DESC LIMIT 1;
++            res = conn.execute(
++                sa.select([model.steps.c.id])
++                .where(model.steps.c.buildid == buildid)
++                .order_by(model.steps.c.id.desc())
++                .limit(1)
++            )
++            res_list = res.fetchone()
++            stepid_max = None
++            if res_list:
++                stepid_max = res_list[0]
++            res.close()
++            return stepid_max
++
++        # query all logs with type 'd' and delete their chunks.
++        def deleteLogsWithTypeD(conn):
++            if self.db._engine.dialect.name == 'sqlite':
++                # sqlite does not support delete with a join, so for this case we use a subquery,
++                # which is much slower
++                q = sa.select([model.logs.c.id])
++                q = q.select_from(model.logs)
++                q = q.where(model.logs.c.type == 'd')
++
++                # delete their logchunks
++                q = model.logchunks.delete().where(model.logchunks.c.logid.in_(q))
++            else:
++                q = model.logchunks.delete()
++                q = q.where(model.logs.c.id == model.logchunks.c.logid)
++                q = q.where(model.logs.c.type == 'd')
++
++            res = conn.execute(q)
++            res.close()
++
++        def thddeleteLogs(conn):
++            count_before = countLogchunks(conn)
++
++            # update log types that match buildid
++            # we do it first to avoid having UI discrepancy
++
++            stepid_max = getStepidMax(conn, buildid)
++            if stepid_max:
++                # UPDATE logs SET logs.type = 'd'
++                # WHERE logs.stepid <= stepid_max AND type != 'd';
++                res = conn.execute(
++                    model.logs.update()
++                    .where(sa.and_(model.logs.c.stepid <= stepid_max,
++                                   model.logs.c.type != 'd'))
++                    .values(type='d')
++                )
++                res.close()
++
++            deleteLogsWithTypeD(conn)
++
++            count_after = countLogchunks(conn)
++            count = count_before - count_after
++
++            return count if count > 0 else 0
++        return self.db.pool.do(thddeleteLogs)

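
For context, the patched method is meant to be driven from the master side, e.g. a custom
janitor service or maintenance script. Below is a minimal caller sketch, not part of the
patch itself: it assumes the patched buildbot is installed, that "master" is a BuildMaster
whose db connector exposes the new logs.deleteLogChunks() method, and the function name and
buildid value are illustrative only.

    from twisted.internet import defer

    @defer.inlineCallbacks
    def purge_logs_up_to(master, buildid):
        # deleteLogChunks marks logs of all steps up to and including the last
        # step of the given build as type 'd', deletes their logchunk rows, and
        # fires the Deferred with the number of rows removed (0 if none matched).
        deleted = yield master.db.logs.deleteLogChunks(buildid)
        return deleted

Note that the returned count is computed as the difference between the logchunk row counts
before and after the delete, so it is only approximate if other writers touch the table
concurrently.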