Review: Needs Fixing

Left a few diff comments -- I very much like this approach though!

Diff comments:

> diff --git a/charms/focal/autopkgtest-web/webcontrol/sqlite-writer b/charms/focal/autopkgtest-web/webcontrol/sqlite-writer
> new file mode 100755
> index 0000000..3939bde
> --- /dev/null
> +++ b/charms/focal/autopkgtest-web/webcontrol/sqlite-writer
> @@ -0,0 +1,136 @@
> +#!/usr/bin/python3
> +
> +import configparser
> +import json
> +import logging
> +import os
> +import socket
> +import sqlite3
> +import urllib.parse
> +
> +import amqplib.client_0_8 as amqp
> +from helpers.utils import init_db
> +
> +EXCHANGE_NAME = "sqlite-write-me.fanout"
> +
> +config = None
> +db_con = None
> +
> +INSERT_INTO_KEYS = [
> +    "test_id",
> +    "run_id",
> +    "version",
> +    "triggers",
> +    "duration",
> +    "exitcode",
> +    "requester",
> +    "env",
> +    "uuid",
> +]
> +
> +
> +def amqp_connect():
> +    """Connect to AMQP server"""
> +
> +    cp = configparser.ConfigParser()
> +    cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
> +    amqp_uri = cp["amqp"]["uri"]
> +    parts = urllib.parse.urlsplit(amqp_uri, allow_fragments=False)
> +    amqp_con = amqp.Connection(
> +        parts.hostname, userid=parts.username, password=parts.password
> +    )
> +    logging.info(
> +        "Connected to AMQP server at %s@%s" % (parts.username, 
> parts.hostname)
> +    )
> +
> +    return amqp_con
> +
> +
> +def db_connect():
> +    """Connect to SQLite DB"""
> +    cp = configparser.ConfigParser()
> +    cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))

My same old whinge about ConfigParser.read :)

> +
> +    db_con = init_db(cp["web"]["database"])
> +
> +    return db_con
> +
> +
> +def check_msg(queue_msg):
> +    required_keys = [
> +        "test_id",
> +        "run_id",
> +        "version",
> +        "triggers",
> +        "duration",
> +        "exitcode",
> +        "requester",
> +        "env",
> +        "uuid",
> +    ]
> +    queue_msg_keys = list(queue_msg.keys())
> +    required_keys.sort()
> +    queue_msg_keys.sort()
> +    if queue_msg_keys == required_keys:

No need to go sorting lists; just compare sets. Replace [] with {} in 
required_keys, and convert queue_msg.keys() to a set() instead of a list().

If queue_msg is a dict (or whatever queue_msg.keys() returns conforms to 
KeysView), you don't even need the set() conversion; you can compare 
queue_msg.keys() == required_keys directly.
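
Roughly, keeping the existing shape (just a sketch):

    def check_msg(queue_msg):
        required_keys = {
            "test_id", "run_id", "version", "triggers", "duration",
            "exitcode", "requester", "env", "uuid",
        }
        # dict.keys() returns a set-like KeysView, so == against a set works directly
        return queue_msg.keys() == required_keys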

> +        return True
> +    return False
> +
> +
> +def process_message(msg, db_con):
> +    # aight, time to test this with download-results and download-all-results now.
> +    body = msg.body
> +    if isinstance(body, bytes):
> +        body = body.decide("UTF-8", errors="replace")

I think you mean decode? :)

> +    info = json.loads(body)
> +    logging.info("Message is: \n%s" % json.dumps(info, indent=2))
> +    print(check_msg(info))
> +    if not check_msg(info):
> +        logging.info(
> +            "Message has incorrect keys!\n%s" % json.dumps(info, indent=2)
> +        )
> +    # insert into db
> +    try:
> +        with db_con:
> +            c = db_con.cursor()
> +            c.execute(
> +                "INSERT INTO result VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",

Nitpick: I don't like bare INSERTs where the columns are implicit; it's a bit 
like using SELECT * -- I never do it in production code because it makes the 
code brittle in the face of (benign) schema changes. If the ordering of the 
columns of the table changes, or new columns with valid defaults are 
introduced (e.g. an "inserted on" timestamp column which defaults to "now"), 
this INSERT will fail when it should calmly carry on doing the Right Thing.

In other words, this should be INSERT INTO result(col1, col2, ...) VALUES (?, 
?, ...)

Really really nitpicky: whilst the ? paramstyle is the SQL standard, it's also 
rubbish. The sqlite3 module is thankfully one of those that sensibly goes 
beyond the bare Python DB-API and also supports the "named" paramstyle (the 
One True Paramstyle ;), so you can use :params instead.

> +                (
> +                    info["test_id"],
> +                    info["run_id"],
> +                    info["version"],
> +                    info["triggers"],
> +                    info["duration"],
> +                    info["exitcode"],
> +                    info["requester"],
> +                    info["env"],
> +                    info["uuid"],
> +                ),
> +            )
> +            db_con.commit()

This is redundant; you're already in a "with db_con" block which will 
implicitly commit / rollback as appropriate
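
i.e. the context manager already does the bookkeeping (toy example with an 
in-memory db, just to illustrate the semantics):

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("CREATE TABLE t (x INTEGER)")
    with con:
        con.execute("INSERT INTO t VALUES (1)")
    # leaving the block committed the INSERT; an exception raised inside it
    # would have rolled the INSERT back instead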

> +        logging.info("Inserted the following entry into the db:\n%s" % body)
> +    except sqlite3.OperationalError as e:
> +        logging.info("Insert operation failed with: %s" % str(e))
> +    except sqlite3.IntegrityError as e:
> +        logging.info("Insert operation failed with: %s" % str(e))

Do we really just want to log an info message if this stuff fails? At the very 
least I'd expect to log an error-level message, but I'd also want to raise the 
exception and crash in most instances. Or do we expect to go inserting 
duplicate keys?

Also, it may be worth looking at logging.exception for these (it implicitly 
records the exception information when called from within the handler).
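
Sketch of what I mean, with insert_sql / params standing in for the statement 
and parameters above (whether to re-raise, and for which of the two, is 
obviously your call):

    try:
        with db_con:
            db_con.execute(insert_sql, params)  # the INSERT from above
    except sqlite3.IntegrityError:
        # e.g. a duplicate key; log the full traceback but keep consuming
        logging.exception("Insert failed (integrity) for:\n%s", body)
    except sqlite3.OperationalError:
        # anything else DB-side; log the traceback and let the service crash
        logging.exception("Insert failed (operational) for:\n%s", body)
        raise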

> +
> +    msg.channel.basic_ack(msg.delivery_tag)
> +
> +
> +if __name__ == "__main__":
> +    logging.basicConfig(level=logging.INFO)
> +    db_con = db_connect()
> +    amqp_con = amqp_connect()
> +    status_ch = amqp_con.channel()
> +    status_ch.access_request("/complete", active=True, read=True, write=False)
> +    status_ch.exchange_declare(
> +        EXCHANGE_NAME, "fanout", durable=True, auto_delete=False
> +    )
> +    queue_name = "sqlite-writer-listener-%s" % socket.getfqdn()
> +    status_ch.queue_declare(queue_name, durable=True, auto_delete=False)
> +    status_ch.queue_bind(queue_name, EXCHANGE_NAME, queue_name)
> +    logging.info("Listening to requests on %s" % queue_name)
> +    status_ch.basic_consume(
> +        "", callback=lambda msg: process_message(msg, db_con)
> +    )
> +    while status_ch.callbacks:
> +        status_ch.wait()


-- 
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/460847
Your team Canonical's Ubuntu QA is requested to review the proposed merge of 
~andersson123/autopkgtest-cloud:d-r-d-a-r-merging into autopkgtest-cloud:master.

