The branch, master has been updated
       via  5b4d43c24523813f633ae7709e856a78eb64a296 (commit)
       via  4554a24397b86df1bbe31458840e8a0ccfa2a8fb (commit)
       via  6dfccdace750c90631f12460367f14c00f0d96cf (commit)
       via  c6bc759eee12f838d5ccb59be8c4552d164940e1 (commit)
       via  f8cecf67f39e465770befd7415ebd98f07a559a7 (commit)
       via  a1f4a250a7b4043c9aa313193529138c1877853f (commit)
       via  f0a2c4488c4dbd3c011c512e76c31f21bc24ad6d (commit)
       via  e47da1e3f463403e5dbd9f461a296f7de21443c1 (commit)
       via  995c010fa21279be55ae04896cb9a71cf11b3334 (commit)
       via  1c64a6a32fc0372f95851c084cbda1eb5e630e13 (commit)
      from  006e68406a6a002bf31dd8b2525e968422b7bda6 (commit)
- Log -----------------------------------------------------------------
commit 5b4d43c24523813f633ae7709e856a78eb64a296
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Sun Jun 8 22:12:34 2025 -0700
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 22:12:34 2025 -0700

    Use logger module for logging messages

diff --git a/patchwork_runner.py b/patchwork_runner.py
index 9b0eb66..a562927 100644
--- a/patchwork_runner.py
+++ b/patchwork_runner.py
@@ -1,5 +1,6 @@
 import email
 import json
+import logging
 import re
 import requests
 import smtplib
@@ -19,6 +20,9 @@ from mysql_helper import SQLDatabase
 from sqlite_helper import SQLiteDatabase
 from proxy_smtplib import ProxySMTP
 
+logger = logging.getLogger("Runner:")
+logger.setLevel(logging.INFO)
+
 def post_check(check_url, type_check, context, msg_short, msg_long, config_pw):
 
     if (isinstance(msg_long, bytes)):
@@ -39,8 +43,6 @@ def post_check(check_url, type_check, context, msg_short, msg_long, config_pw):
     headers = {"Authorization" : "Token %s" % config_pw["token"]}
     payload = {"state" : type_check, "context" : context, "description" : msg_short, "description_long" : msg_long}
     resp = requests.post(check_url, headers=headers, data=payload)
-    print(resp)
-    print(resp.content)
 
 def submit_job_result(mydb, job, job_result, check_url, config_pw):
 
@@ -69,8 +71,7 @@ def run_job(mydb, commit_hash, job):
     commit_hash = commit_hash.decode("utf-8")
     job_result = mydb.query(job.name, keys, "WHERE commit_hash = \"%s\"" % commit_hash)
     if job_result:
-        print("\nFound cashed result: %s\n" % commit_hash)
-        print(job_result)
+        logger.info(f"Found cached result: {commit_hash}")
         return job_result
 
     job_result = { "commit_hash" : commit_hash, "setup_success" : 0, "setup_log" : "",
@@ -110,13 +111,12 @@ def run_job(mydb, commit_hash, job):
     else:
         job_result["unit_test_success"] = 1
 
-    print (job_result)
     mydb.insert(job.name, job_result)
     return job_result
 
 def notify_by_email(mydb, patch, config_smtp):
 
-    print ("Sending email notification")
+    logger.info ("Sending email notification")
 
     keys = list()
     keys.append("email_sent")
@@ -147,10 +147,10 @@ def notify_by_email(mydb, patch, config_smtp):
     msg_email["References"] = patch["msg_id"]
 
     config_proxy = config_smtp["proxy"]
-    print ("Proxy is %d" % config_proxy["enabled"])
+    proxy_enabled = config_proxy["enabled"]
+    logger.info (f"Proxy enabled: {proxy_enabled}")
     if config_proxy["enabled"]:
-        print ("Using proxy")
-        ret = subprocess.run(config_proxy["cmd"], shell=True)
+        subprocess.run(config_proxy["cmd"], shell=True)
         smtp = ProxySMTP(config_smtp["host"], config_smtp["port"], proxy_addr = config_proxy["proxy_addr"], proxy_port = config_proxy["proxy_port"])
     else:
         smtp = smtplib.SMTP(config_smtp["host"], config_smtp["port"])
@@ -226,13 +226,13 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
             msg_id = msg_id[:256]
         check_url = reply_patch["checks"]
 
-        print ("Author email: %s" % author_email)
-        print ("Subject email: %s" % subject_email)
-        print ("Series id: %s" % series_id)
-        print ("Check url: %s" % check_url)
-        print ("Patch url: %s" % patch_url)
-        print ("Mbox: %s" % mbox)
-        print ("User link: %s" % mbox[:-5])
+        logger.info (f"Author email: {author_email}")
+        logger.info (f"Subject email: {subject_email}")
+        logger.info (f"Series id: {series_id}")
+        logger.info (f"Check url: {check_url}")
+        logger.info (f"Patch url: {patch_url}")
+        logger.info (f"Mbox: {mbox}")
+        logger.info (f"User link: {mbox[:-5]}")
 
         keys = list()
         keys.append("msg_id")
@@ -255,7 +255,7 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
             mydb.insert("series", {"series_id" : "%d" % series_id, "email_sent" : 0})
 
-    print ("Number of patches in list: %d" % len(patch_list))
+    logger.info (f"Number of patches in list: {len(patch_list)}")
 
     for patch in patch_list:
 
@@ -287,7 +287,8 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
                 time.sleep(1*60)
 
         if retries == max_retries:
-            print ("Failed to fetch patch %s" % patch["mbox"])
+            patch_mbox= patch["mbox"]
+            logger.error(f"Failed to fetch patch {patch_mbox}")
             continue
 
         git_cmd = git_cmd_template + "am --keep-cr -3 --committer-date-is-author-date --exclude=Changelog mbox_file"
@@ -310,7 +311,6 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
         commit_msg = ret.stdout.decode("utf-8")
         warn = check_commit_message(commit_msg)
         if warn:
-            print (warn)
             post_check(patch["check_url"], "warning", "commit_msg_" + job.name, warn, "", config_pw)
             notify_by_email(mydb, patch, config_pw["smtp"])
 
@@ -318,7 +318,6 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
         ret = subprocess.run(git_cmd, capture_output=True, shell=True)
         current_hash = ret.stdout
         current_hash = current_hash[0:32]
-        print ("Current hash %s" % current_hash)
         job_result = run_job(mydb, current_hash, job)
         submit_job_result(mydb, job, job_result, patch["check_url"], config_pw)
 
@@ -349,7 +348,7 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
 if __name__ == "__main__":
 
     if len(sys.argv) != 2:
-        print("Usage:\n $ python3 patchwork_runner.py config.yaml")
+        logger.error("Usage:\n $ python3 patchwork_runner.py config.yaml")
         sys.exit(1)
 
     with open(sys.argv[1], 'r') as file:
@@ -361,7 +360,7 @@ if __name__ == "__main__":
     elif config["db"]["type"] == "mysql":
         mydb = SQLDatabase(config["db"])
     else:
-        print("Invalid database type %s" % config["db"]["type"])
+        logger.error("Invalid database type %s" % config["db"]["type"])
         sys.exit(1)
 
     jobs_list = list()
@@ -398,13 +397,13 @@ if __name__ == "__main__":
             patch_list = fetch_and_process_patches(mydb, jobs_list, time_interval, config["patchwork"])
 
         except Exception as e:
-            print("Error processing patches %s" % str(e))
-            print("Continuing after 60 seconds")
+            logger.error(f"Error processing patches {str(e)}")
+            logger.info("Continuing after 60 seconds")
             time.sleep(60)
             patch_list = None
 
         if not patch_list:
-            print ("No patches, sleeping for 5 minutes")
+            logger.info ("No patches, sleeping for 5 minutes")
             time.sleep(60*5)
         end_time = time.time()
 
     mydb.close()

commit 4554a24397b86df1bbe31458840e8a0ccfa2a8fb
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Sun Jun 8 21:54:00 2025 -0700
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 21:54:00 2025 -0700

    Catch exceptions when processing the patches

diff --git a/patchwork_runner.py b/patchwork_runner.py
index d24f6c2..9b0eb66 100644
--- a/patchwork_runner.py
+++ b/patchwork_runner.py
@@ -394,7 +394,15 @@ if __name__ == "__main__":
             time_interval = time_interval + 24 * 60
             first_run = 1
         start_time = time.time()
-        patch_list = fetch_and_process_patches(mydb, jobs_list, time_interval, config["patchwork"])
+        try:
+            patch_list = fetch_and_process_patches(mydb, jobs_list, time_interval, config["patchwork"])
+
+        except Exception as e:
+            print("Error processing patches %s" % str(e))
+            print("Continuing after 60 seconds")
+            time.sleep(60)
+            patch_list = None
+
         if not patch_list:
             print ("No patches, sleeping for 5 minutes")
             time.sleep(60*5)
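A note on the logging setup above: logging.getLogger() attaches no handler by
itself, and without a configured root handler Python's last-resort handler only
passes WARNING and above, so the logger.info() calls would be silently dropped.
A minimal sketch (not part of the commit) of one way to make the module-level
logger emit INFO records, assuming the root handler is configured once at
startup:

    import logging

    # Configure the root handler once at program start; records from
    # logging.getLogger("Runner:") propagate up to it.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(name)s %(levelname)s: %(message)s",
    )

    logger = logging.getLogger("Runner:")
    logger.setLevel(logging.INFO)
    logger.info("Found cached result: %s", "5b4d43c")  # now written to stderr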
commit 6dfccdace750c90631f12460367f14c00f0d96cf
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Sun Jun 8 21:44:16 2025 -0700
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 21:47:32 2025 -0700

    Fetch more patches when the runner is started

diff --git a/patchwork_runner.py b/patchwork_runner.py
index a8d4b30..d24f6c2 100644
--- a/patchwork_runner.py
+++ b/patchwork_runner.py
@@ -387,8 +387,12 @@ if __name__ == "__main__":
     # in minutes
     start_time = 0
     end_time = 0
+    first_run = 1
     while 1:
         time_interval = (end_time - start_time) / 60 + 10
+        if first_run == 1:
+            time_interval = time_interval + 24 * 60
+            first_run = 1
         start_time = time.time()
         patch_list = fetch_and_process_patches(mydb, jobs_list, time_interval, config["patchwork"])
         if not patch_list:

commit c6bc759eee12f838d5ccb59be8c4552d164940e1
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Sun Jun 8 19:11:15 2025 -0700
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 21:47:28 2025 -0700

    Add option to use sqlite instead of mysql

diff --git a/README b/README
index e7f3c40..c31b66b 100644
--- a/README
+++ b/README
@@ -17,7 +17,7 @@ user by email). Custom jobs can therefore be created modifying the job class. Us
 recomended for isolating the process.
 
 --- Caching results ---
-The code currently uses a mysql database to track information and cache job
+The code currently uses a sqlite or mysql database to track information and cache job
 results. The database client settings are defined in the config yaml file. Multiple runners
 on different physical machines may share a common mysql database or use a dedicated
 database. Sharing a common database allows to check whether an automated email was already
@@ -49,7 +49,5 @@ Section "runners" stores the CI jobs to run.
    permissions.
 2. Modify class Job in job.py for your CI job setup.
 3. Set the configurations in the config.yaml file.
-4. Start a mysql server instance, i.e. using docker:
-$ docker run --name "ffmpeg_mysql" -e MYSQL_ROOT_PASSWORD=${PATCHWORK_DB_PASSWORD} --rm -v sqldb:/var/lib/mysql -p 3306:3306 mysql:5.7
-5. Start the job runner with:
+4. Start the job runner with:
 $ python patchwork_runner.py config.yaml
diff --git a/config.yaml b/config_mysql.yaml
similarity index 96%
copy from config.yaml
copy to config_mysql.yaml
index 1bf46bf..880eea5 100644
--- a/config.yaml
+++ b/config_mysql.yaml
@@ -20,6 +20,7 @@ runners:
     run_full_series: True
 
 db:
+  type: "mysql"
   host: "127.0.0.1"
   user: "user"
   password: "password"
@@ -35,7 +36,7 @@ patchwork:
   token: "abcdefg12345"
 
   smtp:
-    enabled: True
+    enabled: False
     host: "smtp.gmail.com"
     port: 587
     user: "u...@gmail.com"
diff --git a/config.yaml b/config_sqlite.yaml
similarity index 80%
rename from config.yaml
rename to config_sqlite.yaml
index 1bf46bf..31c036a 100644
--- a/config.yaml
+++ b/config_sqlite.yaml
@@ -20,22 +20,15 @@ runners:
     run_full_series: True
 
 db:
-  host: "127.0.0.1"
-  user: "user"
-  password: "password"
-  db_name: "mysql"
-  ssl:
-    enabled: False
-    ssl_ca: "ssl_keys/ca-cert.pem"
-    ssl_cert: "ssl_keys/client-cert.pem"
-    ssl_key: "ssl_keys/client-key.pem"
+  type: "sqlite"
+  db_path: "./patchwork.sqlite"
 
 patchwork:
   host: "patchwork.ffmpeg.org"
   token: "abcdefg12345"
 
   smtp:
-    enabled: True
+    enabled: False
     host: "smtp.gmail.com"
     port: 587
     user: "u...@gmail.com"
diff --git a/patchwork_runner.py b/patchwork_runner.py
index 8d6fc8b..a8d4b30 100644
--- a/patchwork_runner.py
+++ b/patchwork_runner.py
@@ -16,6 +16,7 @@ from dateutil.relativedelta import relativedelta
 from email.message import EmailMessage
 from job import Job
 from mysql_helper import SQLDatabase
+from sqlite_helper import SQLiteDatabase
 from proxy_smtplib import ProxySMTP
 
 def post_check(check_url, type_check, context, msg_short, msg_long, config_pw):
@@ -66,7 +67,7 @@ def run_job(mydb, commit_hash, job):
             "unit_test_success", "unit_test_log", "number_of_warnings")
 
     commit_hash = commit_hash.decode("utf-8")
-    job_result = mydb.query(job.name, keys, "WHERE commit_hash = 0x%s" % commit_hash)
+    job_result = mydb.query(job.name, keys, "WHERE commit_hash = \"%s\"" % commit_hash)
     if job_result:
         print("\nFound cashed result: %s\n" % commit_hash)
         print(job_result)
@@ -316,7 +317,7 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
         git_cmd = git_cmd_template + " rev-parse master"
         ret = subprocess.run(git_cmd, capture_output=True, shell=True)
         current_hash = ret.stdout
-        current_hash = current_hash[0:40]
+        current_hash = current_hash[0:32]
         print ("Current hash %s" % current_hash)
         job_result = run_job(mydb, current_hash, job)
         submit_job_result(mydb, job, job_result, patch["check_url"], config_pw)
@@ -355,7 +356,13 @@ if __name__ == "__main__":
         config = yaml.safe_load(file)
 
     # local database for storing cached job results
-    mydb = SQLDatabase(config["db"])
+    if config["db"]["type"] == "sqlite":
+        mydb = SQLiteDatabase(config["db"])
+    elif config["db"]["type"] == "mysql":
+        mydb = SQLDatabase(config["db"])
+    else:
+        print("Invalid database type %s" % config["db"]["type"])
+        sys.exit(1)
 
     jobs_list = list()
 
@@ -364,9 +371,9 @@ if __name__ == "__main__":
 
     # when the db is first setup there are no tables. so init them
     for job in jobs_list:
-        mydb.create_missing_table(job.name, ("(id INT AUTO_INCREMENT PRIMARY KEY, commit_hash BINARY(20), "
-            "setup_success BIT(1), setup_log LONGTEXT, build_success BIT(1), build_log LONGTEXT,"
-            "unit_test_success BIT(1), unit_test_log LONGTEXT, number_of_warnings INT)"))
+        mydb.create_missing_table(job.name, ("(id INT AUTO_INCREMENT PRIMARY KEY, commit_hash VARCHAR(32), "
+            "setup_success INT, setup_log LONGTEXT, build_success INT, build_log LONGTEXT,"
+            "unit_test_success INT, unit_test_log LONGTEXT, number_of_warnings INT)"))
 
     # this tables stores the patches we have already processed locally
     # it is used for checking we don't run the same job twice
@@ -375,7 +382,7 @@ if __name__ == "__main__":
     # this table is used to track if we have sent an email to user for a specific
     # series. We don't want to send an email for each commit that's failed, but
     # only once per series
-    mydb.create_missing_table("series", "(id INT AUTO_INCREMENT PRIMARY KEY, series_id INT, email_sent BIT(1))")
+    mydb.create_missing_table("series", "(id INT AUTO_INCREMENT PRIMARY KEY, series_id INT, email_sent INT)")
 
     # in minutes
     start_time = 0
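The commit above dispatches on the new db.type key. A minimal sketch of the
same pattern, factored into a hypothetical open_database() helper (the helper
name is illustrative; the config dict is the "db" section of
config_sqlite.yaml or config_mysql.yaml):

    import sys

    from mysql_helper import SQLDatabase
    from sqlite_helper import SQLiteDatabase

    def open_database(config_db):
        # Only "type" is inspected here; the rest of the section is passed
        # through to the chosen helper.
        if config_db["type"] == "sqlite":
            return SQLiteDatabase(config_db)   # needs config_db["db_path"]
        elif config_db["type"] == "mysql":
            return SQLDatabase(config_db)      # needs host/user/password/db_name
        sys.exit("Invalid database type %s" % config_db["type"])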
commit f8cecf67f39e465770befd7415ebd98f07a559a7
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Sun Jun 8 18:47:21 2025 -0700
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 21:41:38 2025 -0700

    Add sqlite helpers

    From git.ffmpeg.org/gitweb/patchwork_jobs_devops.git

diff --git a/sqlite_helper.py b/sqlite_helper.py
new file mode 100644
index 0000000..bb77d5b
--- /dev/null
+++ b/sqlite_helper.py
@@ -0,0 +1,166 @@
+import sqlite3
+import os
+import time
+import threading
+
+class SQLiteDatabase:
+    """
+    SQLite database helper for FFmpeg Patchwork CI
+    Provides simplified interface for database operations
+    """
+
+    def __init__(self, config_db):
+        """
+        Initialize the SQLite database
+
+        Args:
+            db_path: Path to the SQLite database file
+        """
+        self.db_path = config_db["db_path"]
+        self.connection = None
+        self._connect()
+
+    def _connect(self):
+        """Create a new database connection"""
+        # Ensure directory exists
+        os.makedirs(os.path.dirname(os.path.abspath(self.db_path)), exist_ok=True)
+        self.connection = sqlite3.connect(self.db_path)
+        self.connection.row_factory = sqlite3.Row
+
+    def get_cursor(self):
+        """Get a database cursor, reconnecting if necessary"""
+        try:
+            # Test connection
+            self.connection.execute("SELECT 1")
+        except (sqlite3.Error, AttributeError):
+            # Reconnect if connection is lost or was never established
+            self._connect()
+
+        return self.connection.cursor()
+
+    def create_missing_table(self, name, columns):
+        """
+        Create a table if it doesn't exist
+
+        Args:
+            name: Table name
+            columns: SQL column definitions as a string
+        """
+        cursor = self.get_cursor()
+        cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name=?", (name,))
+        if cursor.fetchone() is not None:
+            # print(f"Table {name} already exists")
+            return
+
+        query = f"CREATE TABLE {name} {columns}"
+        # print(query)
+        cursor.execute(query)
+        self.connection.commit()
+        return
+
+    def query(self, table_name, keys, filter_command=""):
+        """
+        Execute a SELECT query and return the first matching row
+
+        Args:
+            table_name: Table to query
+            keys: List of column names to select
+            filter_command: WHERE clause and other SQL filters
+
+        Returns:
+            Dict containing the query results, or empty dict if no match
+        """
+        cursor = self.get_cursor()
+
+        str_cols = ", ".join(keys)
+        sql_query = f"SELECT {str_cols} FROM {table_name} {filter_command}"
+        # print(sql_query)
+        cursor.execute(sql_query)
+        db_out = cursor.fetchone()
+
+        out = {}
+        if not db_out:
+            return out
+
+        for k in keys:
+            out[k] = db_out[k]
+        return out
+
+    def query_all(self, table_name, keys, filter_command=""):
+        """
+        Execute a SELECT query and return all matching rows
+
+        Args:
+            table_name: Table to query
+            keys: List of column names to select
+            filter_command: WHERE clause and other SQL filters
+
+        Returns:
+            List of dicts containing the query results
+        """
+        cursor = self.get_cursor()
+
+        str_cols = ", ".join(keys)
+        sql_query = f"SELECT {str_cols} FROM {table_name} {filter_command}"
+        # print(sql_query)
+        cursor.execute(sql_query)
+        db_out = cursor.fetchall()
+
+        results = []
+        for row in db_out:
+            out = {}
+            for k in keys:
+                out[k] = row[k]
+            results.append(out)
+        return results
+
+    def insert(self, table, key_value_dict):
+        """
+        Insert a new row into a table
+
+        Args:
+            table: Table name
+            key_value_dict: Dict mapping column names to values
+        """
+        cursor = self.get_cursor()
+
+        keys = list(key_value_dict.keys())
+        values = list(key_value_dict.values())
+
+        placeholders = ", ".join(["?" for _ in keys])
+        keys_str = ", ".join(keys)
+
+        sql_request = f'INSERT INTO {table} ({keys_str}) VALUES ({placeholders})'
+        # print(f"{sql_request} with values {values}")
+        cursor.execute(sql_request, values)
+        self.connection.commit()
+        return cursor.lastrowid
+
+    def update(self, table, ref_key, ref_value, keys, values):
+        """
+        Update existing rows in a table
+
+        Args:
+            table: Table name
+            ref_key: List of column names to use in WHERE clause
+            ref_value: List of values corresponding to ref_key
+            keys: List of column names to update
+            values: List of new values corresponding to keys
+        """
+        cursor = self.get_cursor()
+
+        set_clauses = [f"{k} = ?" for k in keys]
+        where_clauses = [f"{k} = ?" for k in ref_key]
+
+        str_set = ", ".join(set_clauses)
+        str_where = " AND ".join(where_clauses)
+
+        sql_request = f'UPDATE {table} SET {str_set} WHERE {str_where}'
+        # print(f"{sql_request} with values {values + ref_value}")
+        cursor.execute(sql_request, values + ref_value)
+        self.connection.commit()
+
+    def close(self):
+        """Close the database connection"""
+        if self.connection:
+            self.connection.close()
+            self.connection = None
commit a1f4a250a7b4043c9aa313193529138c1877853f
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Sun Jun 8 18:42:09 2025 -0700
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 21:41:38 2025 -0700

    Remove direct call to internals of MySQL close

diff --git a/mysql_helper.py b/mysql_helper.py
index f0adb55..de0abd2 100644
--- a/mysql_helper.py
+++ b/mysql_helper.py
@@ -98,3 +98,6 @@ class SQLDatabase():
         print (sql_request)
         cursor.execute(sql_request)
         self.mydb.commit()
+
+    def close(self):
+        self.mydb.close()
diff --git a/patchwork_runner.py b/patchwork_runner.py
index d5f3e26..8d6fc8b 100644
--- a/patchwork_runner.py
+++ b/patchwork_runner.py
@@ -388,4 +388,4 @@ if __name__ == "__main__":
             print ("No patches, sleeping for 5 minutes")
             time.sleep(60*5)
         end_time = time.time()
-    mydb.mydb.close()
+    mydb.close()

commit f0a2c4488c4dbd3c011c512e76c31f21bc24ad6d
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Thu Mar 10 00:30:39 2022 -0500
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 21:41:38 2025 -0700

    Use mariadb connector instead mysql-connector

diff --git a/mysql_helper.py b/mysql_helper.py
index eafe8d4..f0adb55 100644
--- a/mysql_helper.py
+++ b/mysql_helper.py
@@ -1,4 +1,4 @@
-import mysql.connector
+import mariadb
 import time
 import threading
 
@@ -10,16 +10,16 @@ class SQLDatabase():
 
     def init_db(self):
         if self.config["ssl"]["enabled"]:
-            return mysql.connector.connect(host=self.config["host"], user=self.config["user"],
+            return mariadb.connect(host=self.config["host"], user=self.config["user"],
                 database=self.config["db_name"], ssl_ca=self.config["ssl"]["ssl_ca"],
                 ssl_cert=self.config["ssl"]["ssl_cert"], ssl_key=self.config["ssl"]["ssl_key"])
         else:
-            return mysql.connector.connect(host=self.config["host"], user=self.config["user"],
+            return mariadb.connect(host=self.config["host"], user=self.config["user"],
                 password=self.config["password"], database=self.config["db_name"])
 
     def get_cursor(self):
         try:
-            self.mydb.ping(reconnect=True, attempts=3, delay=5)
+            self.mydb.ping()
         except:
             self.mydb = self.init_db()
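On the connector swap above: the mariadb package's Connection.ping() takes no
arguments, unlike mysql-connector's ping(reconnect=True, attempts=3, delay=5),
so reconnection becomes an explicit step. A small sketch of that pattern, with
the bare except of the helper narrowed to mariadb.Error for illustration:

    import mariadb

    def get_cursor(self):
        # ping() raises a mariadb.Error subclass when the link is gone;
        # there is no built-in retry, so reconnect by hand.
        try:
            self.mydb.ping()
        except mariadb.Error:
            self.mydb = self.init_db()
        return self.mydb.cursor()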
commit e47da1e3f463403e5dbd9f461a296f7de21443c1
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Sun Feb 27 23:30:47 2022 -0500
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 21:41:38 2025 -0700

    Add option to connect to mysql server with ssl

diff --git a/config.yaml b/config.yaml
index c6129a5..1bf46bf 100644
--- a/config.yaml
+++ b/config.yaml
@@ -24,6 +24,11 @@ db:
   user: "user"
   password: "password"
   db_name: "mysql"
+  ssl:
+    enabled: False
+    ssl_ca: "ssl_keys/ca-cert.pem"
+    ssl_cert: "ssl_keys/client-cert.pem"
+    ssl_key: "ssl_keys/client-key.pem"
 
 patchwork:
   host: "patchwork.ffmpeg.org"
diff --git a/mysql_helper.py b/mysql_helper.py
index 2ab87ee..eafe8d4 100644
--- a/mysql_helper.py
+++ b/mysql_helper.py
@@ -9,8 +9,13 @@ class SQLDatabase():
         self.mydb = self.init_db()
 
     def init_db(self):
-        return mysql.connector.connect(host=self.config["host"], user=self.config["user"],
-            password=self.config["password"], database=self.config["db_name"])
+        if self.config["ssl"]["enabled"]:
+            return mysql.connector.connect(host=self.config["host"], user=self.config["user"],
+                database=self.config["db_name"], ssl_ca=self.config["ssl"]["ssl_ca"],
+                ssl_cert=self.config["ssl"]["ssl_cert"], ssl_key=self.config["ssl"]["ssl_key"])
+        else:
+            return mysql.connector.connect(host=self.config["host"], user=self.config["user"],
+                password=self.config["password"], database=self.config["db_name"])
 
     def get_cursor(self):
         try:
diff --git a/patchwork_runner.py b/patchwork_runner.py
index 3ca8b1e..d5f3e26 100644
--- a/patchwork_runner.py
+++ b/patchwork_runner.py
@@ -193,7 +193,7 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
 
     patch_list = list()
 
-    headers = {"Authorization" : "Token %s" % config_pw["token"], "Host": config_pw["token"]}
+    headers = {"Authorization" : "Token: %s" % config_pw["token"], "Host": config_pw["host"]}
 
     utc_time = datetime.utcnow()
     utc_time = utc_time - relativedelta(minutes = time_interval)
@@ -203,7 +203,6 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
 
     url = "https://" + config_pw["host"] + url_request
     resp = requests.get(url, headers = headers)
-    print (resp)
 
     reply_list = json.loads(resp.content)
     for reply in reply_list:
@@ -360,7 +359,7 @@ if __name__ == "__main__":
 
     jobs_list = list()
 
-    for name, config_runner in config["runner"].items():
+    for name, config_runner in config["runners"].items():
         jobs_list.append(Job(name, config_runner))
 
     # when the db is first setup there are no tables. so init them

commit 995c010fa21279be55ae04896cb9a71cf11b3334
Author:     Andriy Gelman <andriy.gel...@gmail.com>
AuthorDate: Sun Feb 27 17:59:52 2022 -0500
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sun Jun 8 21:41:31 2025 -0700

    Use a yaml config file instead of environment variables

    Also add example config file config.yaml.

diff --git a/README b/README
index 9793c0a..e7f3c40 100644
--- a/README
+++ b/README
@@ -18,9 +18,8 @@ recomended for isolating the process.
 
 --- Caching results ---
 The code currently uses a mysql database to track information and cache job
-results. The config of the database are set by the environment variables:
-PATCHWORK_DB_{HOST,USER,PASSWORD}. Multiple runners on different physical
-machines may share a common mysql database or use a dedicated database. Sharing
+results. The database client settings are defined in the config yaml file. Multiple runners
+on different physical machines may share a common mysql database or use a dedicated database. Sharing
 a common database allows to check whether an automated email was already
 sent for a series.
 
@@ -30,31 +29,27 @@ link to the patchwork site where the warning or error is shown. To prevent
 spamming the author, only one email is triggered per patch series. An email is
 also only sent if the parent commit builds successfully. Thus if current
 origin/master doesn't build, an email will not be sent (unless a commit fixes
-the issue and breaks it another commit of the series). The environment variables
-for connecting to an SMTP server are
-PATCHWORK_SMTP_{HOST,PORT}, PATCHWORK_{USER_EMAIL,PASSWORD_EMAIL}.
-The environment variable PATCHWORK_CC_EMAIL is used to add a cc email address.
+the issue and breaks it another commit of the series). The client configs for
+connecting to the smtp server are set in the config yaml file in the section
+"smtp".
 
 --- Patchwork authentication ---
 An account (on https://patchwork.ffmpeg.org) and proper permission are needed
 to post CI results back to the patchwork site. Email your patchwork site
 maintainer in (FFmpeg/MAINTERNERS) with your username if you want the
 permissions added to your account.
 
 After the permissions are set up, an API token can be obtained after logging in
-to the patchwork site. The environment variable PATCHWORK_TOKEN stores the api
-token. The variable PATCHWORK_HOST needs to be set to patchwork.ffmpeg.org or
-another patchwork site.
+to the patchwork site. The section "patchwork" in config yaml file stores the
+host and token for connecting to the patchwork site.
 
--- Other environemnt variables --
-The following variables are used by the docker container for the CI job:
-PATCHWORK_{UID,GID} set the uid/gid of the docker container.
-PATCHWORK_PROJECT_ROOT_PATH is the path to the main FFmpeg directory.
+-- Other yaml config sections --
+Section "runners" stores the CI jobs to run.
 
 -- Steps to setup a CI job runner --
 1. Create an account on patchwork.ffmpeg.org and email your patchwork
    maintainer to setup permissions.
 2. Modify class Job in job.py for your CI job setup.
-3. Export the environment variables described above.
+3. Set the configurations in the config.yaml file.
 4. Start a mysql server instance, i.e. using docker:
 $ docker run --name "ffmpeg_mysql" -e MYSQL_ROOT_PASSWORD=${PATCHWORK_DB_PASSWORD} --rm -v sqldb:/var/lib/mysql -p 3306:3306 mysql:5.7
 5. Start the job runner with:
-$ python patchwork_runner.py
+$ python patchwork_runner.py config.yaml
diff --git a/config.yaml b/config.yaml
new file mode 100644
index 0000000..c6129a5
--- /dev/null
+++ b/config.yaml
@@ -0,0 +1,43 @@
+runners:
+  x86:
+    wd: "/home/user/ffmpeg_sources/ffmpeg"
+    uid: 1001
+    gid: 1001
+    docker_image: "ffmpeg_build:latest"
+    setup_command: "source run_configure"
+    build_flags: "-j44"
+    fate_flags: "-k -j44"
+    run_full_series: True
+
+  ppc:
+    wd: "/home/user/ffmpeg_sources/ffmpeg"
+    uid: 1001
+    gid: 1001
+    docker_image: "ffmpeg_build_ppc:latest"
+    setup_command: "source run_configure"
+    build_flags: "-j44"
+    fate_flags: "-k -j44"
+    run_full_series: True
+
+db:
+  host: "127.0.0.1"
+  user: "user"
+  password: "password"
+  db_name: "mysql"
+
+patchwork:
+  host: "patchwork.ffmpeg.org"
+  token: "abcdefg12345"
+
+  smtp:
+    enabled: True
+    host: "smtp.gmail.com"
+    port: 587
+    user: "u...@gmail.com"
+    password: "password"
+    cc_email: "ccem...@gmail.com"
+    proxy:
+      enabled: False
+      cmd: "ssh -f -D 2345 -p 5678 user@127.0.0.1 sleep 10"
+      proxy_addr: "localhost"
+      proxy_port: 2345
diff --git a/mysql_helper.py b/mysql_helper.py
index d53f42c..2ab87ee 100644
--- a/mysql_helper.py
+++ b/mysql_helper.py
@@ -4,14 +4,13 @@ import threading
 
 class SQLDatabase():
 
-    def __init__(self, host, user, password):
-        self.host = host
-        self.user = user
-        self.password = password
+    def __init__(self, config_db):
+        self.config = config_db
         self.mydb = self.init_db()
 
     def init_db(self):
-        return mysql.connector.connect(host=self.host, user=self.user, password=self.password, database="mysql")
+        return mysql.connector.connect(host=self.config["host"], user=self.config["user"],
+            password=self.config["password"], database=self.config["db_name"])
 
     def get_cursor(self):
         try:
diff --git a/patchwork_runner.py b/patchwork_runner.py
index d701b53..3ca8b1e 100644
--- a/patchwork_runner.py
+++ b/patchwork_runner.py
@@ -1,6 +1,5 @@
 import email
 import json
-import os
 import re
 import requests
 import smtplib
@@ -9,6 +8,7 @@ import subprocess
 import sys
 import time
 import urllib.parse
+import yaml
 
 from commit_message_filter import check_commit_message
 from datetime import datetime, timezone
@@ -18,32 +18,7 @@ from job import Job
 from mysql_helper import SQLDatabase
 from proxy_smtplib import ProxySMTP
 
-env = os.environ
-use_proxy = int(env["PATCHWORK_USE_PROXY"])
-socks_dynamic_port = int(env["PATCHWORK_SOCKS_DYNAMIC_PORT"])
-proxy_host = env["PATCHWORK_PROXY_HOST"]
-socks_proxy_uname = env["PATCHWORK_SOCKS_PROXY_UNAME"]
-socks_proxy_ip = env["PATCHWORK_SOCKS_PROXY_IP"]
-socks_proxy_port = int(env["PATCHWORK_SOCKS_PROXY_PORT"])
-
-db_host = env["PATCHWORK_DB_HOST"]
-db_user = env["PATCHWORK_DB_USER"]
-db_password = env["PATCHWORK_DB_PASSWORD"]
-
-smtp_host = env["PATCHWORK_SMTP_HOST"]
-smtp_port = int(env["PATCHWORK_SMTP_PORT"])
-user_email = env["PATCHWORK_USER_EMAIL"]
-cc_email = env["PATCHWORK_CC_EMAIL"]
-password_email = env["PATCHWORK_PASSWORD_EMAIL"]
-
-uid = int(env["PATCHWORK_UID"])
-gid = int(env["PATCHWORK_GID"])
-
-patchwork_token = env["PATCHWORK_TOKEN"]
-patchwork_host = env["PATCHWORK_HOST"]
-project_root_path = env["PATCHWORK_PROJECT_ROOT_PATH"]
-
-def post_check(check_url, type_check, context, msg_short, msg_long):
+def post_check(check_url, type_check, context, msg_short, msg_long, config_pw):
 
     if (isinstance(msg_long, bytes)):
@@ -60,28 +35,28 @@ def post_check(check_url, type_check, context, msg_short, msg_long):
 
     msg_long = split_char.join(msg_long_split)
 
-    headers = {"Authorization" : "Token %s" % patchwork_token}
+    headers = {"Authorization" : "Token %s" % config_pw["token"]}
     payload = {"state" : type_check, "context" : context, "description" : msg_short, "description_long" : msg_long}
     resp = requests.post(check_url, headers=headers, data=payload)
     print(resp)
     print(resp.content)
 
-def submit_job_result(mydb, job, job_result, check_url):
+def submit_job_result(mydb, job, job_result, check_url, config_pw):
 
     if job_result["setup_success"] == 0:
-        post_check(check_url, "warning", "configure_" + job.name, "Failed to run configure", job_result["setup_log"])
+        post_check(check_url, "warning", "configure_" + job.name, "Failed to run configure", job_result["setup_log"], config_pw)
         return
 
     if job_result["build_success"] == 1:
-        post_check(check_url, "success", "make_" + job.name, "Make finished", b'')
+        post_check(check_url, "success", "make_" + job.name, "Make finished", b'', config_pw)
     else:
-        post_check(check_url, "fail", "make_" + job.name, "Make failed", job_result["build_log"])
+        post_check(check_url, "fail", "make_" + job.name, "Make failed", job_result["build_log"], config_pw)
         return
 
     if job_result["unit_test_success"] == 1:
-        post_check(check_url, "success", "make_fate_" + job.name, "Make fate finished", b'')
+        post_check(check_url, "success", "make_fate_" + job.name, "Make fate finished", b'', config_pw)
     else:
-        post_check(check_url, "fail", "make_fate_" + job.name, "Make fate failed", job_result["unit_test_log"])
+        post_check(check_url, "fail", "make_fate_" + job.name, "Make fate failed", job_result["unit_test_log"], config_pw)
         return
 
@@ -138,7 +113,7 @@ def run_job(mydb, commit_hash, job):
     mydb.insert(job.name, job_result)
     return job_result
 
-def notify_by_email(mydb, patch):
+def notify_by_email(mydb, patch, config_smtp):
 
     print ("Sending email notification")
 
@@ -146,8 +121,11 @@ def notify_by_email(mydb, patch):
     keys.append("email_sent")
 
     series_id = patch["series_id"]
-    res = mydb.query("series", keys, "WHERE series_id = %d" % series_id)
-    email_sent = res["email_sent"]
+    email_sent = False
+    if mydb is not None:
+        res = mydb.query("series", keys, "WHERE series_id = %d" % series_id)
+        email_sent = res["email_sent"]
+
     if email_sent:
         return
 
@@ -161,27 +139,28 @@ def notify_by_email(mydb, patch):
     msg_email = EmailMessage()
     msg_email.set_content(msg)
     msg_email["Subject"] = "Re: " + patch["subject_email"]
-    msg_email["From"] = "Patchwork <%s>" % user_email
+    msg_email["From"] = "Patchwork <%s>" % config_smtp["user"]
     msg_email["To"] = patch["author_email"]
-    msg_email["Cc"] = cc_email
+    msg_email["Cc"] = config_smtp["cc_email"]
     msg_email["In-Reply-To"] = patch["msg_id"]
     msg_email["References"] = patch["msg_id"]
 
-    print ("Proxy is %d" % use_proxy)
-    if use_proxy == 1:
+    config_proxy = config_smtp["proxy"]
+    print ("Proxy is %d" % config_proxy["enabled"])
+    if config_proxy["enabled"]:
         print ("Using proxy")
-        proxy_setup_cmd = "ssh -f -D %d -p %d %s@%s sleep 10" % (socks_dynamic_port, socks_proxy_port, socks_proxy_uname, socks_proxy_ip)
-        ret = subprocess.run(proxy_setup_cmd, shell=True)
-        smtp = ProxySMTP(smtp_host, smtp_port, proxy_addr = proxy_host, proxy_port = socks_dynamic_port)
+        ret = subprocess.run(config_proxy["cmd"], shell=True)
+        smtp = ProxySMTP(config_smtp["host"], config_smtp["port"], proxy_addr = config_proxy["proxy_addr"], proxy_port = config_proxy["proxy_port"])
     else:
-        smtp = smtplib.SMTP(smtp_host, smtp_port)
+        smtp = smtplib.SMTP(config_smtp["host"], config_smtp["port"])
 
     smtp.starttls()
-    smtp.login(user_email, password_email)
+    smtp.login(config_smtp["user"], config_smtp["password"])
     smtp.sendmail(msg_email["From"], msg_email["To"], msg_email.as_string())
     smtp.quit()
 
-    mydb.update("series", ["series_id"], ["%d " % series_id], ["email_sent"], ["1"])
+    if mydb is not None:
+        mydb.update("series", ["series_id"], ["%d " % series_id], ["email_sent"], ["1"])
 
 def regex_version_and_commit(subject):
     subject_clean_re = re.compile('\[[^]]*\]\s+(\[[^]]*\])')
@@ -210,18 +189,18 @@ def regex_version_and_commit(subject):
 
     return version_num, commit_entry_num, commit_entry_den
 
-def fetch_and_process_patches(mydb, jobs_list, time_interval):
+def fetch_and_process_patches(mydb, jobs_list, time_interval, config_pw):
 
     patch_list = list()
 
-    headers = {"Authorization" : "Token %s" % patchwork_token, "Host": patchwork_host}
+    headers = {"Authorization" : "Token %s" % config_pw["token"], "Host": config_pw["token"]}
 
     utc_time = datetime.utcnow()
     utc_time = utc_time - relativedelta(minutes = time_interval)
     str_time = utc_time.strftime("%Y-%m-%dT%H:%M:%S")
     str_time = urllib.parse.quote(str_time)
 
     url_request = "/api/events/?category=patch-completed&since=" + str_time
-    url = "https://" + patchwork_host + url_request
+    url = "https://" + config_pw["host"] + url_request
     resp = requests.get(url, headers = headers)
     print (resp)
 
@@ -275,13 +254,13 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval):
         if not res:
             mydb.insert("series", {"series_id" : "%d" % series_id, "email_sent" : 0})
 
-    git_cmd_template = "git --git-dir=%s/.git --work-tree=%s " % (project_root_path, project_root_path)
     print ("Number of patches in list: %d" % len(patch_list))
 
     for patch in patch_list:
 
         job = patch["job"]
+        git_cmd_template = "git --git-dir=%s/.git --work-tree=%s " % (job.config["wd"], job.config["wd"])
         _, commit_num, commit_den = regex_version_and_commit(patch["subject_email"])
         if job.config["run_full_series"] == False and commit_num != commit_den:
             continue
@@ -301,7 +280,7 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval):
         max_retries = 10
         retries = 0
         while 1:
-            ret = subprocess.run("curl %s/?series=%d > %s/mbox_file" % (patch["mbox"], patch["series_id"], project_root_path), shell=True)
+            ret = subprocess.run("curl %s/?series=%d > %s/mbox_file" % (patch["mbox"], patch["series_id"], job.config["wd"]), shell=True)
             if ret.returncode == 0 or retries == max_retries:
                 break
             retries = retries + 1
@@ -319,10 +298,10 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval):
             git_cmd = git_cmd_template + "am --keep-cr --skip"
             ret = subprocess.run(git_cmd, capture_output=True, shell=True)
             if ret.returncode != 0:
-                post_check(patch["check_url"], "warning", "configure_" + job.name, "Failed to apply patch", "")
+                post_check(patch["check_url"], "warning", "configure_" + job.name, "Failed to apply patch", "", config_pw)
                 continue
             else:
-                post_check(patch["check_url"], "warning", "configure_" + job.name, "Failed to apply patch", ret.stderr)
+                post_check(patch["check_url"], "warning", "configure_" + job.name, "Failed to apply patch", ret.stderr, config_pw)
                 continue
 
         # check commit message
@@ -332,8 +311,8 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval):
         warn = check_commit_message(commit_msg)
         if warn:
             print (warn)
-            post_check(patch["check_url"], "warning", "commit_msg_" + job.name, warn, "")
-            notify_by_email(mydb, patch)
+            post_check(patch["check_url"], "warning", "commit_msg_" + job.name, warn, "", config_pw)
+            notify_by_email(mydb, patch, config_pw["smtp"])
 
         git_cmd = git_cmd_template + " rev-parse master"
         ret = subprocess.run(git_cmd, capture_output=True, shell=True)
@@ -341,7 +320,7 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval):
         current_hash = current_hash[0:40]
         print ("Current hash %s" % current_hash)
         job_result = run_job(mydb, current_hash, job)
-        submit_job_result(mydb, job, job_result, patch["check_url"])
+        submit_job_result(mydb, job, job_result, patch["check_url"], config_pw)
 
         # get the hash of HEAD~
         git_cmd = git_cmd_template + " rev-parse master~"
@@ -354,48 +333,35 @@ def fetch_and_process_patches(mydb, jobs_list, time_interval):
         job_result_prev = run_job(mydb, prev_hash, job)
 
         if job_result["number_of_warnings"] > job_result_prev["number_of_warnings"]:
-            post_check(patch["check_url"], "warning", "make_" + job.name, "New warnings during build", "")
+            post_check(patch["check_url"], "warning", "make_" + job.name, "New warnings during build", "", config_pw)
 
         if job_result["setup_success"] == 0 and job_result_prev["setup_success"] == 1:
-            notify_by_email(mydb, patch)
+            notify_by_email(mydb, patch, config_pw["smtp"])
         if job_result['build_success'] == 0 and job_result_prev['build_success'] == 1:
-            notify_by_email(mydb, patch)
+            notify_by_email(mydb, patch, config_pw["smtp"])
        if job_result['unit_test_success'] == 0 and job_result_prev['unit_test_success'] == 1:
-            notify_by_email(mydb, patch)
+            notify_by_email(mydb, patch, config_pw["smtp"])
 
     return patch_list
 
 if __name__ == "__main__":
 
+    if len(sys.argv) != 2:
+        print("Usage:\n $ python3 patchwork_runner.py config.yaml")
+        sys.exit(1)
+
+    with open(sys.argv[1], 'r') as file:
+        config = yaml.safe_load(file)
+
     # local database for storing cached job results
-    mydb = SQLDatabase(db_host, db_user, db_password)
+    mydb = SQLDatabase(config["db"])
 
     jobs_list = list()
 
-    # setup configuration
-    config_x86 = dict()
-    config_x86["wd"] = project_root_path
-    config_x86["docker_image"] = "ffmpeg_build:latest"
-    config_x86["setup_command"] = "source run_configure"
-    config_x86["build_flags"] = "-j44"
-    config_x86["fate_flags"] = "-k -j44"
-    config_x86["uid"] = uid
-    config_x86["gid"] = gid
-    config_x86["run_full_series"] = True
-    jobs_list.append(Job("x86", config_x86))
-
-    config_ppc = dict()
-    config_ppc["wd"] = project_root_path
-    config_ppc["docker_image"] = "ffmpeg_build_ppc:latest"
-    config_ppc["setup_command"] = "source run_configure_ppc"
-    config_ppc["build_flags"] = "-j44"
-    config_ppc["fate_flags"] = "-k -j44"
-    config_ppc["uid"] = uid
-    config_ppc["gid"] = gid
-    config_ppc["run_full_series"] = True
-    jobs_list.append(Job("ppc", config_ppc))
+    for name, config_runner in config["runner"].items():
+        jobs_list.append(Job(name, config_runner))
 
     # when the db is first setup there are no tables. so init them
     for job in jobs_list:
@@ -418,7 +384,7 @@ if __name__ == "__main__":
     while 1:
         time_interval = (end_time - start_time) / 60 + 10
         start_time = time.time()
-        patch_list = fetch_and_process_patches(mydb, jobs_list, time_interval)
+        patch_list = fetch_and_process_patches(mydb, jobs_list, time_interval, config["patchwork"])
         if not patch_list:
             print ("No patches, sleeping for 5 minutes")
             time.sleep(60*5)
diff --git a/test_send_email.py b/test_send_email.py
index 25e12bd..14cdf30 100644
--- a/test_send_email.py
+++ b/test_send_email.py
@@ -1,62 +1,27 @@
-import os
 import smtplib
 import socks
 import subprocess
+import sys
+import yaml
 
 from email.message import EmailMessage
+from patchwork_runner import notify_by_email
 from proxy_smtplib import ProxySMTP
 
-env = os.environ
-use_proxy = int(env["PATCHWORK_USE_PROXY"])
-socks_dynamic_port = int(env["PATCHWORK_SOCKS_DYNAMIC_PORT"])
-proxy_host = env["PATCHWORK_PROXY_HOST"]
-socks_proxy_uname = env["PATCHWORK_SOCKS_PROXY_UNAME"]
-socks_proxy_ip = env["PATCHWORK_SOCKS_PROXY_IP"]
-socks_proxy_port = int(env["PATCHWORK_SOCKS_PROXY_PORT"])
-
-db_host = env["PATCHWORK_DB_HOST"]
-db_user = env["PATCHWORK_DB_USER"]
-db_password = env["PATCHWORK_DB_PASSWORD"]
-
-smtp_host = env["PATCHWORK_SMTP_HOST"]
-smtp_port = int(env["PATCHWORK_SMTP_PORT"])
-user_email = env["PATCHWORK_USER_EMAIL"]
-cc_email = env["PATCHWORK_CC_EMAIL"]
-password_email = env["PATCHWORK_PASSWORD_EMAIL"]
-
-uid = int(env["PATCHWORK_UID"])
-gid = int(env["PATCHWORK_GID"])
-
-patchwork_token = env["PATCHWORK_TOKEN"]
-patchwork_host = env["PATCHWORK_HOST"]
-project_root_path = env["PATCHWORK_PROJECT_ROOT_PATH"]
-
-def send_email_test():
-    msg = ("Hello,\n\n"
-           "Thank you for submitting a patch to ffmpeg-devel.\n"
-           "An error occurred during an automated build/fate test. Please review the following link for more details:\n"
-           "%s\n\n"
-           "Thank you,\n"
-           "ffmpeg-devel") % "this is a test"
-
-    msg_email = EmailMessage()
-    msg_email.set_content(msg)
-    msg_email["Subject"] = "hi"
-    msg_email["From"] = "Patchwork <%s>" % user_email
-    msg_email["To"] = user_email
-
-    if use_proxy == 1:
-        print ("Using proxy")
-        proxy_setup_cmd = "ssh -f -D %d -p %d %s@%s sleep 10" % (socks_dynamic_port, socks_proxy_port, socks_proxy_uname, socks_proxy_ip)
-        ret = subprocess.run(proxy_setup_cmd, shell=True)
-        smtp = ProxySMTP(smtp_host, smtp_port, proxy_addr = proxy_host, proxy_port = socks_dynamic_port)
-    else:
-        smtp = smtplib.SMTP(smtp_host, smtp_port)
-
-    smtp.starttls()
-    smtp.login(user_email, password_email)
-    smtp.sendmail(msg_email["From"], msg_email["To"], msg_email.as_string())
-    smtp.quit()
-
 if __name__ == "__main__":
-    send_email_test()
+
+    if len(sys.argv) != 2:
+        print("Usage:\n $ python3 patchwork_runner.py config.yaml")
+        sys.exit(1)
+
+    with open(sys.argv[1], 'r') as file:
+        config = yaml.safe_load(file)
+
+    sample_patch = {
+        "series_id" : 1234,
+        "msg_id" : "",
+        "mbox" : "https://example.com/mbox/",
+        "author_email" : "andriy.gel...@gmail.com",
+        "subject_email" : "patchwork: this is a test email",
+    }
+    notify_by_email(None, sample_patch, config["patchwork"]["smtp"])

commit 1c64a6a32fc0372f95851c084cbda1eb5e630e13
Author:     Shiyou Yin <yinshiyou...@loongson.cn>
AuthorDate: Thu Feb 10 19:24:54 2022 +0800
Commit:     Andriy Gelman <andriy.gel...@gmail.com>
CommitDate: Sat Feb 26 17:51:30 2022 -0500

    Fix TypeError during mydb.ping.

    TypeError: ping() got an unexpected keyword argument 'attemts'.

diff --git a/mysql_helper.py b/mysql_helper.py
index 3eb1f50..d53f42c 100644
--- a/mysql_helper.py
+++ b/mysql_helper.py
@@ -15,7 +15,7 @@ class SQLDatabase():
 
     def get_cursor(self):
         try:
-            self.mydb.ping(reconnect=True, attemts=3, delay=5)
+            self.mydb.ping(reconnect=True, attempts=3, delay=5)
         except:
             self.mydb = self.init_db()
-----------------------------------------------------------------------

Summary of changes:
 README              |  33 ++++-----
 config_mysql.yaml   |  49 +++++++++++++
 config_sqlite.yaml  |  41 +++++++++++
 mysql_helper.py     |  21 ++++--
 patchwork_runner.py | 205 ++++++++++++++++++++++++----------------------------
 sqlite_helper.py    | 166 ++++++++++++++++++++++++++++++++++++++++++
 test_send_email.py  |  73 +++++--------------
 7 files changed, 396 insertions(+), 192 deletions(-)
 create mode 100644 config_mysql.yaml
 create mode 100644 config_sqlite.yaml
 create mode 100644 sqlite_helper.py


hooks/post-receive
--
UNNAMED PROJECT

_______________________________________________
ffmpeg-cvslog mailing list
ffmpeg-cvslog@ffmpeg.org
https://ffmpeg.org/mailman/listinfo/ffmpeg-cvslog

To unsubscribe, visit link above, or email
ffmpeg-cvslog-requ...@ffmpeg.org with subject "unsubscribe".