diff --git a/master-docker-nonstandard/master.cfg b/master-docker-nonstandard/master.cfg index 85e1fbb4..0e415d46 100644 --- a/master-docker-nonstandard/master.cfg +++ b/master-docker-nonstandard/master.cfg @@ -14,7 +14,7 @@ import docker import os import sys -sys.path.insert(0, '/srv/buildbot/master') +sys.path.insert(0, "/srv/buildbot/master") sys.setrecursionlimit(10000) from common_factories import * @@ -29,8 +29,8 @@ c = BuildmasterConfig = {} # Load the slave, database passwords and 3rd-party tokens from an external private file, so # that the rest of the configuration can be public. -config = { "private": { } } -exec(open("../master-private.cfg").read(), config, { }) +config = {"private": {}} +exec(open("../master-private.cfg").read(), config, {}) ####### BUILDBOT SERVICES @@ -39,175 +39,375 @@ exec(open("../master-private.cfg").read(), config, { }) # has a variety to choose from, like IRC bots. -c['services'] = [] +c["services"] = [] context = util.Interpolate("buildbot/%(prop:buildername)s") -gs = reporters.GitHubStatusPush(token=config["private"]["gh_mdbci"]["access_token"], - context=context, - startDescription='Build started.', - endDescription='Build done.', - verbose=True, - builders=github_status_builders) -c['services'].append(gs) +gs = reporters.GitHubStatusPush( + token=config["private"]["gh_mdbci"]["access_token"], + context=context, + startDescription="Build started.", + endDescription="Build done.", + verbose=True, + builders=github_status_builders, +) +c["services"].append(gs) ####### PROJECT IDENTITY # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). 
-c['title'] = os.getenv('TITLE', default="MariaDB CI") -c['titleURL'] = os.getenv('TITLE_URL', default="https://github.com/MariaDB/server") +c["title"] = os.getenv("TITLE", default="MariaDB CI") +c["titleURL"] = os.getenv("TITLE_URL", default="https://github.com/MariaDB/server") # the 'buildbotURL' string should point to the location where the buildbot's # internal web server is visible. This typically uses the port number set in # the 'www' entry below, but with an externally-visible host name which the # buildbot cannot figure out without some help. -c['buildbotURL'] = os.getenv('BUILDMASTER_URL', default="https://buildbot.mariadb.org/") +c["buildbotURL"] = os.getenv("BUILDMASTER_URL", default="https://buildbot.mariadb.org/") # 'protocols' contains information about protocols which master will use for # communicating with workers. You must define at least 'port' option that workers # could connect to your master with this protocol. # 'port' must match the value configured into the workers (with their # --master option) -port = int(os.getenv('PORT', default="9992")) -c['protocols'] = {'pb': {'port': port}} +port = int(os.getenv("PORT", default="9992")) +c["protocols"] = {"pb": {"port": port}} ####### DB URL -c['db'] = { +c["db"] = { # This specifies what database buildbot uses to store its state. - 'db_url' : config["private"]["db_url"] + "db_url": config["private"]["db_url"] } -mtrDbPool = util.EqConnectionPool("MySQLdb", config["private"]["db_host"], config["private"]["db_user"], config["private"]["db_password"], config["private"]["db_mtr_db"]) +mtrDbPool = util.EqConnectionPool( + "MySQLdb", + config["private"]["db_host"], + config["private"]["db_user"], + config["private"]["db_password"], + config["private"]["db_mtr_db"], +) ####### Disable net usage reports from being sent to buildbot.net -c['buildbotNetUsageData'] = None +c["buildbotNetUsageData"] = None ####### SCHEDULERS # Configure the Schedulers, which decide how to react to incoming changes. 
-c['schedulers'] = getSchedulers() +c["schedulers"] = getSchedulers() ####### WORKERS # The 'workers' list defines the set of recognized workers. Each element is # a Worker object, specifying a unique worker name and password. The same # worker name and password must be configured on the worker. -c['workers'] = [] +c["workers"] = [] workers = {} -def addWorker(worker_name_prefix, worker_id, worker_type, dockerfile, jobs=5, save_packages=False, shm_size='15G'): + + +def addWorker( + worker_name_prefix, + worker_id, + worker_type, + dockerfile, + jobs=5, + save_packages=False, + shm_size="15G", +): name, instance = createWorker( - worker_name_prefix, - worker_id, - worker_type, - dockerfile, - jobs, - save_packages, - shm_size, - ) + worker_name_prefix, + worker_id, + worker_type, + dockerfile, + jobs, + save_packages, + shm_size, + ) if name[0] not in workers: workers[name[0]] = [name[1]] else: workers[name[0]].append(name[1]) - c['workers'].append(instance) + c["workers"].append(instance) + # Docker workers -fqdn = os.getenv('BUILDMASTER_WG_IP', default='100.64.100.1') +fqdn = os.getenv("BUILDMASTER_WG_IP", default="100.64.100.1") ## hz-bbw2-docker -c['workers'].append(worker.DockerLatentWorker("hz-bbw2-docker-eco-php-ubuntu-2004", None, - docker_host=config["private"]["docker_workers"]["hz-bbw2-docker"], - dockerfile=open("dockerfiles/eco-php-ubuntu-2004.dockerfile").read(), - followStartupLogs=False, - masterFQDN=fqdn, - hostconfig={'shm_size':'6G', 'ulimits': [docker.types.Ulimit(name='memlock', soft=51200000, hard=51200000)]}, - build_wait_timeout=0, - max_builds=1, - volumes=['/srv/buildbot/eco/code:/code', '/srv/buildbot/eco/build:/build'], - properties={ 'jobs':7, 'save_packages':False })) - -c['workers'].append(worker.DockerLatentWorker("hz-bbw2-docker-eco-dbdeployer-ubuntu-2004", None, - docker_host=config["private"]["docker_workers"]["hz-bbw2-docker"], - dockerfile=open("dockerfiles/eco-dbdeployer-ubuntu-2004.dockerfile").read(), - 
followStartupLogs=False, - masterFQDN=fqdn, - hostconfig={'shm_size':'6G', 'ulimits': [docker.types.Ulimit(name='memlock', soft=51200000, hard=51200000)]}, - build_wait_timeout=0, - max_builds=1, - volumes=['/srv/buildbot/eco/dbdeployer:/dbdeployer'], - properties={ 'jobs':7, 'save_packages':False })) - -c['workers'].append(worker.DockerLatentWorker("hz-bbw2-docker-eco-pymysql-python-3-9-slim-buster", None, - docker_host=config["private"]["docker_workers"]["hz-bbw2-docker"], - dockerfile=open("dockerfiles/eco-pymysql-python-3-9-slim-buster.dockerfile").read(), - followStartupLogs=False, - masterFQDN=fqdn, - hostconfig={'shm_size':'6G', 'ulimits': [docker.types.Ulimit(name='memlock', soft=51200000, hard=51200000)]}, - build_wait_timeout=0, - max_builds=1, - volumes=['/srv/buildbot/eco/pymysqlcode:/code'], - properties={ 'jobs':7, 'save_packages':False })) - -c['workers'].append(worker.DockerLatentWorker("hz-bbw2-docker-eco-mysqljs-nodejs15-buster", None, - docker_host=config["private"]["docker_workers"]["hz-bbw2-docker"], - dockerfile=open("dockerfiles/eco-mysqljs-nodejs15-buster.dockerfile").read(), - followStartupLogs=False, - masterFQDN=fqdn, - hostconfig={'shm_size':'6G', 'ulimits': [docker.types.Ulimit(name='memlock', soft=51200000, hard=51200000)]}, - build_wait_timeout=0, - max_builds=1, - volumes=['/srv/buildbot/eco/mysqljscode:/code'], - properties={ 'jobs':7, 'save_packages':False })) +c["workers"].append( + worker.DockerLatentWorker( + "hz-bbw2-docker-eco-php-ubuntu-2004", + None, + docker_host=config["private"]["docker_workers"]["hz-bbw2-docker"], + dockerfile=open("dockerfiles/eco-php-ubuntu-2004.dockerfile").read(), + followStartupLogs=False, + masterFQDN=fqdn, + hostconfig={ + "shm_size": "6G", + "ulimits": [ + docker.types.Ulimit(name="memlock", soft=51200000, hard=51200000) + ], + }, + build_wait_timeout=0, + max_builds=1, + volumes=["/srv/buildbot/eco/code:/code", "/srv/buildbot/eco/build:/build"], + properties={"jobs": 7, "save_packages": False}, 
+ ) +) + +c["workers"].append( + worker.DockerLatentWorker( + "hz-bbw2-docker-eco-dbdeployer-ubuntu-2004", + None, + docker_host=config["private"]["docker_workers"]["hz-bbw2-docker"], + dockerfile=open("dockerfiles/eco-dbdeployer-ubuntu-2004.dockerfile").read(), + followStartupLogs=False, + masterFQDN=fqdn, + hostconfig={ + "shm_size": "6G", + "ulimits": [ + docker.types.Ulimit(name="memlock", soft=51200000, hard=51200000) + ], + }, + build_wait_timeout=0, + max_builds=1, + volumes=["/srv/buildbot/eco/dbdeployer:/dbdeployer"], + properties={"jobs": 7, "save_packages": False}, + ) +) + +c["workers"].append( + worker.DockerLatentWorker( + "hz-bbw2-docker-eco-pymysql-python-3-9-slim-buster", + None, + docker_host=config["private"]["docker_workers"]["hz-bbw2-docker"], + dockerfile=open( + "dockerfiles/eco-pymysql-python-3-9-slim-buster.dockerfile" + ).read(), + followStartupLogs=False, + masterFQDN=fqdn, + hostconfig={ + "shm_size": "6G", + "ulimits": [ + docker.types.Ulimit(name="memlock", soft=51200000, hard=51200000) + ], + }, + build_wait_timeout=0, + max_builds=1, + volumes=["/srv/buildbot/eco/pymysqlcode:/code"], + properties={"jobs": 7, "save_packages": False}, + ) +) + +c["workers"].append( + worker.DockerLatentWorker( + "hz-bbw2-docker-eco-mysqljs-nodejs15-buster", + None, + docker_host=config["private"]["docker_workers"]["hz-bbw2-docker"], + dockerfile=open("dockerfiles/eco-mysqljs-nodejs15-buster.dockerfile").read(), + followStartupLogs=False, + masterFQDN=fqdn, + hostconfig={ + "shm_size": "6G", + "ulimits": [ + docker.types.Ulimit(name="memlock", soft=51200000, hard=51200000) + ], + }, + build_wait_timeout=0, + max_builds=1, + volumes=["/srv/buildbot/eco/mysqljscode:/code"], + properties={"jobs": 7, "save_packages": False}, + ) +) ## bm-bbw1-docker -MASTER_PACKAGES = os.getenv('MASTER_PACKAGES_DIR', default='/mnt/autofs/master_packages') -c['workers'].append(worker.DockerLatentWorker("bm-bbw1-docker-ubuntu-2004", None, - 
docker_host=config["private"]["docker_workers"]["bm-bbw1-docker"], - image="quay.io/mariadb-foundation/bb-worker:ubuntu20.04", - followStartupLogs=False, - autopull=True, - alwaysPull=True, - masterFQDN=fqdn, - hostconfig={'shm_size':'20G', 'ulimits': [docker.types.Ulimit(name='memlock', soft=51200000, hard=51200000)]}, - max_builds=1, - volumes=['/srv/buildbot/ccache:/mnt/ccache', MASTER_PACKAGES+':/packages'], - properties={ 'jobs': 2, 'save_packages':False })) - -addWorker('hz-bbw', 6, '-bigtest-ubuntu-2004', "quay.io/mariadb-foundation/bb-worker:ubuntu20.04", jobs=20, save_packages=False) +MASTER_PACKAGES = os.getenv( + "MASTER_PACKAGES_DIR", default="/mnt/autofs/master_packages" +) +c["workers"].append( + worker.DockerLatentWorker( + "bm-bbw1-docker-ubuntu-2004", + None, + docker_host=config["private"]["docker_workers"]["bm-bbw1-docker"], + image="quay.io/mariadb-foundation/bb-worker:ubuntu20.04", + followStartupLogs=False, + autopull=True, + alwaysPull=True, + masterFQDN=fqdn, + hostconfig={ + "shm_size": "20G", + "ulimits": [ + docker.types.Ulimit(name="memlock", soft=51200000, hard=51200000) + ], + }, + max_builds=1, + volumes=["/srv/buildbot/ccache:/mnt/ccache", MASTER_PACKAGES + ":/packages"], + properties={"jobs": 2, "save_packages": False}, + ) +) + +addWorker( + "hz-bbw", + 6, + "-bigtest-ubuntu-2004", + "quay.io/mariadb-foundation/bb-worker:ubuntu20.04", + jobs=20, + save_packages=False, +) ## Add Power workers -for w_name in ['ppc64le-db-bbw']: - addWorker(w_name, 1, '-ubuntu-2004', "quay.io/mariadb-foundation/bb-worker:ubuntu20.04", jobs=7, save_packages=True, shm_size='20G') - addWorker(w_name, 1, '-ubuntu-2004-debug', "quay.io/mariadb-foundation/bb-worker:ubuntu20.04", jobs=30, save_packages=True) +for w_name in ["ppc64le-db-bbw"]: + addWorker( + w_name, + 1, + "-ubuntu-2004", + "quay.io/mariadb-foundation/bb-worker:ubuntu20.04", + jobs=7, + save_packages=True, + shm_size="20G", + ) + addWorker( + w_name, + 1, + "-ubuntu-2004-debug", + 
"quay.io/mariadb-foundation/bb-worker:ubuntu20.04", + jobs=30, + save_packages=True, + ) ## ns-x64-bbw-docker -for w_name in ['ns-x64-bbw']: +for w_name in ["ns-x64-bbw"]: end_range = 6 - for i in range(1,end_range): + for i in range(1, end_range): jobs = 7 - addWorker(w_name, i, '-aocc-debian-11', "quay.io/mariadb-foundation/bb-worker:debian11-aocc", jobs=jobs, save_packages=False) - addWorker(w_name, i, '-asan-ubuntu-2204', "quay.io/mariadb-foundation/bb-worker:ubuntu22.04", jobs=jobs, save_packages=False) - addWorker(w_name, i, '-icc-ubuntu-2204', "quay.io/mariadb-foundation/bb-worker:ubuntu22.04-icc", jobs=jobs, save_packages=False) - addWorker(w_name, i, '-ubuntu-2004', "quay.io/mariadb-foundation/bb-worker:ubuntu20.04", jobs=jobs, save_packages=True) + addWorker( + w_name, + i, + "-aocc-debian-11", + "quay.io/mariadb-foundation/bb-worker:debian11-aocc", + jobs=jobs, + save_packages=False, + ) + addWorker( + w_name, + i, + "-asan-ubuntu-2204", + "quay.io/mariadb-foundation/bb-worker:ubuntu22.04", + jobs=jobs, + save_packages=False, + ) + addWorker( + w_name, + i, + "-icc-ubuntu-2204", + "quay.io/mariadb-foundation/bb-worker:ubuntu22.04-icc", + jobs=jobs, + save_packages=False, + ) + addWorker( + w_name, + i, + "-ubuntu-2004", + "quay.io/mariadb-foundation/bb-worker:ubuntu20.04", + jobs=jobs, + save_packages=True, + ) ## Add Valgrind to more powerful machhines -addWorker('amd-bbw', 1, '-valgrind-ubuntu-2204', "quay.io/mariadb-foundation/bb-worker:ubuntu22.04-valgrind", jobs=20, save_packages=False) -addWorker('amd-bbw', 2, '-valgrind-ubuntu-2204', "quay.io/mariadb-foundation/bb-worker:ubuntu22.04-valgrind", jobs=20, save_packages=False) -addWorker('hz-bbw', 6, '-valgrind-ubuntu-2204', "quay.io/mariadb-foundation/bb-worker:ubuntu22.04-valgrind", jobs=20, save_packages=False) +addWorker( + "amd-bbw", + 1, + "-valgrind-ubuntu-2204", + "quay.io/mariadb-foundation/bb-worker:ubuntu22.04-valgrind", + jobs=20, + save_packages=False, +) +addWorker( + "amd-bbw", + 2, + 
"-valgrind-ubuntu-2204", + "quay.io/mariadb-foundation/bb-worker:ubuntu22.04-valgrind", + jobs=20, + save_packages=False, +) +addWorker( + "hz-bbw", + 6, + "-valgrind-ubuntu-2204", + "quay.io/mariadb-foundation/bb-worker:ubuntu22.04-valgrind", + jobs=20, + save_packages=False, +) -addWorker('hz-bbw', 1, '-msan-clang-debian-11', "quay.io/mariadb-foundation/bb-worker:debian11-msan", jobs=20, save_packages=False) -addWorker('hz-bbw', 4, '-msan-clang-debian-11', "quay.io/mariadb-foundation/bb-worker:debian11-msan", jobs=20, save_packages=False) -addWorker('hz-bbw', 5, '-msan-clang-debian-11', "quay.io/mariadb-foundation/bb-worker:debian11-msan", jobs=30, save_packages=False) +addWorker( + "hz-bbw", + 1, + "-msan-clang-debian-11", + "quay.io/mariadb-foundation/bb-worker:debian11-msan", + jobs=20, + save_packages=False, +) +addWorker( + "hz-bbw", + 4, + "-msan-clang-debian-11", + "quay.io/mariadb-foundation/bb-worker:debian11-msan", + jobs=20, + save_packages=False, +) +addWorker( + "hz-bbw", + 5, + "-msan-clang-debian-11", + "quay.io/mariadb-foundation/bb-worker:debian11-msan", + jobs=30, + save_packages=False, +) -addWorker('hz-bbw', 2, '-debian-12', "quay.io/mariadb-foundation/bb-worker:debian12", jobs=20, save_packages=False) -addWorker('hz-bbw', 5, '-debian-12', "quay.io/mariadb-foundation/bb-worker:debian12", jobs=20, save_packages=False) +addWorker( + "hz-bbw", + 2, + "-debian-12", + "quay.io/mariadb-foundation/bb-worker:debian12", + jobs=20, + save_packages=False, +) +addWorker( + "hz-bbw", + 5, + "-debian-12", + "quay.io/mariadb-foundation/bb-worker:debian12", + jobs=20, + save_packages=False, +) -addWorker('aarch64-bbw', 6, '-ubuntu-2004-debug', "quay.io/mariadb-foundation/bb-worker:ubuntu20.04", jobs=10, save_packages=True) -addWorker('aarch64-bbw', 6, '-debian-10-bintar', "quay.io/mariadb-foundation/bb-worker:debian10-bintar", jobs=10, save_packages=True) +addWorker( + "aarch64-bbw", + 6, + "-ubuntu-2004-debug", + 
"quay.io/mariadb-foundation/bb-worker:ubuntu20.04", + jobs=10, + save_packages=True, +) +addWorker( + "aarch64-bbw", + 6, + "-debian-10-bintar", + "quay.io/mariadb-foundation/bb-worker:debian10-bintar", + jobs=10, + save_packages=True, +) -addWorker('hz-bbw', 5, '-centos-7-bintar', "quay.io/mariadb-foundation/bb-worker:centos7-bintar", jobs=10, save_packages=True) +addWorker( + "hz-bbw", + 5, + "-centos-7-bintar", + "quay.io/mariadb-foundation/bb-worker:centos7-bintar", + jobs=10, + save_packages=True, +) ####### FACTORY CODE @@ -216,605 +416,1546 @@ f_rpm_autobake = getRpmAutobakeFactory(mtrDbPool) ## f_asan_ubsan_build f_asan_ubsan_build = util.BuildFactory() -f_asan_ubsan_build.addStep(steps.ShellCommand(name="Environment details", command=['bash', '-c', 'date -u && uname -a && ulimit -a'])) -f_asan_ubsan_build.addStep(steps.SetProperty(property="dockerfile", value=util.Interpolate("%(kw:url)s", url=dockerfile), description="dockerfile")) +f_asan_ubsan_build.addStep( + steps.ShellCommand( + name="Environment details", + command=["bash", "-c", "date -u && uname -a && ulimit -a"], + ) +) +f_asan_ubsan_build.addStep( + steps.SetProperty( + property="dockerfile", + value=util.Interpolate("%(kw:url)s", url=dockerfile), + description="dockerfile", + ) +) f_asan_ubsan_build.addStep(downloadSourceTarball()) -f_asan_ubsan_build.addStep(steps.ShellCommand(command=util.Interpolate("tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1"))) -f_asan_ubsan_build.addStep(steps.ShellCommand(name="create html log file", command=['bash', '-c', util.Interpolate(getHTMLLogString(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) +f_asan_ubsan_build.addStep( + steps.ShellCommand( + command=util.Interpolate( + "tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1" + ) + ) +) +f_asan_ubsan_build.addStep( + steps.ShellCommand( + name="create html log file", + command=[ + 
"bash", + "-c", + util.Interpolate( + getHTMLLogString(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) # build steps -f_asan_ubsan_build.addStep(steps.ShellCommand(command='echo "leak:libtasn1\nleak:libgnutls\nleak:libgmp" > mysql-test/lsan.supp', doStepIf=filterBranch)) -f_asan_ubsan_build.addStep(steps.ShellCommand(command='cat mysql-test/lsan.supp', doStepIf=filterBranch)) -f_asan_ubsan_build.addStep(steps.Compile(command= - ["sh", "-c", util.Interpolate('cmake . -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DCMAKE_BUILD_TYPE=Debug -DWITH_ASAN=YES -DWITH_UBSAN=YES -DPLUGIN_TOKUDB=NO -DPLUGIN_MROONGA=NO -DPLUGIN_OQGRAPH=NO -DPLUGIN_ROCKSDB=NO -DPLUGIN_CONNECT=NO -DWITH_SAFEMALLOC=OFF -DWITH_ZLIB=bundled -DWITH_SSL=bundled -DWITH_DBUG_TRACE=OFF -DWITH_SAFEMALLOC=OFF && make VERBOSE=1 -j%(kw:jobs)s package', jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], haltOnFailure="true")) -f_asan_ubsan_build.addStep(steps.MTR( - logfiles={"mysqld*": "/buildbot/mysql_logs.html", "syslog": "/var/log/syslog"}, - command=["sh", "-c", util.Interpolate('cd mysql-test && MTR_FEEDBACK_PLUGIN=1 ASAN_OPTIONS="abort_on_error=1" LSAN_OPTIONS="print_suppressions=0,suppressions=`pwd`/lsan.supp" perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --mem --parallel=$(expr %(kw:jobs)s \* 2)', jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=950, - haltOnFailure="true", - parallel=mtrJobsMultiplier, - dbpool=mtrDbPool, - autoCreateTables=True, - env=MTR_ENV, - )) -f_asan_ubsan_build.addStep(steps.ShellCommand(name="move mysqld log files", alwaysRun=True, command=['bash', '-c', util.Interpolate(moveMTRLogs(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) -f_asan_ubsan_build.addStep(steps.ShellCommand(name="create var archive", alwaysRun=True, command=['bash', '-c', util.Interpolate(createVar())], doStepIf=hasFailed)) 
-f_asan_ubsan_build.addStep(steps.DirectoryUpload(name="save mysqld log files", compress="bz2", alwaysRun=True, workersrc='/buildbot/logs/', masterdest=util.Interpolate('/srv/buildbot/packages/' + '%(prop:tarbuildnum)s' + '/logs/' + '%(prop:buildername)s' ))) -f_asan_ubsan_build.addStep(steps.ShellCommand(name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True)) +f_asan_ubsan_build.addStep( + steps.ShellCommand( + command='echo "leak:libtasn1\nleak:libgnutls\nleak:libgmp" > mysql-test/lsan.supp', + doStepIf=filterBranch, + ) +) +f_asan_ubsan_build.addStep( + steps.ShellCommand(command="cat mysql-test/lsan.supp", doStepIf=filterBranch) +) +f_asan_ubsan_build.addStep( + steps.Compile( + command=[ + "sh", + "-c", + util.Interpolate( + "cmake . -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DCMAKE_BUILD_TYPE=Debug -DWITH_ASAN=YES -DWITH_UBSAN=YES -DPLUGIN_TOKUDB=NO -DPLUGIN_MROONGA=NO -DPLUGIN_OQGRAPH=NO -DPLUGIN_ROCKSDB=NO -DPLUGIN_CONNECT=NO -DWITH_SAFEMALLOC=OFF -DWITH_ZLIB=bundled -DWITH_SSL=bundled -DWITH_DBUG_TRACE=OFF -DWITH_SAFEMALLOC=OFF && make VERBOSE=1 -j%(kw:jobs)s package", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + haltOnFailure="true", + ) +) +f_asan_ubsan_build.addStep( + steps.MTR( + logfiles={"mysqld*": "/buildbot/mysql_logs.html", "syslog": "/var/log/syslog"}, + command=[ + "sh", + "-c", + util.Interpolate( + 'cd mysql-test && MTR_FEEDBACK_PLUGIN=1 ASAN_OPTIONS="abort_on_error=1" LSAN_OPTIONS="print_suppressions=0,suppressions=`pwd`/lsan.supp" perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --mem --parallel=$(expr %(kw:jobs)s \* 2)', + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=950, + haltOnFailure="true", + parallel=mtrJobsMultiplier, + dbpool=mtrDbPool, + autoCreateTables=True, + env=MTR_ENV, + ) +) +f_asan_ubsan_build.addStep( + steps.ShellCommand( + name="move mysqld log files", + 
alwaysRun=True, + command=[ + "bash", + "-c", + util.Interpolate( + moveMTRLogs(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) +f_asan_ubsan_build.addStep( + steps.ShellCommand( + name="create var archive", + alwaysRun=True, + command=["bash", "-c", util.Interpolate(createVar())], + doStepIf=hasFailed, + ) +) +f_asan_ubsan_build.addStep( + steps.DirectoryUpload( + name="save mysqld log files", + compress="bz2", + alwaysRun=True, + workersrc="/buildbot/logs/", + masterdest=util.Interpolate( + "/srv/buildbot/packages/" + + "%(prop:tarbuildnum)s" + + "/logs/" + + "%(prop:buildername)s" + ), + ) +) +f_asan_ubsan_build.addStep( + steps.ShellCommand( + name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True + ) +) ## f_asan_build f_asan_build = util.BuildFactory() -f_asan_build.addStep(steps.ShellCommand(name="Environment details", command=['bash', '-c', 'date -u && uname -a && ulimit -a'])) -f_asan_build.addStep(steps.SetProperty(property="dockerfile", value=util.Interpolate("%(kw:url)s", url=dockerfile), description="dockerfile")) +f_asan_build.addStep( + steps.ShellCommand( + name="Environment details", + command=["bash", "-c", "date -u && uname -a && ulimit -a"], + ) +) +f_asan_build.addStep( + steps.SetProperty( + property="dockerfile", + value=util.Interpolate("%(kw:url)s", url=dockerfile), + description="dockerfile", + ) +) f_asan_build.addStep(downloadSourceTarball()) -f_asan_build.addStep(steps.ShellCommand(command=util.Interpolate("tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1"))) -f_asan_build.addStep(steps.ShellCommand(name="create html log file", command=['bash', '-c', util.Interpolate(getHTMLLogString(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) +f_asan_build.addStep( + steps.ShellCommand( + command=util.Interpolate( + "tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1" 
+ ) + ) +) +f_asan_build.addStep( + steps.ShellCommand( + name="create html log file", + command=[ + "bash", + "-c", + util.Interpolate( + getHTMLLogString(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) # build steps -f_asan_build.addStep(steps.ShellCommand(command='echo "leak:libtasn1\nleak:libgnutls\nleak:libgmp" > mysql-test/lsan.supp', doStepIf=filterBranch)) -f_asan_build.addStep(steps.ShellCommand(command='cat mysql-test/lsan.supp', doStepIf=filterBranch)) -f_asan_build.addStep(steps.Compile(command= - ["sh", "-c", util.Interpolate('cmake . -DCMAKE_C_COMPILER=clang-14 -DCMAKE_CXX_COMPILER=clang++-14 -DCMAKE_C_FLAGS="-O2 -msse4.2 -Wno-unused-command-line-argument -fdebug-macro -Wno-inconsistent-missing-override" -DCMAKE_CXX_FLAGS="-O2 -msse4.2 -Wno-unused-command-line-argument -fdebug-macro -Wno-inconsistent-missing-override" -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DCMAKE_BUILD_TYPE=Debug -DWITH_ASAN=YES -DPLUGIN_TOKUDB=NO -DPLUGIN_MROONGA=NO -DPLUGIN_OQGRAPH=NO -DPLUGIN_ROCKSDB=NO -DPLUGIN_CONNECT=NO -DWITH_SAFEMALLOC=OFF -DWITH_ZLIB=bundled -DWITH_SSL=bundled -DWITH_DBUG_TRACE=OFF -DWITH_SAFEMALLOC=OFF && make VERBOSE=1 -j%(kw:jobs)s package', jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], haltOnFailure="true")) -f_asan_build.addStep(steps.MTR( - logfiles={"mysqld*": "/buildbot/mysql_logs.html", "syslog": "/var/log/syslog"}, - command=["sh", "-c", util.Interpolate('cd mysql-test && MTR_FEEDBACK_PLUGIN=1 ASAN_OPTIONS="abort_on_error=1" LSAN_OPTIONS="print_suppressions=0,suppressions=`pwd`/lsan.supp" perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --mem --parallel=$(expr %(kw:jobs)s \* 2)', jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=950, - haltOnFailure="true", - parallel=mtrJobsMultiplier, - dbpool=mtrDbPool, - autoCreateTables=True, - env=MTR_ENV, - )) 
-f_asan_build.addStep(steps.ShellCommand(name="move mysqld log files", alwaysRun=True, command=['bash', '-c', util.Interpolate(moveMTRLogs(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) -f_asan_build.addStep(steps.ShellCommand(name="create var archive", alwaysRun=True, command=['bash', '-c', util.Interpolate(createVar())], doStepIf=hasFailed)) -f_asan_build.addStep(steps.DirectoryUpload(name="save mysqld log files", compress="bz2", alwaysRun=True, workersrc='/buildbot/logs/', masterdest=util.Interpolate('/srv/buildbot/packages/' + '%(prop:tarbuildnum)s' + '/logs/' + '%(prop:buildername)s' ))) -f_asan_build.addStep(steps.ShellCommand(name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True)) +f_asan_build.addStep( + steps.ShellCommand( + command='echo "leak:libtasn1\nleak:libgnutls\nleak:libgmp" > mysql-test/lsan.supp', + doStepIf=filterBranch, + ) +) +f_asan_build.addStep( + steps.ShellCommand(command="cat mysql-test/lsan.supp", doStepIf=filterBranch) +) +f_asan_build.addStep( + steps.Compile( + command=[ + "sh", + "-c", + util.Interpolate( + 'cmake . 
-DCMAKE_C_COMPILER=clang-14 -DCMAKE_CXX_COMPILER=clang++-14 -DCMAKE_C_FLAGS="-O2 -msse4.2 -Wno-unused-command-line-argument -fdebug-macro -Wno-inconsistent-missing-override" -DCMAKE_CXX_FLAGS="-O2 -msse4.2 -Wno-unused-command-line-argument -fdebug-macro -Wno-inconsistent-missing-override" -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DCMAKE_BUILD_TYPE=Debug -DWITH_ASAN=YES -DPLUGIN_TOKUDB=NO -DPLUGIN_MROONGA=NO -DPLUGIN_OQGRAPH=NO -DPLUGIN_ROCKSDB=NO -DPLUGIN_CONNECT=NO -DWITH_SAFEMALLOC=OFF -DWITH_ZLIB=bundled -DWITH_SSL=bundled -DWITH_DBUG_TRACE=OFF -DWITH_SAFEMALLOC=OFF && make VERBOSE=1 -j%(kw:jobs)s package', + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + haltOnFailure="true", + ) +) +f_asan_build.addStep( + steps.MTR( + logfiles={"mysqld*": "/buildbot/mysql_logs.html", "syslog": "/var/log/syslog"}, + command=[ + "sh", + "-c", + util.Interpolate( + 'cd mysql-test && MTR_FEEDBACK_PLUGIN=1 ASAN_OPTIONS="abort_on_error=1" LSAN_OPTIONS="print_suppressions=0,suppressions=`pwd`/lsan.supp" perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --mem --parallel=$(expr %(kw:jobs)s \* 2)', + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=950, + haltOnFailure="true", + parallel=mtrJobsMultiplier, + dbpool=mtrDbPool, + autoCreateTables=True, + env=MTR_ENV, + ) +) +f_asan_build.addStep( + steps.ShellCommand( + name="move mysqld log files", + alwaysRun=True, + command=[ + "bash", + "-c", + util.Interpolate( + moveMTRLogs(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) +f_asan_build.addStep( + steps.ShellCommand( + name="create var archive", + alwaysRun=True, + command=["bash", "-c", util.Interpolate(createVar())], + doStepIf=hasFailed, + ) +) +f_asan_build.addStep( + steps.DirectoryUpload( + name="save mysqld log files", + compress="bz2", + alwaysRun=True, + workersrc="/buildbot/logs/", + 
masterdest=util.Interpolate( + "/srv/buildbot/packages/" + + "%(prop:tarbuildnum)s" + + "/logs/" + + "%(prop:buildername)s" + ), + ) +) +f_asan_build.addStep( + steps.ShellCommand( + name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True + ) +) ## f_msan_build f_msan_build = util.BuildFactory() -f_msan_build.addStep(steps.ShellCommand(name="Environment details", command=['bash', '-c', 'date -u && uname -a && ulimit -a'])) -f_msan_build.addStep(steps.SetProperty(property="dockerfile", value=util.Interpolate("%(kw:url)s", url=dockerfile), description="dockerfile")) -f_msan_build.addStep(steps.ShellCommand(name="create html log file", command=['bash', '-c', util.Interpolate(getHTMLLogString(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) +f_msan_build.addStep( + steps.ShellCommand( + name="Environment details", + command=["bash", "-c", "date -u && uname -a && ulimit -a"], + ) +) +f_msan_build.addStep( + steps.SetProperty( + property="dockerfile", + value=util.Interpolate("%(kw:url)s", url=dockerfile), + description="dockerfile", + ) +) +f_msan_build.addStep( + steps.ShellCommand( + name="create html log file", + command=[ + "bash", + "-c", + util.Interpolate( + getHTMLLogString(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) f_msan_build.addStep(downloadSourceTarball()) -f_msan_build.addStep(steps.ShellCommand(command=util.Interpolate("tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1"))) +f_msan_build.addStep( + steps.ShellCommand( + command=util.Interpolate( + "tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1" + ) + ) +) # build steps -f_msan_build.addStep(steps.ShellCommand(command='ls /msan-libs')) -f_msan_build.addStep(steps.Compile(command= - ["bash", "-xc", util.Interpolate('cmake . 
-DCMAKE_C_COMPILER=%(kw:c_compiler)s -DCMAKE_CXX_COMPILER=%(kw:cxx_compiler)s -DCMAKE_C_FLAGS="-O2 -Wno-unused-command-line-argument -fdebug-macro" -DCMAKE_CXX_FLAGS="-stdlib=libc++ -O2 -Wno-unused-command-line-argument -fdebug-macro" -DWITH_EMBEDDED_SERVER=OFF -DWITH_UNIT_TESTS=OFF -DCMAKE_BUILD_TYPE=Debug -DWITH_INNODB_{BZIP2,LZ4,LZMA,LZO,SNAPPY}=OFF -DPLUGIN_{ARCHIVE,TOKUDB,MROONGA,OQGRAPH,ROCKSDB,CONNECT,SPIDER}=NO -DWITH_SAFEMALLOC=OFF -DWITH_{ZLIB,SSL,PCRE}=bundled -DHAVE_LIBAIO_H=0 -DCMAKE_DISABLE_FIND_PACKAGE_{URING,LIBAIO}=1 -DWITH_MSAN=ON -DWITH_DBUG_TRACE=OFF && make -j%(kw:jobs)s package', - jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'), - c_compiler=util.Property('c_compiler', default='clang'), - cxx_compiler=util.Property('cxx_compiler', default='clang++') - )], - haltOnFailure="true")) -f_msan_build.addStep(steps.MTR( - logfiles={"mysqld*": "/buildbot/mysql_logs.html"}, - command=["bash", "-xc", util.Interpolate('cd mysql-test && LD_LIBRARY_PATH=/msan-libs MSAN_OPTIONS=abort_on_error=1:poison_in_dtor=0 ./mtr --mem --big-test --force --retry=0 --skip-test=".*compression.*|rpl\.rpl_non_direct_row_mixing_engines|perfschema\.table_io_aggregate_hist_\du_\dt|perfschema\.transaction_nested_events|perfschema\.events_waits_current_MDEV-29091|perfschema\.memory_aggregate_no_a_no_u_no_h|main\.show_explain|main\.show_analyze_json" --max-test-fail=100 --parallel=$(expr %(kw:jobs)s \* 2)', jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=950, - haltOnFailure="true", - parallel=mtrJobsMultiplier, - dbpool=mtrDbPool, - autoCreateTables=True, - env=MTR_ENV, - )) -f_msan_build.addStep(steps.ShellCommand(name="move mysqld log files", alwaysRun=True, command=['bash', '-c', util.Interpolate(moveMTRLogs(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) -f_msan_build.addStep(steps.ShellCommand(name="create var archive", alwaysRun=True, command=['bash', '-c', util.Interpolate(createVar())], 
doStepIf=hasFailed)) -f_msan_build.addStep(steps.DirectoryUpload(name="save mysqld log files", compress="bz2", alwaysRun=True, workersrc='/buildbot/logs/', masterdest=util.Interpolate('/srv/buildbot/packages/' + '%(prop:tarbuildnum)s' + '/logs/' + '%(prop:buildername)s' ))) -f_msan_build.addStep(steps.ShellCommand(name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True)) +f_msan_build.addStep(steps.ShellCommand(command="ls /msan-libs")) +f_msan_build.addStep( + steps.Compile( + command=[ + "bash", + "-xc", + util.Interpolate( + 'cmake . -DCMAKE_C_COMPILER=%(kw:c_compiler)s -DCMAKE_CXX_COMPILER=%(kw:cxx_compiler)s -DCMAKE_C_FLAGS="-O2 -Wno-unused-command-line-argument -fdebug-macro" -DCMAKE_CXX_FLAGS="-stdlib=libc++ -O2 -Wno-unused-command-line-argument -fdebug-macro" -DWITH_EMBEDDED_SERVER=OFF -DWITH_UNIT_TESTS=OFF -DCMAKE_BUILD_TYPE=Debug -DWITH_INNODB_{BZIP2,LZ4,LZMA,LZO,SNAPPY}=OFF -DPLUGIN_{ARCHIVE,TOKUDB,MROONGA,OQGRAPH,ROCKSDB,CONNECT,SPIDER}=NO -DWITH_SAFEMALLOC=OFF -DWITH_{ZLIB,SSL,PCRE}=bundled -DHAVE_LIBAIO_H=0 -DCMAKE_DISABLE_FIND_PACKAGE_{URING,LIBAIO}=1 -DWITH_MSAN=ON -DWITH_DBUG_TRACE=OFF && make -j%(kw:jobs)s package', + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + c_compiler=util.Property("c_compiler", default="clang"), + cxx_compiler=util.Property("cxx_compiler", default="clang++"), + ), + ], + haltOnFailure="true", + ) +) +f_msan_build.addStep( + steps.MTR( + logfiles={"mysqld*": "/buildbot/mysql_logs.html"}, + command=[ + "bash", + "-xc", + util.Interpolate( + 'cd mysql-test && LD_LIBRARY_PATH=/msan-libs MSAN_OPTIONS=abort_on_error=1:poison_in_dtor=0 ./mtr --mem --big-test --force --retry=0 --skip-test=".*compression.*|rpl\.rpl_non_direct_row_mixing_engines|perfschema\.table_io_aggregate_hist_\du_\dt|perfschema\.transaction_nested_events|perfschema\.events_waits_current_MDEV-29091|perfschema\.memory_aggregate_no_a_no_u_no_h|main\.show_explain|main\.show_analyze_json" --max-test-fail=100 --parallel=$(expr 
%(kw:jobs)s \* 2)', + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=950, + haltOnFailure="true", + parallel=mtrJobsMultiplier, + dbpool=mtrDbPool, + autoCreateTables=True, + env=MTR_ENV, + ) +) +f_msan_build.addStep( + steps.ShellCommand( + name="move mysqld log files", + alwaysRun=True, + command=[ + "bash", + "-c", + util.Interpolate( + moveMTRLogs(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) +f_msan_build.addStep( + steps.ShellCommand( + name="create var archive", + alwaysRun=True, + command=["bash", "-c", util.Interpolate(createVar())], + doStepIf=hasFailed, + ) +) +f_msan_build.addStep( + steps.DirectoryUpload( + name="save mysqld log files", + compress="bz2", + alwaysRun=True, + workersrc="/buildbot/logs/", + masterdest=util.Interpolate( + "/srv/buildbot/packages/" + + "%(prop:tarbuildnum)s" + + "/logs/" + + "%(prop:buildername)s" + ), + ) +) +f_msan_build.addStep( + steps.ShellCommand( + name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True + ) +) ## f_valgrind_build f_valgrind_build = util.BuildFactory() -f_valgrind_build.addStep(steps.ShellCommand(name="Environment details", command=['bash', '-c', 'date -u && uname -a && ulimit -a'])) -f_valgrind_build.addStep(steps.SetProperty(property="dockerfile", value=util.Interpolate("%(kw:url)s", url=dockerfile), description="dockerfile")) -f_valgrind_build.addStep(steps.ShellCommand(name="create html log file", command=['bash', '-c', util.Interpolate(getHTMLLogString(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) +f_valgrind_build.addStep( + steps.ShellCommand( + name="Environment details", + command=["bash", "-c", "date -u && uname -a && ulimit -a"], + ) +) +f_valgrind_build.addStep( + steps.SetProperty( + property="dockerfile", + value=util.Interpolate("%(kw:url)s", url=dockerfile), + description="dockerfile", + ) +) +f_valgrind_build.addStep( + steps.ShellCommand( + name="create 
html log file", + command=[ + "bash", + "-c", + util.Interpolate( + getHTMLLogString(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) f_valgrind_build.addStep(downloadSourceTarball()) -f_valgrind_build.addStep(steps.ShellCommand(command=util.Interpolate("tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1"))) +f_valgrind_build.addStep( + steps.ShellCommand( + command=util.Interpolate( + "tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1" + ) + ) +) # build steps -f_valgrind_build.addStep(steps.Compile(command= - ["sh", "-c", util.Interpolate('cmake . -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DENABLE_ASSEMBLER=1 -DWITH_EXTRA_CHARSETS=complex -DENABLE_THREAD_SAFE_CLIENT=1 -DWITH_BIG_TABLES=1 -DWITH_PLUGIN_ARIA=1 -DWITH_ARIA_TMP_TABLES=1 -DWITH_JEMALLOC=NO -DCMAKE_BUILD_TYPE=Debug -DSECURITY_HARDENED=OFF -DWITH_VALGRIND=1 -DHAVE_LIBAIO_H=0 -DCMAKE_DISABLE_FIND_PACKAGE_URING=1 -DCMAKE_DISABLE_FIND_PACKAGE_LIBAIO=1 -DWITH_SSL=bundled -DWITH_MAX=AUTO -DWITH_EMBEDDED_SERVER=1 -DWITH_LIBEVENT=bundled -DPLUGIN_PLUGIN_FILE_KEY_MANAGEMENT=NO -DPLUGIN_TEST_SQL_DISCOVERY=DYNAMIC -DPLUGIN_TOKUDB=NO -DPLUGIN_ROCKSDB=NO -DPLUGIN_AUTH_GSSAPI=NO -DENABLE_LOCAL_INFILE=1 -DMYSQL_SERVER_SUFFIX=-valgrind-max -DWITH_DBUG_TRACE=OFF && make -j%(kw:jobs)s', jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], haltOnFailure="true")) -f_valgrind_build.addStep(steps.MTR( - logfiles={"mysqld*": "/buildbot/mysql_logs.html"}, - command=["sh", "-c", util.Interpolate('cd mysql-test && perl mysql-test-run.pl --valgrind="--leak-check=summary --gen-suppressions=yes --num-callers=10" --skip-test="encryption\.*|^perfschema\.short_option_1$" --mysqld="--loose-innodb-purge-threads=1" --force --retry=0 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --parallel=$(expr %(kw:jobs)s \* 2)', jobs=util.Property('jobs', 
default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=950, - haltOnFailure="true", - parallel=mtrJobsMultiplier, - dbpool=mtrDbPool, - autoCreateTables=True, - env=MTR_ENV, - )) -f_valgrind_build.addStep(steps.ShellCommand(name="move mysqld log files", alwaysRun=True, command=['bash', '-c', util.Interpolate(moveMTRLogs(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) -f_valgrind_build.addStep(steps.ShellCommand(name="create var archive", alwaysRun=True, command=['bash', '-c', util.Interpolate(createVar())], doStepIf=hasFailed)) -f_valgrind_build.addStep(steps.DirectoryUpload(name="save mysqld log files", compress="bz2", alwaysRun=True, workersrc='/buildbot/logs/', masterdest=util.Interpolate('/srv/buildbot/packages/' + '%(prop:tarbuildnum)s' + '/logs/' + '%(prop:buildername)s' ))) -f_valgrind_build.addStep(steps.ShellCommand(name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True)) +f_valgrind_build.addStep( + steps.Compile( + command=[ + "sh", + "-c", + util.Interpolate( + "cmake . 
-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DENABLE_ASSEMBLER=1 -DWITH_EXTRA_CHARSETS=complex -DENABLE_THREAD_SAFE_CLIENT=1 -DWITH_BIG_TABLES=1 -DWITH_PLUGIN_ARIA=1 -DWITH_ARIA_TMP_TABLES=1 -DWITH_JEMALLOC=NO -DCMAKE_BUILD_TYPE=Debug -DSECURITY_HARDENED=OFF -DWITH_VALGRIND=1 -DHAVE_LIBAIO_H=0 -DCMAKE_DISABLE_FIND_PACKAGE_URING=1 -DCMAKE_DISABLE_FIND_PACKAGE_LIBAIO=1 -DWITH_SSL=bundled -DWITH_MAX=AUTO -DWITH_EMBEDDED_SERVER=1 -DWITH_LIBEVENT=bundled -DPLUGIN_PLUGIN_FILE_KEY_MANAGEMENT=NO -DPLUGIN_TEST_SQL_DISCOVERY=DYNAMIC -DPLUGIN_TOKUDB=NO -DPLUGIN_ROCKSDB=NO -DPLUGIN_AUTH_GSSAPI=NO -DENABLE_LOCAL_INFILE=1 -DMYSQL_SERVER_SUFFIX=-valgrind-max -DWITH_DBUG_TRACE=OFF && make -j%(kw:jobs)s", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + haltOnFailure="true", + ) +) +f_valgrind_build.addStep( + steps.MTR( + logfiles={"mysqld*": "/buildbot/mysql_logs.html"}, + command=[ + "sh", + "-c", + util.Interpolate( + 'cd mysql-test && perl mysql-test-run.pl --valgrind="--leak-check=summary --gen-suppressions=yes --num-callers=10" --skip-test="encryption\.*|^perfschema\.short_option_1$" --mysqld="--loose-innodb-purge-threads=1" --force --retry=0 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --parallel=$(expr %(kw:jobs)s \* 2)', + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=950, + haltOnFailure="true", + parallel=mtrJobsMultiplier, + dbpool=mtrDbPool, + autoCreateTables=True, + env=MTR_ENV, + ) +) +f_valgrind_build.addStep( + steps.ShellCommand( + name="move mysqld log files", + alwaysRun=True, + command=[ + "bash", + "-c", + util.Interpolate( + moveMTRLogs(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) +f_valgrind_build.addStep( + steps.ShellCommand( + name="create var archive", + alwaysRun=True, + command=["bash", "-c", util.Interpolate(createVar())], + doStepIf=hasFailed, + ) +) +f_valgrind_build.addStep( + 
steps.DirectoryUpload( + name="save mysqld log files", + compress="bz2", + alwaysRun=True, + workersrc="/buildbot/logs/", + masterdest=util.Interpolate( + "/srv/buildbot/packages/" + + "%(prop:tarbuildnum)s" + + "/logs/" + + "%(prop:buildername)s" + ), + ) +) +f_valgrind_build.addStep( + steps.ShellCommand( + name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True + ) +) ## f_big_test f_big_test = util.BuildFactory() -f_big_test.addStep(steps.ShellCommand(name="Environment details", command=['bash', '-c', 'date -u && uname -a && ulimit -a'])) -f_big_test.addStep(steps.SetProperty(property="dockerfile", value=util.Interpolate("%(kw:url)s", url=dockerfile), description="dockerfile")) -f_big_test.addStep(steps.ShellCommand(name="create html log file", command=['bash', '-c', util.Interpolate(getHTMLLogString(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) -# get the source tarball and extract it -f_big_test.addStep(steps.FileDownload(mastersrc=util.Interpolate("/srv/buildbot/packages/" + "%(prop:tarbuildnum)s" + "/" + "%(prop:mariadb_version)s" + ".tar.gz"), - workerdest=util.Interpolate("%(prop:mariadb_version)s" + ".tar.gz"))) -f_big_test.addStep(steps.ShellCommand(command=util.Interpolate("tar -xvzf " + "%(prop:mariadb_version)s" + ".tar.gz --strip-components=1"))) -# build steps -f_big_test.addStep(steps.Compile(command= - ["sh", "-c", util.Interpolate("export PATH=/usr/lib/ccache:/usr/lib64/ccache:$PATH && cmake . 
-DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DPLUGIN_ROCKSDB=NO -DPLUGIN_TOKUDB=NO -DPLUGIN_MROONGA=NO -DPLUGIN_SPIDER=NO -DPLUGIN_OQGRAPH=NO -DPLUGIN_SPHINX=NO && make -j%(kw:jobs)s VERBOSE=1 package", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], env={'CCACHE_DIR':'/mnt/ccache'})) -f_big_test.addStep(steps.MTR( - logfiles={"mysqld*": "/buildbot/mysql_logs.html"}, - command=["sh", "-c", util.Interpolate("cd mysql-test && exec perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --big --mem --parallel=$(expr %(kw:jobs)s \* 2) --skip-test=archive.archive-big", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=950, - dbpool=mtrDbPool, - parallel=mtrJobsMultiplier, - env=MTR_ENV, - )) -f_big_test.addStep(steps.ShellCommand(name="move mysqld log files", alwaysRun=True, command=['bash', '-c', util.Interpolate(moveMTRLogs(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) -f_big_test.addStep(steps.ShellCommand(name="create var archive", alwaysRun=True, command=['bash', '-c', util.Interpolate(createVar())], doStepIf=hasFailed)) -f_big_test.addStep(steps.DirectoryUpload(name="save mysqld log files", compress="bz2", alwaysRun=True, workersrc='/buildbot/logs/', masterdest=util.Interpolate('/srv/buildbot/packages/' + '%(prop:tarbuildnum)s' + '/logs/' + '%(prop:buildername)s' ))) -# create package and upload to master -f_big_test.addStep(steps.SetPropertyFromCommand(command="basename mariadb-*-linux-*.tar.gz", property="mariadb_binary")) -#f_big_test.addStep(steps.ShellCommand(name='save_packages', timeout=7200, haltOnFailure=True, command=util.Interpolate('mkdir -p ' + '/packages/' + '%(prop:tarbuildnum)s' + '/' + '%(prop:buildername)s'+ ' && sha256sum %(prop:mariadb_binary)s >> sha256sums.txt && cp ' + '%(prop:mariadb_binary)s sha256sums.txt' + ' /packages/' + 
'%(prop:tarbuildnum)s' + '/' + '%(prop:buildername)s' + '/' + ' && sync /packages/' + '%(prop:tarbuildnum)s'), doStepIf=savePackage)) -f_big_test.addStep(steps.ShellCommand(name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True)) - -## f_full_test -f_full_test = util.BuildFactory() -f_full_test.addStep(steps.ShellCommand(name="Environment details", command=['bash', '-c', 'date -u && uname -a && ulimit -a'])) -f_full_test.addStep(steps.SetProperty(property="dockerfile", value=util.Interpolate("%(kw:url)s", url=dockerfile), description="dockerfile")) -# get the source tarball and extract it -f_full_test.addStep(steps.FileDownload(mastersrc=util.Interpolate("/srv/buildbot/packages/" + "%(prop:tarbuildnum)s" + "/" + "%(prop:mariadb_version)s" + ".tar.gz"), - workerdest=util.Interpolate("%(prop:mariadb_version)s" + ".tar.gz"))) -f_full_test.addStep(steps.ShellCommand(command=util.Interpolate("tar -xvzf " + "%(prop:mariadb_version)s" + ".tar.gz --strip-components=1"))) -# build steps -f_full_test.addStep(steps.Compile(command= - ["sh", "-c", util.Interpolate("export PATH=/usr/lib/ccache:/usr/lib64/ccache:$PATH && cmake . 
-DBUILD_CONFIG=mysql_release -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DWITH_SSL=system -DWITH_JEMALLOC=auto -DWITH_EMBEDDED_SERVER=1 -DHAVE_EMBEDDED_PRIVILEGE_CONTROL=1 -DWITH_LIBARCHIVE=ON -Wno-dev && make -j%(kw:jobs)s VERBOSE=1 package", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], env={'CCACHE_DIR':'/mnt/ccache'})) -f_full_test.addStep(steps.MTR( - addLogs=True, - name="test emb", - command=["sh", "-c", util.Interpolate("cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --embedded-server --parallel=$(expr %(kw:jobs)s \* 2)", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=10800, - dbpool=mtrDbPool, - parallel=mtrJobsMultiplier, - env=MTR_ENV, - )) -f_full_test.addStep(steps.MTR( - addLogs=True, - name="test n", - command=["sh", "-c", util.Interpolate("cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --parallel=$(expr %(kw:jobs)s \* 2)", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=10800, - dbpool=mtrDbPool, - parallel=mtrJobsMultiplier, - env=MTR_ENV, - )) -f_full_test.addStep(steps.MTR( - addLogs=True, - name="test ps-protocol", - command=["sh", "-c", util.Interpolate("cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --ps-protocol --parallel=$(expr %(kw:jobs)s \* 2)", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=10800, - dbpool=mtrDbPool, - parallel=mtrJobsMultiplier, - env=MTR_ENV, - )) -f_full_test.addStep(steps.MTR( - addLogs=True, - name="test ps-embedded", - command=["sh", "-c", util.Interpolate("cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 
--max-save-datadir=10 --ps --embedded --mem --parallel=$(expr %(kw:jobs)s \* 2)", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=10800, - dbpool=mtrDbPool, - parallel=mtrJobsMultiplier, - env=MTR_ENV, - )) -f_full_test.addStep(steps.MTR( - addLogs=True, - name="test funcs_1,2,stress,jp with big", - command=["sh", "-c", util.Interpolate("cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --suite=funcs_1,funcs_2,stress,jp --big --mysqld=--open-files-limit=0 --mysqld=--log-warnings=1 --parallel=$(expr %(kw:jobs)s \* 2)", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=10800, - dbpool=mtrDbPool, - parallel=mtrJobsMultiplier, - env=MTR_ENV, - )) -f_full_test.addStep(steps.MTR( - addLogs=True, - name="test engines", - command=["sh", "-c", util.Interpolate("cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --suite=spider,spider/bg,engines/funcs,engines/iuds --big --mysqld=--open-files-limit=0 --mysqld=--log-warnings=1 --parallel=$(expr %(kw:jobs)s \* 2)", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=10800, - dbpool=mtrDbPool, - parallel=mtrJobsMultiplier, - env=MTR_ENV, - )) -f_full_test.addStep(steps.MTR( - addLogs=True, - name="test view-protocol", - command=["sh", "-c", util.Interpolate("cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --view-protocol --suite=main --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --parallel=$(expr %(kw:jobs)s \* 2)", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=10800, - dbpool=mtrDbPool, - parallel=mtrJobsMultiplier, - env=MTR_ENV, - )) -f_full_test.addStep(steps.ShellCommand(name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True)) - -## f_without_server 
-f_without_server = util.BuildFactory() -f_without_server.addStep(steps.ShellCommand(name="Environment details", command=['bash', '-c', 'date -u && uname -a && ulimit -a'])) -f_without_server.addStep(steps.SetProperty(property="dockerfile", value=util.Interpolate("%(kw:url)s", url=dockerfile), description="dockerfile")) -f_without_server.addStep(steps.ShellCommand(command="ls -la")) -f_without_server.addStep(downloadSourceTarball()) -f_without_server.addStep(steps.ShellCommand(command="ls -la")) -f_without_server.addStep(steps.ShellCommand(command=util.Interpolate("tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1"))) -f_without_server.addStep(steps.ShellCommand(command="ls -la")) -# build steps -f_without_server.addStep(steps.Compile(command= - ["sh", "-c", util.Interpolate("export PATH=/usr/lib/ccache:/usr/lib64/ccache:$PATH && cmake . -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_C_COMPILER=%(kw:c_compiler)s -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER=%(kw:cxx_compiler)s -DWITHOUT_SERVER=1 && make -j%(kw:jobs)s package", jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'), c_compiler=util.Property('c_compiler', default='gcc'), cxx_compiler=util.Property('cxx_compiler', default='g++'))], env={'CCACHE_DIR':'/mnt/ccache'}, haltOnFailure="true")) -# create package and upload to master -f_without_server.addStep(steps.SetPropertyFromCommand(command="basename mariadb-*-linux-*.tar.gz", property="mariadb_binary")) -f_without_server.addStep(steps.ShellCommand(name='save_packages', timeout=7200, haltOnFailure=True, command=util.Interpolate('mkdir -p ' + '/packages/' + '%(prop:tarbuildnum)s' + '/' + '%(prop:buildername)s'+ ' && sha256sum %(prop:mariadb_binary)s >> sha256sums.txt && cp ' + '%(prop:mariadb_binary)s sha256sums.txt' + ' /packages/' + '%(prop:tarbuildnum)s' + '/' + '%(prop:buildername)s' + '/' + ' && sync /packages/' + '%(prop:tarbuildnum)s'), 
doStepIf=savePackage)) -f_without_server.addStep(steps.ShellCommand(name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True)) - -## f_eco_php -f_eco_php = util.BuildFactory() -f_eco_php.addStep(steps.ShellCommand( - name="fetch_install_script", - command=["sh", "-xc", "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/installdb.sh -o /buildbot/installdb.sh && chmod a+x /buildbot/installdb.sh"])) -f_eco_php.addStep(steps.ShellCommand( - name="fetch_test_script", - command=["sh", "-xc", "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/test-php.sh -o /buildbot/test-php.sh && chmod a+x /buildbot/test-php.sh"])) -f_eco_php.addStep( +f_big_test.addStep( steps.ShellCommand( - name="fetching and installing database", + name="Environment details", + command=["bash", "-c", "date -u && uname -a && ulimit -a"], + ) +) +f_big_test.addStep( + steps.SetProperty( + property="dockerfile", + value=util.Interpolate("%(kw:url)s", url=dockerfile), + description="dockerfile", + ) +) +f_big_test.addStep( + steps.ShellCommand( + name="create html log file", command=[ - "sh", - "-xc", - util.Interpolate(""" - "/buildbot/installdb.sh " """ + os.getenv('ARTIFACTS_URL', default='https://ci.mariadb.org') + """/%(prop:tarbuildnum)s/%(prop:parentbuildername)s/%(prop:mariadb_binary)s" --plugin-load-add=auth _pam --pam_use_cleartext_plugin" - """), + "bash", + "-c", + util.Interpolate( + getHTMLLogString(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), ], ) ) -f_eco_php.addStep(steps.ShellCommand( - name="test PHP-7.1", - command=["sh", "-xc", "/buildbot/test-php.sh PHP-7.1"])) -f_eco_php.addStep(steps.ShellCommand( - name="test PHP-8.0", - command=["sh", "-xc", "/buildbot/test-php.sh PHP-8.0"])) -f_eco_php.addStep(steps.ShellCommand( - name="test PHP-8.1", - command=["sh", "-xc", "/buildbot/test-php.sh PHP-8.1"])) -f_eco_php.addStep(steps.ShellCommand( - name="test master", - 
command=["sh", "-xc", "/buildbot/test-php.sh"])) - -## f_eco_dbdeployer -f_eco_dbdeployer = util.BuildFactory() -f_eco_dbdeployer.addStep(steps.ShellCommand( - name="fetch_test_script", - command=["sh", "-xc", "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/test-dbdeployer.sh -o /buildbot/test-dbdeployer.sh && chmod a+x /buildbot/test-dbdeployer.sh"])) -f_eco_dbdeployer.addStep(steps.ShellCommand( - name="download if needed latest dbdeployer", - command=["sh", "-xc", "/buildbot/test-dbdeployer.sh dbdeployerfetch"])) -f_eco_dbdeployer.addStep( +# get the source tarball and extract it +f_big_test.addStep( + steps.FileDownload( + mastersrc=util.Interpolate( + "/srv/buildbot/packages/" + + "%(prop:tarbuildnum)s" + + "/" + + "%(prop:mariadb_version)s" + + ".tar.gz" + ), + workerdest=util.Interpolate("%(prop:mariadb_version)s" + ".tar.gz"), + ) +) +f_big_test.addStep( steps.ShellCommand( - name="fetching mariadb tarball", + command=util.Interpolate( + "tar -xvzf " + "%(prop:mariadb_version)s" + ".tar.gz --strip-components=1" + ) + ) +) +# build steps +f_big_test.addStep( + steps.Compile( command=[ "sh", - "-xc", - util.Interpolate(""" - '/buildbot/test-dbdeployer.sh init " """ + os.getenv('ARTIFACTS_URL', default='https://ci.mariadb.org') + """/%(prop:tarbuildnum)s/%(prop:parentbuildername)s/%(prop:mariadb_binary)s"' - """), + "-c", + util.Interpolate( + "export PATH=/usr/lib/ccache:/usr/lib64/ccache:$PATH && cmake . 
-DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DPLUGIN_ROCKSDB=NO -DPLUGIN_TOKUDB=NO -DPLUGIN_MROONGA=NO -DPLUGIN_SPIDER=NO -DPLUGIN_OQGRAPH=NO -DPLUGIN_SPHINX=NO && make -j%(kw:jobs)s VERBOSE=1 package", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), ], + env={"CCACHE_DIR": "/mnt/ccache"}, ) ) -f_eco_dbdeployer.addStep(steps.ShellCommand( - name="deploy single ma", - command=["sh", "-xc", util.Interpolate("/buildbot/test-dbdeployer.sh deploy single ma%(prop:mariadb_version)s")])) -f_eco_dbdeployer.addStep(steps.ShellCommand( - name="deploy replication ma", - command=["sh", "-xc", util.Interpolate("/buildbot/test-dbdeployer.sh deploy replication ma%(prop:mariadb_version)s")])) -f_eco_dbdeployer.addStep(steps.ShellCommand( - name="global test", - command=["sh", "-xc", "/buildbot/test-dbdeployer.sh global test"])) -f_eco_dbdeployer.addStep(steps.ShellCommand( - name="global replication", - command=["sh", "-xc", "/buildbot/test-dbdeployer.sh global test-replication"])) - -## f_eco_pymysql -f_eco_pymysql = util.BuildFactory() -f_eco_pymysql.addStep(steps.ShellCommand( - name="fetch_install_script", - command=["sh", "-xc", "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/installdb.sh -o /buildbot/installdb.sh && chmod a+x /buildbot/installdb.sh"])) -f_eco_pymysql.addStep(steps.ShellCommand( - name="fetch_test_script", - command=["sh", "-xc", "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/test-pymysql.sh -o /buildbot/test-pymysql.sh && chmod a+x /buildbot/test-pymysql.sh"])) -f_eco_dbdeployer.addStep( - steps.ShellCommand( - name="fetching and installing database", +f_big_test.addStep( + steps.MTR( + logfiles={"mysqld*": "/buildbot/mysql_logs.html"}, command=[ "sh", - "-xc", - util.Interpolate(""" - "/buildbot/installdb.sh " """ + os.getenv('ARTIFACTS_URL', default='https://ci.mariadb.org') + 
"""/%(prop:tarbuildnum)s/%(prop:parentbuildername)s/%(prop:mariadb_binary)s" - """), + "-c", + util.Interpolate( + "cd mysql-test && exec perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --big --mem --parallel=$(expr %(kw:jobs)s \* 2) --skip-test=archive.archive-big", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), ], + timeout=950, + dbpool=mtrDbPool, + parallel=mtrJobsMultiplier, + env=MTR_ENV, ) ) -f_eco_pymysql.addStep(steps.ShellCommand( - name="test pymysql-main", - command=["sh", "-xc", "/buildbot/test-pymysql.sh"])) -f_eco_pymysql.addStep(steps.ShellCommand( - name="test pymysql-v0.7.11", - command=["sh", "-xc", "/buildbot/test-pymysql.sh v0.7.11"])) - -## f_eco_mysqljs -f_eco_mysqljs = util.BuildFactory() -f_eco_mysqljs.addStep(steps.ShellCommand( - name="fetch_install_script", - command=["sh", "-xc", "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/installdb.sh -o /buildbot/installdb.sh && chmod a+x /buildbot/installdb.sh"])) -f_eco_mysqljs.addStep(steps.ShellCommand( - name="fetch_test_script", - command=["sh", "-xc", "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/test-mysqljs.sh -o /buildbot/test-mysqljs.sh && chmod a+x /buildbot/test-mysqljs.sh"])) -f_eco_dbdeployer.addStep( +f_big_test.addStep( steps.ShellCommand( - name="fetching and installing database", + name="move mysqld log files", + alwaysRun=True, command=[ - "sh", - "-xc", - util.Interpolate(""" - "/buildbot/installdb.sh " """ + os.getenv('ARTIFACTS_URL', default='https://ci.mariadb.org') + """/%(prop:tarbuildnum)s/%(prop:parentbuildername)s/%(prop:mariadb_binary)s" - """), + "bash", + "-c", + util.Interpolate( + moveMTRLogs(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), ], ) ) -f_eco_mysqljs.addStep(steps.ShellCommand( - name="test mysqljs-master", - command=["sh", "-xc", 
"/buildbot/test-mysqljs.sh"])) -f_eco_mysqljs.addStep(steps.ShellCommand( - name="test mysqljs-v2.18.1", - command=["sh", "-xc", "/buildbot/test-mysqljs.sh v2.18.1"])) - -## f_bintar -f_bintar = util.BuildFactory() -f_bintar.addStep(steps.ShellCommand(name="Environment details", command=['bash', '-c', 'date -u && uname -a && ulimit -a'])) -f_bintar.addStep(steps.SetProperty(property="dockerfile", value=util.Interpolate("%(kw:url)s", url=dockerfile), description="dockerfile")) -f_bintar.addStep(downloadSourceTarball()) -f_bintar.addStep(steps.ShellCommand(command=util.Interpolate("tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1"))) -f_bintar.addStep(steps.ShellCommand(name="create html log file", command=['bash', '-c', util.Interpolate(getHTMLLogString(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) -# build steps -f_bintar.addStep(steps.Compile(command=["sh", "-c", util.Interpolate( - 'cmake . -DWITH_READLINE=1 -DBUILD_CONFIG=mysql_release -DCMAKE_C_FLAGS="-static-libgcc -static-libstdc++ %(kw:gnutls_no_signal)s" -DCMAKE_CXX_FLAGS="-static-libgcc -static-libstdc++ %(kw:gnutls_no_signal)s" -DWITH_SSL=bundled -DPLATFORM=linux-systemd && make -j%(kw:jobs)s package', - perf_schema=util.Property('perf_schema', default='YES'), - build_type=util.Property('build_type', default='RelWithDebInfo'), - jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'), - c_compiler=util.Property('c_compiler', default='gcc'), - cxx_compiler=util.Property('cxx_compiler', default='g++'), - additional_args=util.Property('additional_args', default=''), - create_package=util.Property('create_package', default='package'), - gnutls_no_signal=util.Property('gnutls_no_signal', default=' ') - )], - env={ - 'CCACHE_DIR':'/mnt/ccache', - 'CMAKE_LIBRARY_PATH': '/scripts/local/lib/', - }, - haltOnFailure="true", -)) -f_bintar.addStep(steps.MTR( - logfiles={"mysqld*": "/buildbot/mysql_logs.html"}, - command=["sh", 
"-c", util.Interpolate("cd mysql-test && exec perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --mem --parallel=$(expr %(kw:jobs)s \* 2) %(kw:mtr_additional_args)s", mtr_additional_args=util.Property('mtr_additional_args', default=''), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))], - timeout=950, - haltOnFailure="true", - parallel=mtrJobsMultiplier, - dbpool=mtrDbPool, - autoCreateTables=True, - env=MTR_ENV, -)) -f_bintar.addStep(steps.ShellCommand(name="move mysqld log files", alwaysRun=True, command=['bash', '-c', util.Interpolate(moveMTRLogs(), jobs=util.Property('jobs', default='$(getconf _NPROCESSORS_ONLN)'))])) -f_bintar.addStep(steps.ShellCommand(name="create var archive", alwaysRun=True, command=['bash', '-c', util.Interpolate(createVar())], doStepIf=hasFailed)) -f_bintar.addStep(steps.DirectoryUpload(name="save log files", compress="bz2", alwaysRun=True, workersrc='/buildbot/logs/', masterdest=util.Interpolate('/srv/buildbot/packages/' + '%(prop:tarbuildnum)s' + '/logs/' + '%(prop:buildername)s' ))) -## trigger packages -f_bintar.addStep(steps.Trigger(schedulerNames=['s_packages'], waitForFinish=False, updateSourceStamp=False, alwaysRun=True, - set_properties={"parentbuildername": Property('buildername'), "tarbuildnum" : Property("tarbuildnum"), "mariadb_version" : Property("mariadb_version"), "master_branch" : Property("master_branch")}, doStepIf=hasAutobake)) -## trigger bigtest -f_bintar.addStep(steps.Trigger(schedulerNames=['s_bigtest'], waitForFinish=False, updateSourceStamp=False, - set_properties={"parentbuildername": Property('buildername'), "tarbuildnum" : Property("tarbuildnum"), "mariadb_version" : Property("mariadb_version"), "master_branch" : Property("master_branch")}, doStepIf=hasBigtest)) -# create package and upload to master -f_bintar.addStep(steps.SetPropertyFromCommand(command="basename mariadb-*-linux-*.tar.gz", property="mariadb_binary", 
doStepIf=savePackage)) -f_bintar.addStep(steps.ShellCommand(name='save_packages', timeout=7200, haltOnFailure=True, command=util.Interpolate('mkdir -p ' + '/packages/' + '%(prop:tarbuildnum)s' + '/' + '%(prop:buildername)s'+ ' && sha256sum %(prop:mariadb_binary)s >> sha256sums.txt && cp ' + '%(prop:mariadb_binary)s sha256sums.txt' + ' /packages/' + '%(prop:tarbuildnum)s' + '/' + '%(prop:buildername)s' + '/' + ' && sync /packages/' + '%(prop:tarbuildnum)s'), doStepIf=savePackage)) -f_bintar.addStep(steps.Trigger(name='eco', schedulerNames=['s_eco'], waitForFinish=False, updateSourceStamp=False, set_properties={"parentbuildername": Property("buildername"), "tarbuildnum" : Property("tarbuildnum"), "mariadb_binary": Property("mariadb_binary"), "mariadb_version" : Property("mariadb_version"), "master_branch" : Property("master_branch"), "parentbuildername": Property("buildername")}, doStepIf=lambda step: savePackage(step) and hasEco(step))) -f_bintar.addStep(steps.ShellCommand(name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True)) +f_big_test.addStep( + steps.ShellCommand( + name="create var archive", + alwaysRun=True, + command=["bash", "-c", util.Interpolate(createVar())], + doStepIf=hasFailed, + ) +) +f_big_test.addStep( + steps.DirectoryUpload( + name="save mysqld log files", + compress="bz2", + alwaysRun=True, + workersrc="/buildbot/logs/", + masterdest=util.Interpolate( + "/srv/buildbot/packages/" + + "%(prop:tarbuildnum)s" + + "/logs/" + + "%(prop:buildername)s" + ), + ) +) +# create package and upload to master +f_big_test.addStep( + steps.SetPropertyFromCommand( + command="basename mariadb-*-linux-*.tar.gz", property="mariadb_binary" + ) +) +# f_big_test.addStep(steps.ShellCommand(name='save_packages', timeout=7200, haltOnFailure=True, command=util.Interpolate('mkdir -p ' + '/packages/' + '%(prop:tarbuildnum)s' + '/' + '%(prop:buildername)s'+ ' && sha256sum %(prop:mariadb_binary)s >> sha256sums.txt && cp ' + '%(prop:mariadb_binary)s 
sha256sums.txt' + ' /packages/' + '%(prop:tarbuildnum)s' + '/' + '%(prop:buildername)s' + '/' + ' && sync /packages/' + '%(prop:tarbuildnum)s'), doStepIf=savePackage)) +f_big_test.addStep( + steps.ShellCommand( + name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True + ) +) + +## f_full_test +f_full_test = util.BuildFactory() +f_full_test.addStep( + steps.ShellCommand( + name="Environment details", + command=["bash", "-c", "date -u && uname -a && ulimit -a"], + ) +) +f_full_test.addStep( + steps.SetProperty( + property="dockerfile", + value=util.Interpolate("%(kw:url)s", url=dockerfile), + description="dockerfile", + ) +) +# get the source tarball and extract it +f_full_test.addStep( + steps.FileDownload( + mastersrc=util.Interpolate( + "/srv/buildbot/packages/" + + "%(prop:tarbuildnum)s" + + "/" + + "%(prop:mariadb_version)s" + + ".tar.gz" + ), + workerdest=util.Interpolate("%(prop:mariadb_version)s" + ".tar.gz"), + ) +) +f_full_test.addStep( + steps.ShellCommand( + command=util.Interpolate( + "tar -xvzf " + "%(prop:mariadb_version)s" + ".tar.gz --strip-components=1" + ) + ) +) +# build steps +f_full_test.addStep( + steps.Compile( + command=[ + "sh", + "-c", + util.Interpolate( + "export PATH=/usr/lib/ccache:/usr/lib64/ccache:$PATH && cmake . 
-DBUILD_CONFIG=mysql_release -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DWITH_SSL=system -DWITH_JEMALLOC=auto -DWITH_EMBEDDED_SERVER=1 -DHAVE_EMBEDDED_PRIVILEGE_CONTROL=1 -DWITH_LIBARCHIVE=ON -Wno-dev && make -j%(kw:jobs)s VERBOSE=1 package", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + env={"CCACHE_DIR": "/mnt/ccache"}, + ) +) +f_full_test.addStep( + steps.MTR( + addLogs=True, + name="test emb", + command=[ + "sh", + "-c", + util.Interpolate( + "cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --embedded-server --parallel=$(expr %(kw:jobs)s \* 2)", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=10800, + dbpool=mtrDbPool, + parallel=mtrJobsMultiplier, + env=MTR_ENV, + ) +) +f_full_test.addStep( + steps.MTR( + addLogs=True, + name="test n", + command=[ + "sh", + "-c", + util.Interpolate( + "cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --parallel=$(expr %(kw:jobs)s \* 2)", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=10800, + dbpool=mtrDbPool, + parallel=mtrJobsMultiplier, + env=MTR_ENV, + ) +) +f_full_test.addStep( + steps.MTR( + addLogs=True, + name="test ps-protocol", + command=[ + "sh", + "-c", + util.Interpolate( + "cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --ps-protocol --parallel=$(expr %(kw:jobs)s \* 2)", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=10800, + dbpool=mtrDbPool, + parallel=mtrJobsMultiplier, + env=MTR_ENV, + ) +) +f_full_test.addStep( + steps.MTR( + addLogs=True, + name="test ps-embedded", + command=[ + "sh", + "-c", + util.Interpolate( + "cd mysql-test && 
MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --ps --embedded --mem --parallel=$(expr %(kw:jobs)s \* 2)", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=10800, + dbpool=mtrDbPool, + parallel=mtrJobsMultiplier, + env=MTR_ENV, + ) +) +f_full_test.addStep( + steps.MTR( + addLogs=True, + name="test funcs_1,2,stress,jp with big", + command=[ + "sh", + "-c", + util.Interpolate( + "cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --suite=funcs_1,funcs_2,stress,jp --big --mysqld=--open-files-limit=0 --mysqld=--log-warnings=1 --parallel=$(expr %(kw:jobs)s \* 2)", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=10800, + dbpool=mtrDbPool, + parallel=mtrJobsMultiplier, + env=MTR_ENV, + ) +) +f_full_test.addStep( + steps.MTR( + addLogs=True, + name="test engines", + command=[ + "sh", + "-c", + util.Interpolate( + "cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --mem --suite=spider,spider/bg,engines/funcs,engines/iuds --big --mysqld=--open-files-limit=0 --mysqld=--log-warnings=1 --parallel=$(expr %(kw:jobs)s \* 2)", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=10800, + dbpool=mtrDbPool, + parallel=mtrJobsMultiplier, + env=MTR_ENV, + ) +) +f_full_test.addStep( + steps.MTR( + addLogs=True, + name="test view-protocol", + command=[ + "sh", + "-c", + util.Interpolate( + "cd mysql-test && MTR_FEEDBACK_PLUGIN=1 perl mysql-test-run.pl --view-protocol --suite=main --verbose-restart --force --retry=3 --max-save-core=0 --max-save-datadir=10 --parallel=$(expr %(kw:jobs)s \* 2)", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=10800, + dbpool=mtrDbPool, + 
parallel=mtrJobsMultiplier, + env=MTR_ENV, + ) +) +f_full_test.addStep( + steps.ShellCommand( + name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True + ) +) + +## f_without_server +f_without_server = util.BuildFactory() +f_without_server.addStep( + steps.ShellCommand( + name="Environment details", + command=["bash", "-c", "date -u && uname -a && ulimit -a"], + ) +) +f_without_server.addStep( + steps.SetProperty( + property="dockerfile", + value=util.Interpolate("%(kw:url)s", url=dockerfile), + description="dockerfile", + ) +) +f_without_server.addStep(steps.ShellCommand(command="ls -la")) +f_without_server.addStep(downloadSourceTarball()) +f_without_server.addStep(steps.ShellCommand(command="ls -la")) +f_without_server.addStep( + steps.ShellCommand( + command=util.Interpolate( + "tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1" + ) + ) +) +f_without_server.addStep(steps.ShellCommand(command="ls -la")) +# build steps +f_without_server.addStep( + steps.Compile( + command=[ + "sh", + "-c", + util.Interpolate( + "export PATH=/usr/lib/ccache:/usr/lib64/ccache:$PATH && cmake . 
-DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_C_COMPILER=%(kw:c_compiler)s -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER=%(kw:cxx_compiler)s -DWITHOUT_SERVER=1 && make -j%(kw:jobs)s package", + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + c_compiler=util.Property("c_compiler", default="gcc"), + cxx_compiler=util.Property("cxx_compiler", default="g++"), + ), + ], + env={"CCACHE_DIR": "/mnt/ccache"}, + haltOnFailure="true", + ) +) +# create package and upload to master +f_without_server.addStep( + steps.SetPropertyFromCommand( + command="basename mariadb-*-linux-*.tar.gz", property="mariadb_binary" + ) +) +f_without_server.addStep( + steps.ShellCommand( + name="save_packages", + timeout=7200, + haltOnFailure=True, + command=util.Interpolate( + "mkdir -p " + + "/packages/" + + "%(prop:tarbuildnum)s" + + "/" + + "%(prop:buildername)s" + + " && sha256sum %(prop:mariadb_binary)s >> sha256sums.txt && cp " + + "%(prop:mariadb_binary)s sha256sums.txt" + + " /packages/" + + "%(prop:tarbuildnum)s" + + "/" + + "%(prop:buildername)s" + + "/" + + " && sync /packages/" + + "%(prop:tarbuildnum)s" + ), + doStepIf=savePackage, + ) +) +f_without_server.addStep( + steps.ShellCommand( + name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True + ) +) + +## f_eco_php +f_eco_php = util.BuildFactory() +f_eco_php.addStep( + steps.ShellCommand( + name="fetch_install_script", + command=[ + "sh", + "-xc", + "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/installdb.sh -o /buildbot/installdb.sh && chmod a+x /buildbot/installdb.sh", + ], + ) +) +f_eco_php.addStep( + steps.ShellCommand( + name="fetch_test_script", + command=[ + "sh", + "-xc", + "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/test-php.sh -o /buildbot/test-php.sh && chmod a+x /buildbot/test-php.sh", + ], + ) +) +f_eco_php.addStep( + steps.ShellCommand( + name="fetching and 
installing database",
+        command=[
+            "sh",
+            "-xc",
+            util.Interpolate(
+                """
+            "/buildbot/installdb.sh " """
+                + os.getenv("ARTIFACTS_URL", default="https://ci.mariadb.org")
+                + """/%(prop:tarbuildnum)s/%(prop:parentbuildername)s/%(prop:mariadb_binary)s" --plugin-load-add=auth_pam --pam_use_cleartext_plugin" """
+            ),
+        ],
+    )
+)
+f_eco_php.addStep(
+    steps.ShellCommand(
+        name="test PHP-7.1", command=["sh", "-xc", "/buildbot/test-php.sh PHP-7.1"]
+    )
+)
+f_eco_php.addStep(
+    steps.ShellCommand(
+        name="test PHP-8.0", command=["sh", "-xc", "/buildbot/test-php.sh PHP-8.0"]
+    )
+)
+f_eco_php.addStep(
+    steps.ShellCommand(
+        name="test PHP-8.1", command=["sh", "-xc", "/buildbot/test-php.sh PHP-8.1"]
+    )
+)
+f_eco_php.addStep(
+    steps.ShellCommand(
+        name="test master", command=["sh", "-xc", "/buildbot/test-php.sh"]
+    )
+)
+
+## f_eco_dbdeployer
+f_eco_dbdeployer = util.BuildFactory()
+f_eco_dbdeployer.addStep(
+    steps.ShellCommand(
+        name="fetch_test_script",
+        command=[
+            "sh",
+            "-xc",
+            "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/test-dbdeployer.sh -o /buildbot/test-dbdeployer.sh && chmod a+x /buildbot/test-dbdeployer.sh",
+        ],
+    )
+)
+f_eco_dbdeployer.addStep(
+    steps.ShellCommand(
+        name="download if needed latest dbdeployer",
+        command=["sh", "-xc", "/buildbot/test-dbdeployer.sh dbdeployerfetch"],
+    )
+)
+f_eco_dbdeployer.addStep(
+    steps.ShellCommand(
+        name="fetching mariadb tarball",
+        command=[
+            "sh",
+            "-xc",
+            util.Interpolate(
+                """
+            '/buildbot/test-dbdeployer.sh init " """
+                + os.getenv("ARTIFACTS_URL", default="https://ci.mariadb.org")
+                + """/%(prop:tarbuildnum)s/%(prop:parentbuildername)s/%(prop:mariadb_binary)s"' """
+            ),
+        ],
+    )
+)
+f_eco_dbdeployer.addStep(
+    steps.ShellCommand(
+        name="deploy single ma",
+        command=[
+            "sh",
+            "-xc",
+            util.Interpolate(
+                "/buildbot/test-dbdeployer.sh deploy single ma%(prop:mariadb_version)s"
+            ),
+        ],
+    )
+)
+f_eco_dbdeployer.addStep(
+    steps.ShellCommand(
+        
name="deploy replication ma",
+        command=[
+            "sh",
+            "-xc",
+            util.Interpolate(
+                "/buildbot/test-dbdeployer.sh deploy replication ma%(prop:mariadb_version)s"
+            ),
+        ],
+    )
+)
+f_eco_dbdeployer.addStep(
+    steps.ShellCommand(
+        name="global test",
+        command=["sh", "-xc", "/buildbot/test-dbdeployer.sh global test"],
+    )
+)
+f_eco_dbdeployer.addStep(
+    steps.ShellCommand(
+        name="global replication",
+        command=["sh", "-xc", "/buildbot/test-dbdeployer.sh global test-replication"],
+    )
+)
+
+## f_eco_pymysql
+f_eco_pymysql = util.BuildFactory()
+f_eco_pymysql.addStep(
+    steps.ShellCommand(
+        name="fetch_install_script",
+        command=[
+            "sh",
+            "-xc",
+            "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/installdb.sh -o /buildbot/installdb.sh && chmod a+x /buildbot/installdb.sh",
+        ],
+    )
+)
+f_eco_pymysql.addStep(
+    steps.ShellCommand(
+        name="fetch_test_script",
+        command=[
+            "sh",
+            "-xc",
+            "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/test-pymysql.sh -o /buildbot/test-pymysql.sh && chmod a+x /buildbot/test-pymysql.sh",
+        ],
+    )
+)
+f_eco_pymysql.addStep(
+    steps.ShellCommand(
+        name="fetching and installing database",
+        command=[
+            "sh",
+            "-xc",
+            util.Interpolate(
+                """
+            "/buildbot/installdb.sh " """
+                + os.getenv("ARTIFACTS_URL", default="https://ci.mariadb.org")
+                + """/%(prop:tarbuildnum)s/%(prop:parentbuildername)s/%(prop:mariadb_binary)s" """
+            ),
+        ],
+    )
+)
+f_eco_pymysql.addStep(
+    steps.ShellCommand(
+        name="test pymysql-main", command=["sh", "-xc", "/buildbot/test-pymysql.sh"]
+    )
+)
+f_eco_pymysql.addStep(
+    steps.ShellCommand(
+        name="test pymysql-v0.7.11",
+        command=["sh", "-xc", "/buildbot/test-pymysql.sh v0.7.11"],
+    )
+)
+
+## f_eco_mysqljs
+f_eco_mysqljs = util.BuildFactory()
+f_eco_mysqljs.addStep(
+    steps.ShellCommand(
+        name="fetch_install_script",
+        command=[
+            "sh",
+            "-xc",
+            "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/installdb.sh 
-o /buildbot/installdb.sh && chmod a+x /buildbot/installdb.sh",
+        ],
+    )
+)
+f_eco_mysqljs.addStep(
+    steps.ShellCommand(
+        name="fetch_test_script",
+        command=[
+            "sh",
+            "-xc",
+            "curl https://raw.githubusercontent.com/MariaDB/buildbot/main/dockerfiles/ecofiles/test-mysqljs.sh -o /buildbot/test-mysqljs.sh && chmod a+x /buildbot/test-mysqljs.sh",
+        ],
+    )
+)
+f_eco_mysqljs.addStep(
+    steps.ShellCommand(
+        name="fetching and installing database",
+        command=[
+            "sh",
+            "-xc",
+            util.Interpolate(
+                """
+            "/buildbot/installdb.sh " """
+                + os.getenv("ARTIFACTS_URL", default="https://ci.mariadb.org")
+                + """/%(prop:tarbuildnum)s/%(prop:parentbuildername)s/%(prop:mariadb_binary)s" """
+            ),
+        ],
+    )
+)
+f_eco_mysqljs.addStep(
+    steps.ShellCommand(
+        name="test mysqljs-master", command=["sh", "-xc", "/buildbot/test-mysqljs.sh"]
+    )
+)
+f_eco_mysqljs.addStep(
+    steps.ShellCommand(
+        name="test mysqljs-v2.18.1",
+        command=["sh", "-xc", "/buildbot/test-mysqljs.sh v2.18.1"],
+    )
+)
+
+## f_bintar
+f_bintar = util.BuildFactory()
+f_bintar.addStep(
+    steps.ShellCommand(
+        name="Environment details",
+        command=["bash", "-c", "date -u && uname -a && ulimit -a"],
+    )
+)
+f_bintar.addStep(
+    steps.SetProperty(
+        property="dockerfile",
+        value=util.Interpolate("%(kw:url)s", url=dockerfile),
+        description="dockerfile",
+    )
+)
+f_bintar.addStep(downloadSourceTarball())
+f_bintar.addStep(
+    steps.ShellCommand(
+        command=util.Interpolate(
+            "tar -xvzf /mnt/packages/%(prop:tarbuildnum)s_%(prop:mariadb_version)s.tar.gz --strip-components=1"
+        )
+    )
+)
+f_bintar.addStep(
+    steps.ShellCommand(
+        name="create html log file",
+        command=[
+            "bash",
+            "-c",
+            util.Interpolate(
+                getHTMLLogString(),
+                jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"),
+            ),
+        ],
+    )
+)
+# build steps
+f_bintar.addStep(
+    steps.Compile(
+        command=[
+            "sh",
+            "-c",
+            util.Interpolate(
+                'cmake . 
-DWITH_READLINE=1 -DBUILD_CONFIG=mysql_release -DCMAKE_C_FLAGS="-static-libgcc -static-libstdc++ %(kw:gnutls_no_signal)s" -DCMAKE_CXX_FLAGS="-static-libgcc -static-libstdc++ %(kw:gnutls_no_signal)s" -DWITH_SSL=bundled -DPLATFORM=linux-systemd && make -j%(kw:jobs)s package', + perf_schema=util.Property("perf_schema", default="YES"), + build_type=util.Property("build_type", default="RelWithDebInfo"), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + c_compiler=util.Property("c_compiler", default="gcc"), + cxx_compiler=util.Property("cxx_compiler", default="g++"), + additional_args=util.Property("additional_args", default=""), + create_package=util.Property("create_package", default="package"), + gnutls_no_signal=util.Property("gnutls_no_signal", default=" "), + ), + ], + env={ + "CCACHE_DIR": "/mnt/ccache", + "CMAKE_LIBRARY_PATH": "/scripts/local/lib/", + }, + haltOnFailure="true", + ) +) +f_bintar.addStep( + steps.MTR( + logfiles={"mysqld*": "/buildbot/mysql_logs.html"}, + command=[ + "sh", + "-c", + util.Interpolate( + "cd mysql-test && exec perl mysql-test-run.pl --verbose-restart --force --retry=3 --max-save-core=1 --max-save-datadir=10 --max-test-fail=20 --mem --parallel=$(expr %(kw:jobs)s \* 2) %(kw:mtr_additional_args)s", + mtr_additional_args=util.Property("mtr_additional_args", default=""), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + timeout=950, + haltOnFailure="true", + parallel=mtrJobsMultiplier, + dbpool=mtrDbPool, + autoCreateTables=True, + env=MTR_ENV, + ) +) +f_bintar.addStep( + steps.ShellCommand( + name="move mysqld log files", + alwaysRun=True, + command=[ + "bash", + "-c", + util.Interpolate( + moveMTRLogs(), + jobs=util.Property("jobs", default="$(getconf _NPROCESSORS_ONLN)"), + ), + ], + ) +) +f_bintar.addStep( + steps.ShellCommand( + name="create var archive", + alwaysRun=True, + command=["bash", "-c", util.Interpolate(createVar())], + doStepIf=hasFailed, + ) +) +f_bintar.addStep( + 
steps.DirectoryUpload( + name="save log files", + compress="bz2", + alwaysRun=True, + workersrc="/buildbot/logs/", + masterdest=util.Interpolate( + "/srv/buildbot/packages/" + + "%(prop:tarbuildnum)s" + + "/logs/" + + "%(prop:buildername)s" + ), + ) +) +## trigger packages +f_bintar.addStep( + steps.Trigger( + schedulerNames=["s_packages"], + waitForFinish=False, + updateSourceStamp=False, + alwaysRun=True, + set_properties={ + "parentbuildername": Property("buildername"), + "tarbuildnum": Property("tarbuildnum"), + "mariadb_version": Property("mariadb_version"), + "master_branch": Property("master_branch"), + }, + doStepIf=hasAutobake, + ) +) +## trigger bigtest +f_bintar.addStep( + steps.Trigger( + schedulerNames=["s_bigtest"], + waitForFinish=False, + updateSourceStamp=False, + set_properties={ + "parentbuildername": Property("buildername"), + "tarbuildnum": Property("tarbuildnum"), + "mariadb_version": Property("mariadb_version"), + "master_branch": Property("master_branch"), + }, + doStepIf=hasBigtest, + ) +) +# create package and upload to master +f_bintar.addStep( + steps.SetPropertyFromCommand( + command="basename mariadb-*-linux-*.tar.gz", + property="mariadb_binary", + doStepIf=savePackage, + ) +) +f_bintar.addStep( + steps.ShellCommand( + name="save_packages", + timeout=7200, + haltOnFailure=True, + command=util.Interpolate( + "mkdir -p " + + "/packages/" + + "%(prop:tarbuildnum)s" + + "/" + + "%(prop:buildername)s" + + " && sha256sum %(prop:mariadb_binary)s >> sha256sums.txt && cp " + + "%(prop:mariadb_binary)s sha256sums.txt" + + " /packages/" + + "%(prop:tarbuildnum)s" + + "/" + + "%(prop:buildername)s" + + "/" + + " && sync /packages/" + + "%(prop:tarbuildnum)s" + ), + doStepIf=savePackage, + ) +) +f_bintar.addStep( + steps.Trigger( + name="eco", + schedulerNames=["s_eco"], + waitForFinish=False, + updateSourceStamp=False, + set_properties={ + "parentbuildername": Property("buildername"), + "tarbuildnum": Property("tarbuildnum"), + "mariadb_binary": 
Property("mariadb_binary"), + "mariadb_version": Property("mariadb_version"), + "master_branch": Property("master_branch"), + "parentbuildername": Property("buildername"), + }, + doStepIf=lambda step: savePackage(step) and hasEco(step), + ) +) +f_bintar.addStep( + steps.ShellCommand( + name="cleanup", command="rm -r * .* 2> /dev/null || true", alwaysRun=True + ) +) ####### BUILDERS LIST -c['builders'] = [] - -c['builders'].append( - util.BuilderConfig(name="amd64-debian-11-aocc", - workernames=workers["ns-x64-bbw-docker-aocc-debian-11"], - tags=["Ubuntu", "quick", "aocc"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - properties={'c_compiler': 'clang', 'cxx_compiler': 'clang++', 'additional_args': '-DCMAKE_C_FLAGS=-Wno-inconsistent-missing-override -DCMAKE_CXX_FLAGS=-Wno-inconsistent-missing-override'}, - # TODO find a better way to deal with the env vars - env={ - "PATH": "/opt/AMD/aocc-compiler-3.2.0/bin:/opt/AMD/aocc-compiler-3.2.0/share/opt-viewer:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", - "LIBRARY_PATH": "/opt/AMD/aocc-compiler-3.2.0/lib:/opt/AMD/aocc-compiler-3.2.0/lib32:/usr/lib/x86_64-linux-gnu:/usr/lib64:/usr/lib32:/usr/lib:", - "LD_LIBRARY_PATH": "/opt/AMD/aocc-compiler-3.2.0/ompd:/opt/AMD/aocc-compiler-3.2.0/lib:/opt/AMD/aocc-compiler-3.2.0/lib32:/usr/lib/x86_64-linux-gnu:/usr/lib64:/usr/lib32:/usr/lib:", - "C_INCLUDE_PATH": ":/opt/AMD/aocc-compiler-3.2.0/include", - "CPLUS_INCLUDE_PATH": ":/opt/AMD/aocc-compiler-3.2.0/include", - }, - factory=f_quick_build)) - -c['builders'].append( - util.BuilderConfig(name="amd64-ubuntu-2204-icc", - workernames=workers["ns-x64-bbw-docker-icc-ubuntu-2204"], - tags=["Ubuntu", "quick", "icc", "icpc"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - properties={'c_compiler': 'icc', 'cxx_compiler': 'icpc'}, - factory=f_quick_build)) - -c['builders'].append( - 
util.BuilderConfig(name="amd64-ubuntu-2004-eco-php", - workernames=["hz-bbw2-docker-eco-php-ubuntu-2004"], - tags=["Ubuntu", "ecosystem", "PHP"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - factory=f_eco_php)) - -c['builders'].append( - util.BuilderConfig(name="amd64-ubuntu-2004-eco-dbdeployer", - workernames=["hz-bbw2-docker-eco-dbdeployer-ubuntu-2004"], - tags=["Ubuntu", "ecosystem", "dbdeployer"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - factory=f_eco_dbdeployer)) - -c['builders'].append( - util.BuilderConfig(name="amd64-debian-10-eco-pymysql", - workernames=["hz-bbw2-docker-eco-pymysql-python-3-9-slim-buster"], - tags=["Debian", "ecosystem", "pymysql"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - factory=f_eco_pymysql)) - -c['builders'].append( - util.BuilderConfig(name="amd64-debian-10-eco-mysqljs", - workernames=["hz-bbw2-docker-eco-mysqljs-nodejs15-buster"], - tags=["Debian", "ecosystem", "mysqljs"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - factory=f_eco_mysqljs)) - -c['builders'].append( - util.BuilderConfig(name="amd64-ubuntu-2004-bigtest", - workernames=["bm-bbw1-docker-ubuntu-2004"] + workers["x64-bbw-docker-bigtest-ubuntu-2004"], - tags=["Ubuntu", "big", "gcc"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - factory=f_big_test)) - -c['builders'].append( - util.BuilderConfig(name="amd64-ubuntu-2004-fulltest", - workernames=workers["ns-x64-bbw-docker-ubuntu-2004"], - tags=["Ubuntu", "full", "gcc"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - factory=f_full_test)) - -c['builders'].append( - util.BuilderConfig(name="ppc64le-ubuntu-2004-without-server", - workernames=workers["ppc64le-bbw-docker-ubuntu-2004"], - tags=["Ubuntu", "without-server", "gcc", "pc9"], - collapseRequests=True, - nextBuild=nextBuild, - 
canStartBuild=canStartBuild, - locks=getLocks, - factory=f_without_server)) - -c['builders'].append( - util.BuilderConfig(name="amd64-ubuntu-2204-clang14-asan", - workernames=workers["ns-x64-bbw-docker-asan-ubuntu-2204"], - tags=["Ubuntu", "quick", "clang-14", "asan"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - factory=f_asan_build)) - -c['builders'].append( - util.BuilderConfig(name="amd64-debian-12-asan-ubsan", - workernames=workers["x64-bbw-docker-debian-12"], - tags=["Ubuntu", "quick", "gcc", "asan", "ubsan"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - factory=f_asan_ubsan_build)) - -c['builders'].append( - util.BuilderConfig(name="amd64-debian-11-msan", - workernames=workers["x64-bbw-docker-msan-clang-debian-11"], - tags=["Debian", "quick", "clang-15", "msan"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - properties={'c_compiler': 'clang-15', 'cxx_compiler': 'clang++-15'}, - locks=getLocks, - factory=f_msan_build)) - -c['builders'].append( - util.BuilderConfig(name="amd64-ubuntu-2204-valgrind", - workernames=workers["x64-bbw-docker-valgrind-ubuntu-2204"], - tags=["Ubuntu", "quick", "gcc", "valgrind"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - factory=f_valgrind_build)) - -c['builders'].append( - util.BuilderConfig(name="amd64-centos-7-bintar", - workernames=workers["x64-bbw-docker-centos-7-bintar"], - tags=["CentOS", "quick", "bintar"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - properties={'gnutls_no_signal': '-DGNUTLS_NO_SIGNAL=0'}, - factory=f_bintar)) - -c['builders'].append( - util.BuilderConfig(name="aarch64-debian-10-bintar", - workernames=workers["aarch64-bbw-docker-debian-10-bintar"], - tags=["Debian", "quick", "bintar"], - collapseRequests=True, - nextBuild=nextBuild, - 
canStartBuild=canStartBuild, - locks=getLocks, - factory=f_bintar)) - -c['builders'].append( - util.BuilderConfig(name="aarch64-ubuntu-2004-debug", - workernames=workers["aarch64-bbw-docker-ubuntu-2004-debug"], - tags=["Ubuntu", "quick", "gcc", "debug"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - properties={ - 'build_type': 'Debug', - 'additional_args': '-DWITH_DBUG_TRACE=OFF', - 'mtr_additional_args': '--skip-test="main\.show_analyze_json"', - }, - factory=f_quick_build)) - -c['builders'].append( - util.BuilderConfig(name="ppc64le-ubuntu-2004-debug", - workernames=workers["ppc64le-bbw-docker-ubuntu-2004-debug"], - tags=["Ubuntu", "quick", "gcc", "debug"], - collapseRequests=True, - nextBuild=nextBuild, - canStartBuild=canStartBuild, - locks=getLocks, - properties={ - 'build_type': 'Debug', - 'additional_args': '-DWITH_DBUG_TRACE=OFF -DWITH_SAFEMALLOC=OFF', - 'mtr_additional_args': '--skip-test="main\.show_analyze_json"', - }, - factory=f_quick_build)) - -c['logEncoding'] = 'utf-8' - -c['multiMaster'] = True - -c['mq'] = { # Need to enable multimaster aware mq. Wamp is the only option for now. 
- 'type' : 'wamp', - 'router_url': os.getenv('MQ_ROUTER_URL', default='ws://localhost:8085/ws'), - 'realm': 'realm1', +c["builders"] = [] + +c["builders"].append( + util.BuilderConfig( + name="amd64-debian-11-aocc", + workernames=workers["ns-x64-bbw-docker-aocc-debian-11"], + tags=["Ubuntu", "quick", "aocc"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + properties={ + "c_compiler": "clang", + "cxx_compiler": "clang++", + "additional_args": "-DCMAKE_C_FLAGS=-Wno-inconsistent-missing-override -DCMAKE_CXX_FLAGS=-Wno-inconsistent-missing-override", + }, + # TODO find a better way to deal with the env vars + env={ + "PATH": "/opt/AMD/aocc-compiler-3.2.0/bin:/opt/AMD/aocc-compiler-3.2.0/share/opt-viewer:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "LIBRARY_PATH": "/opt/AMD/aocc-compiler-3.2.0/lib:/opt/AMD/aocc-compiler-3.2.0/lib32:/usr/lib/x86_64-linux-gnu:/usr/lib64:/usr/lib32:/usr/lib:", + "LD_LIBRARY_PATH": "/opt/AMD/aocc-compiler-3.2.0/ompd:/opt/AMD/aocc-compiler-3.2.0/lib:/opt/AMD/aocc-compiler-3.2.0/lib32:/usr/lib/x86_64-linux-gnu:/usr/lib64:/usr/lib32:/usr/lib:", + "C_INCLUDE_PATH": ":/opt/AMD/aocc-compiler-3.2.0/include", + "CPLUS_INCLUDE_PATH": ":/opt/AMD/aocc-compiler-3.2.0/include", + }, + factory=f_quick_build, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-ubuntu-2204-icc", + workernames=workers["ns-x64-bbw-docker-icc-ubuntu-2204"], + tags=["Ubuntu", "quick", "icc", "icpc"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + properties={"c_compiler": "icc", "cxx_compiler": "icpc"}, + factory=f_quick_build, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-ubuntu-2004-eco-php", + workernames=["hz-bbw2-docker-eco-php-ubuntu-2004"], + tags=["Ubuntu", "ecosystem", "PHP"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + factory=f_eco_php, + ) +) + +c["builders"].append( + 
util.BuilderConfig( + name="amd64-ubuntu-2004-eco-dbdeployer", + workernames=["hz-bbw2-docker-eco-dbdeployer-ubuntu-2004"], + tags=["Ubuntu", "ecosystem", "dbdeployer"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + factory=f_eco_dbdeployer, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-debian-10-eco-pymysql", + workernames=["hz-bbw2-docker-eco-pymysql-python-3-9-slim-buster"], + tags=["Debian", "ecosystem", "pymysql"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + factory=f_eco_pymysql, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-debian-10-eco-mysqljs", + workernames=["hz-bbw2-docker-eco-mysqljs-nodejs15-buster"], + tags=["Debian", "ecosystem", "mysqljs"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + factory=f_eco_mysqljs, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-ubuntu-2004-bigtest", + workernames=["bm-bbw1-docker-ubuntu-2004"] + + workers["x64-bbw-docker-bigtest-ubuntu-2004"], + tags=["Ubuntu", "big", "gcc"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + factory=f_big_test, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-ubuntu-2004-fulltest", + workernames=workers["ns-x64-bbw-docker-ubuntu-2004"], + tags=["Ubuntu", "full", "gcc"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + factory=f_full_test, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="ppc64le-ubuntu-2004-without-server", + workernames=workers["ppc64le-bbw-docker-ubuntu-2004"], + tags=["Ubuntu", "without-server", "gcc", "pc9"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + factory=f_without_server, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-ubuntu-2204-clang14-asan", + workernames=workers["ns-x64-bbw-docker-asan-ubuntu-2204"], + 
tags=["Ubuntu", "quick", "clang-14", "asan"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + factory=f_asan_build, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-debian-12-asan-ubsan", + workernames=workers["x64-bbw-docker-debian-12"], + tags=["Ubuntu", "quick", "gcc", "asan", "ubsan"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + factory=f_asan_ubsan_build, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-debian-11-msan", + workernames=workers["x64-bbw-docker-msan-clang-debian-11"], + tags=["Debian", "quick", "clang-15", "msan"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + properties={"c_compiler": "clang-15", "cxx_compiler": "clang++-15"}, + locks=getLocks, + factory=f_msan_build, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-ubuntu-2204-valgrind", + workernames=workers["x64-bbw-docker-valgrind-ubuntu-2204"], + tags=["Ubuntu", "quick", "gcc", "valgrind"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + factory=f_valgrind_build, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="amd64-centos-7-bintar", + workernames=workers["x64-bbw-docker-centos-7-bintar"], + tags=["CentOS", "quick", "bintar"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + properties={"gnutls_no_signal": "-DGNUTLS_NO_SIGNAL=0"}, + factory=f_bintar, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="aarch64-debian-10-bintar", + workernames=workers["aarch64-bbw-docker-debian-10-bintar"], + tags=["Debian", "quick", "bintar"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + factory=f_bintar, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="aarch64-ubuntu-2004-debug", + 
workernames=workers["aarch64-bbw-docker-ubuntu-2004-debug"], + tags=["Ubuntu", "quick", "gcc", "debug"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + properties={ + "build_type": "Debug", + "additional_args": "-DWITH_DBUG_TRACE=OFF", + "mtr_additional_args": '--skip-test="main\.show_analyze_json"', + }, + factory=f_quick_build, + ) +) + +c["builders"].append( + util.BuilderConfig( + name="ppc64le-ubuntu-2004-debug", + workernames=workers["ppc64le-bbw-docker-ubuntu-2004-debug"], + tags=["Ubuntu", "quick", "gcc", "debug"], + collapseRequests=True, + nextBuild=nextBuild, + canStartBuild=canStartBuild, + locks=getLocks, + properties={ + "build_type": "Debug", + "additional_args": "-DWITH_DBUG_TRACE=OFF -DWITH_SAFEMALLOC=OFF", + "mtr_additional_args": '--skip-test="main\.show_analyze_json"', + }, + factory=f_quick_build, + ) +) + +c["logEncoding"] = "utf-8" + +c["multiMaster"] = True + +c["mq"] = { # Need to enable multimaster aware mq. Wamp is the only option for now. + "type": "wamp", + "router_url": os.getenv("MQ_ROUTER_URL", default="ws://localhost:8085/ws"), + "realm": "realm1", # valid are: none, critical, error, warn, info, debug, trace - 'wamp_debug_level' : 'info' + "wamp_debug_level": "info", }