
Commit 3bc3575
Merge remote-tracking branch 'origin/slack-vitess-r14.0.5' into slack…port-10619-slack-vitess-r14.0.5

timvaillancourt committed Aug 30, 2023
2 parents 9bd5467 + 0a2ef83 commit 3bc3575
Showing 333 changed files with 4,599 additions and 34,576 deletions.
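
To inspect this merge locally, the commit and parent SHAs above are all that is needed; a minimal sketch, assuming the commit is reachable from your configured remote and that 9bd5467 is the first parent (it is listed first above):

    git fetch origin
    # First-parent diff summary, which is what the 333-file count above reflects.
    git diff --stat 9bd5467 3bc3575
    # Commits brought in by the merge.
    git log --oneline 9bd5467..3bc3575
    # Limit the diff to the workflow changes shown below.
    git diff 9bd5467 3bc3575 -- .github/workflows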
112 changes: 0 additions & 112 deletions .github/workflows/cluster_endtoend_19.yml

This file was deleted.

110 changes: 57 additions & 53 deletions .github/workflows/cluster_endtoend_mysql80.yml
@@ -6,29 +6,23 @@ concurrency:
group: format('{0}-{1}', ${{ github.ref }}, 'Cluster (mysql80)')
cancel-in-progress: true

env:
LAUNCHABLE_ORGANIZATION: "vitess"
LAUNCHABLE_WORKSPACE: "vitess-app"
GITHUB_PR_HEAD_SHA: "${{ github.event.pull_request.head.sha }}"

jobs:
build:
name: Run endtoend tests on Cluster (mysql80)
runs-on:
group: vitess-ubuntu20
timeout-minutes: 45

steps:
- name: Check if workflow needs to be skipped
id: skip-workflow
run: |
skip='false'
if [[ "${{github.event.pull_request}}" == "" ]] && [[ "${{github.ref}}" != "refs/heads/main" ]] && [[ ! "${{github.ref}}" =~ ^refs/heads/slack-vitess-r[0-9]+\.[0-9]\.[0-9]+$ ]] && [[ ! "${{github.ref}}" =~ "refs/tags/.*" ]]; then
skip='true'
fi
echo Skip ${skip}
echo "skip-workflow=${skip}" >> $GITHUB_OUTPUT
- name: Check out code
if: steps.skip-workflow.outputs.skip-workflow == 'false'
uses: actions/checkout@v3
uses: actions/checkout@v2

- name: Check for changes in relevant files
if: steps.skip-workflow.outputs.skip-workflow == 'false'
uses: frouioui/paths-filter@main
id: changes
with:
@@ -39,74 +33,84 @@ jobs:
- 'test.go'
- 'Makefile'
- 'build.env'
- 'go.sum'
- 'go.mod'
- 'go.[sumod]'
- 'proto/*.proto'
- 'tools/**'
- 'config/**'
- 'bootstrap.sh'
- '.github/workflows/cluster_endtoend_mysql80.yml'
- '.github/workflows/**'
- name: Set up Go
if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true'
uses: actions/setup-go@v3
if: steps.changes.outputs.end_to_end == 'true'
uses: actions/setup-go@v2
with:
go-version: 1.18.7

- name: Set up python
if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true'
uses: actions/setup-python@v4
if: steps.changes.outputs.end_to_end == 'true'
uses: actions/setup-python@v2

- name: Tune the OS
if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true'
if: steps.changes.outputs.end_to_end == 'true'
run: |
# Limit local port range to not use ports that overlap with server side
# ports that we listen on.
echo '1024 65535' | sudo tee -a /proc/sys/net/ipv4/ip_local_port_range
# Increase the asynchronous non-blocking I/O. More information at https://dev.mysql.com/doc/refman/5.7/en/innodb-parameters.html#sysvar_innodb_use_native_aio
echo "fs.aio-max-nr = 1048576" | sudo tee -a /etc/sysctl.conf
sudo sysctl -p /etc/sysctl.conf
- name: Get dependencies
if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true'
if: steps.changes.outputs.end_to_end == 'true'
run: |
# Get key to latest MySQL repo
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 467B942D3A79BD29
# Setup MySQL 8.0
wget -c https://dev.mysql.com/get/mysql-apt-config_0.8.20-1_all.deb
echo mysql-apt-config mysql-apt-config/select-server select mysql-8.0 | sudo debconf-set-selections
sudo DEBIAN_FRONTEND="noninteractive" dpkg -i mysql-apt-config*
# Setup Percona Server for MySQL 8.0
sudo apt-get update
# Install everything else we need, and configure
sudo apt-get install -y mysql-server mysql-client make unzip g++ etcd curl git wget eatmydata xz-utils
sudo apt-get install -y lsb-release gnupg2 curl
wget https://repo.percona.com/apt/percona-release_latest.$(lsb_release -sc)_all.deb
sudo DEBIAN_FRONTEND="noninteractive" dpkg -i percona-release_latest.$(lsb_release -sc)_all.deb
sudo percona-release setup ps80
sudo apt-get update
sudo service etcd stop
echo 'mysql version: '
sudo mysql --version
# Install everything else we need, and configure
sudo apt-get install -y percona-server-server percona-server-client make unzip g++ etcd git wget eatmydata xz-utils
sudo service mysql stop
sudo service etcd stop
sudo ln -s /etc/apparmor.d/usr.sbin.mysqld /etc/apparmor.d/disable/
sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.mysqld
go mod download
# install JUnit report formatter
go install github.com/vitessio/go-junit-report@HEAD
- name: Setup launchable dependencies
if: steps.changes.outputs.end_to_end == 'true'
run: |
# Get Launchable CLI installed. If you can, make it a part of the builder image to speed things up
pip3 install --user launchable~=1.0 > /dev/null
# verify that launchable setup is all correct.
launchable verify || true
# Tell Launchable about the build you are producing and testing
launchable record build --name "$GITHUB_RUN_ID" --source .
- name: Run cluster endtoend test
if: steps.skip-workflow.outputs.skip-workflow == 'false' && steps.changes.outputs.end_to_end == 'true'
uses: nick-fields/retry@v2
with:
timeout_minutes: 45
max_attempts: 3
retry_on: error
command: |
# We set the VTDATAROOT to the /tmp folder to reduce the file path of mysql.sock file
# which mustn't be more than 107 characters long.
export VTDATAROOT="/tmp/"
source build.env
set -x
# run the tests however you normally do, then produce a JUnit XML file
eatmydata -- go run test.go -docker=false -follow -shard mysql80
if: steps.changes.outputs.end_to_end == 'true'
timeout-minutes: 30
run: |
# We set the VTDATAROOT to the /tmp folder to reduce the file path of mysql.sock file
# which mustn't be more than 107 characters long.
export VTDATAROOT="/tmp/"
source build.env
set -x
# run the tests however you normally do, then produce a JUnit XML file
eatmydata -- go run test.go -docker=false -follow -shard mysql80 | tee -a output.txt | go-junit-report -set-exit-code > report.xml
- name: Print test output and Record test result in launchable
if: steps.changes.outputs.end_to_end == 'true' && always()
run: |
# send recorded tests to launchable
launchable record tests --build "$GITHUB_RUN_ID" go-test . || true
# print test output
cat output.txt
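
The skip-workflow step that appears in the diff above gates the job on the event type and the git ref. A minimal standalone sketch of that check, with REF and PR_EVENT standing in for the ${{ github.ref }} and ${{ github.event.pull_request }} expressions (the values here are illustrative only):

    #!/bin/bash
    # Stand-ins for the GitHub Actions expressions used by the workflow.
    REF="refs/heads/feature-branch"
    PR_EVENT=""

    skip='false'
    # Skip unless this is a pull request, the main branch, a slack-vitess release
    # branch, or a tag. Note that the quoted tag pattern is matched literally by
    # bash, exactly as written in the workflow.
    if [[ "${PR_EVENT}" == "" ]] && [[ "${REF}" != "refs/heads/main" ]] && [[ ! "${REF}" =~ ^refs/heads/slack-vitess-r[0-9]+\.[0-9]\.[0-9]+$ ]] && [[ ! "${REF}" =~ "refs/tags/.*" ]]; then
      skip='true'
    fi
    echo "skip-workflow=${skip}"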
The next changed file is another generated cluster workflow, where the shard name changes from Cluster (24) to Cluster (mysql_server_vault):
@@ -1,14 +1,14 @@
# DO NOT MODIFY: THIS FILE IS GENERATED USING "make generate_ci_workflows"

name: Cluster (24)
name: Cluster (mysql_server_vault)
on: [push, pull_request]
concurrency:
group: format('{0}-{1}', ${{ github.ref }}, 'Cluster (24)')
group: format('{0}-{1}', ${{ github.ref }}, 'Cluster (mysql_server_vault)')
cancel-in-progress: true

jobs:
build:
name: Run endtoend tests on Cluster (24)
name: Run endtoend tests on Cluster (mysql_server_vault)
runs-on:
group: vitess-ubuntu20

@@ -113,5 +113,14 @@ jobs:
set -x
# run the tests however you normally do, then produce a JUnit XML file
eatmydata -- go run test.go -docker=false -follow -shard 24
# run the tests however you normally do, then produce a JUnit XML file
eatmydata -- go run test.go -docker=false -follow -shard mysql_server_vault | tee -a output.txt | go-junit-report -set-exit-code > report.xml

- name: Print test output and Record test result in launchable
if: steps.changes.outputs.end_to_end == 'true' && always()
run: |
# send recorded tests to launchable
launchable record tests --build "$GITHUB_RUN_ID" go-test . || true
# print test output
cat output.txt
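
Both workflows feed the shard run through go-junit-report and Launchable. A condensed sketch of that reporting flow, collecting the same commands the steps above run; outside Actions, GITHUB_RUN_ID and the shard name are placeholders, and go-junit-report is the vitessio fork installed in the dependency step:

    # Launchable CLI setup and build registration, as in the "Setup launchable dependencies" step.
    pip3 install --user launchable~=1.0 > /dev/null
    launchable verify || true
    launchable record build --name "$GITHUB_RUN_ID" --source .

    # Run a shard; tee keeps the human-readable log while go-junit-report converts
    # it to JUnit XML (-set-exit-code fails the step on test failures).
    export VTDATAROOT="/tmp/"
    source build.env
    eatmydata -- go run test.go -docker=false -follow -shard mysql80 | tee -a output.txt | go-junit-report -set-exit-code > report.xml

    # Upload the recorded results to Launchable, then print the raw output, as in the final step.
    launchable record tests --build "$GITHUB_RUN_ID" go-test . || true
    cat output.txt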
(Diffs for the remaining changed files are not shown.)
