
Commit e825131

[hotfix][ci] Optimize CI performance by implementing parallel execution
1 parent daf27fa commit e825131

File tree

3 files changed (+382, -317 lines)

.github/workflows/flink_cdc.yml

Lines changed: 32 additions & 317 deletions
@@ -30,78 +30,13 @@ on:
       - 'docs/**'
       - 'README.md'
 
-# Concurrency strategy:
-#   github.workflow: distinguish this workflow from others
-#   github.event_name: distinguish `push` event from `pull_request` event
-#   github.event.number: set to the number of the pull request if `pull_request` event
-#   github.run_id: otherwise, it's a `push` or `schedule` event, only cancel if we rerun the workflow
-#
-# Reference:
-#   https://docs.github.com/en/actions/using-jobs/using-concurrency
-#   https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
 concurrency:
   group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.number || github.run_id }}
   cancel-in-progress: true
 
-env:
-  MODULES_CORE: "\
-    flink-cdc-cli,\
-    flink-cdc-common,\
-    flink-cdc-composer,\
-    flink-cdc-runtime"
-
-  MODULES_PIPELINE_CONNECTORS: "\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-starrocks,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-kafka,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon"
-
-  MODULES_MYSQL: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mysql-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mysql-cdc"
-
-  MODULES_POSTGRES: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-postgres-cdc"
-
-  MODULES_ORACLE: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oracle-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oracle-cdc"
-
-  MODULES_MONGODB: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mongodb-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mongodb-cdc"
-
-  MODULES_SQLSERVER: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-sqlserver-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-sqlserver-cdc"
-
-  MODULES_TIDB: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-tidb-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-tidb-cdc"
-
-  MODULES_OCEANBASE: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oceanbase-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oceanbase-cdc"
-
-  MODULES_DB2: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-db2-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-db2-cdc"
-
-  MODULES_VITESS: "\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-vitess-cdc,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-vitess-cdc"
-
-  MODULES_PIPELINE_E2E: "\
-    flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests"
-
-  MODULES_SOURCE_E2E: "\
-    flink-cdc-e2e-tests/flink-cdc-source-e2e-tests"
-
 jobs:
   license_check:
+    name: License Check
     runs-on: ubuntu-latest
     steps:
       - name: Check out repository code
@@ -116,260 +51,40 @@ jobs:
         run: mvn --no-snapshot-updates -B package -DskipTests
       - name: Run license check
         run: gem install rubyzip -v 2.3.0 && ./tools/ci/license_check.rb
-
-  compile_and_test:
-    needs: license_check
-    # Only run the CI pipeline for the flink-cdc-connectors repository
-    # if: github.repository == 'apache/flink-cdc-connectors'
-    runs-on: ubuntu-latest
+  ut:
     strategy:
       matrix:
-        jdk: [ 8 ]
-        module: [ "core",
-                  "pipeline_connectors",
-                  "mysql",
-                  "postgres",
-                  "oracle",
-                  "mongodb",
-                  "sqlserver",
-                  "tidb",
-                  "oceanbase",
-                  "db2",
-                  "vitess",
-                  "pipeline_e2e",
-                  "source_e2e"
-        ]
-    timeout-minutes: 120
-    env:
-      MVN_COMMON_OPTIONS: -Dmaven.wagon.http.pool=false \
-        -Dorg.slf4j.simpleLogger.showDateTime=true \
-        -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss.SSS \
-        -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
-        --no-snapshot-updates -B \
-        --settings /home/vsts/work/1/s/tools/ci/google-mirror-settings.xml \
-        -Dfast -Dlog.dir=/home/vsts/work/_temp/debug_files \
-        -Dlog4j.configurationFile=file:///home/vsts/work/1/s/tools/ci/log4j.properties
-    steps:
-      - run: echo "Running CI pipeline for JDK version ${{ matrix.jdk }}"
-
-      - name: Clean up disk space
-        run: |
-          set -euo pipefail
-
-          echo "Disk space before cleanup"
-          df -h
-
-          echo "Cleaning up disk space"
-          sudo rm -rf /usr/share/dotnet
-          sudo rm -rf /usr/local/lib/android
-          sudo rm -rf /opt/ghc
-          sudo rm -rf /opt/hostedtoolcache/CodeQL
-          sudo docker image prune --all --force
-
-          echo "Disk space after cleanup"
-          df -h
-
-      - name: Check out repository code
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-
-      - name: Set JDK
-        uses: actions/setup-java@v4
-        with:
-          java-version: ${{ matrix.jdk }}
-          distribution: 'temurin'
-          cache: 'maven'
-
-      - name: Set Maven 3.8.6
-        uses: stCarolas/setup-maven@v5
-        with:
-          maven-version: 3.8.6
-
-      - name: Compile and test ${{ matrix.module }}
-        timeout-minutes: 90
-        run: |
-          set -o pipefail
-
-          case ${{ matrix.module }} in
-            ("core")
-              modules=${{ env.MODULES_CORE }}
-            ;;
-            ("pipeline_connectors")
-              modules=${{ env.MODULES_PIPELINE_CONNECTORS }}
-            ;;
-            ("mysql")
-              modules=${{ env.MODULES_MYSQL }}
-            ;;
-            ("postgres")
-              modules=${{ env.MODULES_POSTGRES }}
-            ;;
-            ("oracle")
-              modules=${{ env.MODULES_ORACLE }}
-            ;;
-            ("mongodb")
-              modules=${{ env.MODULES_MONGODB }}
-            ;;
-            ("sqlserver")
-              modules=${{ env.MODULES_SQLSERVER }}
-            ;;
-            ("tidb")
-              modules=${{ env.MODULES_TIDB }}
-            ;;
-            ("oceanbase")
-              modules=${{ env.MODULES_OCEANBASE }}
-            ;;
-            ("db2")
-              modules=${{ env.MODULES_DB2 }}
-            ;;
-            ("vitess")
-              modules=${{ env.MODULES_VITESS }}
-            ;;
-            ("pipeline_e2e")
-              compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL }},${{ env.MODULES_POSTGRES }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_PIPELINE_E2E }}"
-              modules=${{ env.MODULES_PIPELINE_E2E }}
-            ;;
-            ("source_e2e")
-              compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL }},${{ env.MODULES_POSTGRES }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_SOURCE_E2E }}"
-              modules=${{ env.MODULES_SOURCE_E2E }}
-            ;;
-          esac
-
-          if [ ${{ matrix.module }} != "pipeline_e2e" ] && [ ${{ matrix.module }} != "source_e2e" ]; then
-            compile_modules=$modules
-          fi
-
-          mvn --no-snapshot-updates -B -DskipTests -pl $compile_modules -am install && mvn --no-snapshot-updates -B -pl $modules verify
-
-      - name: Print JVM thread dumps when cancelled
-        if: ${{ failure() }}
-        run: |
-          # ----------------------------------------------------------------------------
-          # Copyright 2023 The Netty Project
-          #
-          # ----------------------------------------------------------------------------
-          # Source: https://github.com/netty/netty/blob/main/.github/actions/thread-dump-jvms/action.yml
-          echo "$OSTYPE"
-          if [[ "$OSTYPE" == "linux-gnu"* ]] && command -v sudo &> /dev/null; then
-            echo "Setting up JVM thread dumps"
-            # use jattach so that Java processes in docker containers are also covered
-            # download jattach
-            curl -s -L -o /tmp/jattach https://github.com/apangin/jattach/releases/download/v2.1/jattach
-            if command -v sha256sum &> /dev/null; then
-              # verify hash of jattach binary
-              sha256sum -c <(echo "07885fdc782e02e7302c6d190f54c3930afa10a38140365adf54076ec1086a8e  /tmp/jattach") || exit 1
-            fi
-            chmod +x /tmp/jattach
-            for java_pid in $(sudo pgrep java); do
-              echo "----------------------- pid $java_pid -----------------------"
-              echo "command line: $(sudo cat /proc/$java_pid/cmdline | xargs -0 echo)"
-              sudo /tmp/jattach $java_pid jcmd VM.command_line || true
-              sudo /tmp/jattach $java_pid jcmd "Thread.print -l"
-              sudo /tmp/jattach $java_pid jcmd GC.heap_info || true
-            done
-          else
-            for java_pid in $(jps -q -J-XX:+PerfDisableSharedMem); do
-              echo "----------------------- pid $java_pid -----------------------"
-              jcmd $java_pid VM.command_line || true
-              jcmd $java_pid Thread.print -l
-              jcmd $java_pid GC.heap_info || true
-            done
-          fi
-          exit 0
-
-
-  migration_test_ut:
-    needs: license_check
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-      - name: Compile snapshot CDC version
-        run: mvn --no-snapshot-updates -B install -DskipTests
-      - name: Run migration tests
-        run: cd flink-cdc-migration-tests && mvn clean verify
-
-  pipeline_migration_test:
-    needs: migration_test_ut
-    runs-on: ubuntu-latest
+        java-version: [ '8' ]
+        module: [ 'core', 'pipeline_connectors', 'mysql', 'postgres', 'oracle', 'mongodb6', 'mongodb7', 'sqlserver', 'tidb', 'oceanbase', 'db2', 'vitess' ]
+    name: Unit Tests
+    uses: ./.github/workflows/flink_cdc_base.yml
+    with:
+      java-version: ${{ matrix.java-version }}
+      module: ${{ matrix.module }}
+  pipeline_e2e:
     strategy:
       matrix:
-        java-version: [ '8', '11' ]
-
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Ruby
-        uses: ruby/setup-ruby@v1
-        with:
-          ruby-version: 3.0
-          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
-      - uses: actions/setup-java@v4
-        with:
-          java-version: ${{ matrix.java-version }}
-          distribution: temurin
-          cache: maven
-      - name: Install dependencies
-        run: gem install terminal-table
-      - name: Prepare CDC versions
-        run: CDC_SOURCE_HOME=$PWD ruby tools/mig-test/prepare_libs.rb
-      - name: Prepare Flink distro
-        run: wget https://dlcdn.apache.org/flink/flink-1.18.1/flink-1.18.1-bin-scala_2.12.tgz && tar -xzvf flink-1.18.1-bin-scala_2.12.tgz
-        working-directory: ./tools/mig-test
-      - name: Patch Flink configs
-        run: FLINK_HOME=./flink-1.18.1/ ruby misc/patch_flink_conf.rb
-        working-directory: ./tools/mig-test
-      - name: Start containers
-        run: cd conf && docker compose up -d
-        working-directory: ./tools/mig-test
-      - name: Run migration tests
-        run: FLINK_HOME=./flink-1.18.1/ ruby run_migration_test.rb
-        working-directory: ./tools/mig-test
-      - name: Stop containers
-        if: always()
-        run: cd conf && docker compose down
-        working-directory: ./tools/mig-test
-
-  data_stream_migration_test:
-    needs: migration_test_ut
-    runs-on: ubuntu-latest
+        java-version: [ '8' ]
+        flink-version: ['1.17.2', '1.18.1', '1.19.1', '1.20.0']
+        module: [ 'pipeline_e2e' ]
+    name: Pipeline E2E Tests
+    uses: ./.github/workflows/flink_cdc_base.yml
+    with:
+      java-version: ${{ matrix.java-version }}
+      flink-version: ${{ matrix.flink-version }}
+      module: ${{ matrix.module }}
+  source_e2e:
     strategy:
       matrix:
-        java-version: [ '8', '11' ]
-
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Ruby
-        uses: ruby/setup-ruby@v1
-        with:
-          ruby-version: 3.0
-          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
-      - uses: actions/setup-java@v4
-        with:
-          java-version: ${{ matrix.java-version }}
-          distribution: temurin
-          cache: maven
-      - name: Install dependencies
-        run: gem install terminal-table
-      - name: Prepare CDC versions
-        run: CDC_SOURCE_HOME=$PWD ruby tools/mig-test/prepare_libs.rb
-      - name: Prepare Flink distro
-        run: wget https://dlcdn.apache.org/flink/flink-1.18.1/flink-1.18.1-bin-scala_2.12.tgz && tar -xzvf flink-1.18.1-bin-scala_2.12.tgz
-        working-directory: ./tools/mig-test
-      - name: Patch Flink configs
-        run: FLINK_HOME=./flink-1.18.1/ ruby misc/patch_flink_conf.rb
-        working-directory: ./tools/mig-test
-      - name: Compile Dummy DataStream Jobs
-        run: cd datastream && ruby compile_jobs.rb
-        working-directory: ./tools/mig-test
-      - name: Start containers
-        run: cd conf && docker compose up -d
-        working-directory: ./tools/mig-test
-      - name: Run migration tests
-        run: cd datastream && FLINK_HOME=../flink-1.18.1/ ruby run_migration_test.rb
-        working-directory: ./tools/mig-test
-      - name: Stop containers
-        if: always()
-        run: cd conf && docker compose down
-        working-directory: ./tools/mig-test
+        java-version: [ '8' ]
+        flink-version: ['1.16.3', '1.17.2', '1.18.1', '1.19.1', '1.20.0']
+        module: [ 'source_e2e' ]
+    name: Source E2E Tests
+    uses: ./.github/workflows/flink_cdc_base.yml
+    with:
+      java-version: ${{ matrix.java-version }}
+      flink-version: ${{ matrix.flink-version }}
+      module: ${{ matrix.module }}
+  migration_test:
+    name: Migration Tests
+    uses: ./.github/workflows/flink_cdc_migration_test.yml
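Note on the new structure: every job above now delegates to a reusable workflow through a job-level `uses:`, replacing the inline `runs-on`/`steps` blocks, and each matrix entry becomes an independent parallel call. The called file, `flink_cdc_base.yml`, is one of the other two files changed in this commit and is not shown in this diff; the sketch below is only a hypothetical reconstruction of the `workflow_call` interface its callers imply. The input names (`java-version`, `flink-version`, `module`) are taken from the `with:` blocks above; the descriptions, defaults, and job body are assumptions.

# Hypothetical sketch of .github/workflows/flink_cdc_base.yml -- not the
# actual file from this commit, just the interface implied by its callers.
name: Flink CDC Base Workflow
on:
  workflow_call:
    inputs:
      java-version:
        description: 'JDK version to compile and test with'
        required: false
        type: string
        default: '8'
      flink-version:
        description: 'Flink version for E2E modules (unused by unit tests)'
        required: false
        type: string
        default: ''
      module:
        description: 'Module group to build and verify'
        required: true
        type: string

jobs:
  compile_and_test:
    runs-on: ubuntu-latest
    timeout-minutes: 120
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true
      - uses: actions/setup-java@v4
        with:
          java-version: ${{ inputs.java-version }}
          distribution: temurin
          cache: maven
      # ...module selection and `mvn ... verify`, presumably carried over
      # from the deleted compile_and_test job above

The practical effect of the split is that the module lists and the Maven invocation live in one place, so this caller shrinks to a handful of matrix declarations.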

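A side note on the `concurrency` block, which survives the change even though the comments explaining it were deleted. A worked example of how the group key resolves (the PR number is hypothetical):

# For a pull_request event on a hypothetical PR #1234, the group key evaluates
# to "<workflow name>-pull_request-1234", so every new push to that PR cancels
# the run still in progress. For push and schedule events, github.event.number
# is empty, `||` falls through to github.run_id, and each run lands in its own
# group -- only re-running that same workflow run can cancel it.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.number || github.run_id }}
  cancel-in-progress: true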