
Commit d18932d

Merge branch 'master' into FLINK-37479

2 parents f7104b3 + 5eaf9eb
File tree: 119 files changed, +10125 −458 lines


.github/labeler.yml

Lines changed: 2 additions & 0 deletions

@@ -95,3 +95,5 @@ maxcompute-pipeline-connector:
   - flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-maxcompute/**/*
 iceberg-pipeline-connector:
   - flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-iceberg/**/*
+postgres-pipeline-connector:
+  - flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-postgres/**/*
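For context, this mapping is consumed by the standard actions/labeler action: each top-level key is a PR label, and a pull request touching any file that matches one of the listed globs receives that label. A minimal sketch of a caller workflow, assuming the repository wires the labeler up in the usual way (the workflow and job names below are illustrative, not taken from this commit):

# Hypothetical labeler workflow; assumes the standard actions/labeler action,
# which reads .github/labeler.yml by default.
name: "Label pull requests"
on: pull_request_target
jobs:
  label:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
    steps:
      - uses: actions/labeler@v5   # applies e.g. postgres-pipeline-connector when matching files change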

.github/workflows/flink_cdc_base.yml

Lines changed: 101 additions & 32 deletions

@@ -18,18 +18,18 @@ name: Flink CDC Base Workflow
 on:
   workflow_call:
     inputs:
-      java-version:
+      java-versions:
         description: "Jdk versions to test against."
         required: false
         type: string
         default: "['8']"
-      flink-version:
+      flink-versions:
         description: "Flink versions to test against."
         required: false
         type: string
         default: "['generic']"
-      module:
-        description: "Flink CDC module to test against."
+      modules:
+        description: "Flink CDC modules to test against."
         required: true
         type: string
       parallelism:
@@ -48,27 +48,38 @@ env:
     flink-cdc-common,\
     flink-cdc-composer,\
     flink-cdc-runtime,\
-    flink-cdc-connect/flink-cdc-source-connectors/flink-cdc-base"
+    flink-cdc-connect/flink-cdc-source-connectors/flink-cdc-base,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values"

   MODULES_PIPELINE_CONNECTORS: "\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql,\
     flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-starrocks,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-kafka,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon,\
     flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-elasticsearch,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-iceberg,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-kafka,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-maxcompute,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql,\
     flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-oceanbase,\
-    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-maxcompute"
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-maxcompute,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-postgres,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-starrocks,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-fluss,\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values"

-  MODULES_MYSQL: "\
+  MODULES_MYSQL_SOURCE: "\
     flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mysql-cdc,\
     flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mysql-cdc"

-  MODULES_POSTGRES: "\
+  MODULES_MYSQL_PIPELINE: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql"
+
+  MODULES_POSTGRES_SOURCE: "\
     flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc,\
     flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-postgres-cdc"

+  MODULES_POSTGRES_PIPELINE: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-postgres"
+
   MODULES_ORACLE: "\
     flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oracle-cdc,\
     flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oracle-cdc"
@@ -85,10 +96,13 @@ env:
     flink-cdc-connect/flink-cdc-source-connectors/flink-connector-tidb-cdc,\
     flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-tidb-cdc"

-  MODULES_OCEANBASE: "\
+  MODULES_OCEANBASE_SOURCE: "\
     flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oceanbase-cdc,\
     flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oceanbase-cdc"

+  MODULES_OCEANBASE_PIPELINE: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-oceanbase"
+
   MODULES_DB2: "\
     flink-cdc-connect/flink-cdc-source-connectors/flink-connector-db2-cdc,\
     flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-db2-cdc"
@@ -97,21 +111,46 @@ env:
     flink-cdc-connect/flink-cdc-source-connectors/flink-connector-vitess-cdc,\
     flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-vitess-cdc"

+  MODULES_DORIS: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris"
+
+  MODULES_STARROCKS: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-starrocks"
+
+  MODULES_ICEBERG: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-iceberg"
+
+  MODULES_KAFKA: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-kafka"
+
+  MODULES_PAIMON: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon"
+
+  MODULES_ELASTICSEARCH: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-elasticsearch"
+
+  MODULES_MAXCOMPUTE: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-maxcompute"
+
+  MODULES_FLUSS: "\
+    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-fluss"
+
   MODULES_PIPELINE_E2E: "\
     flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests"

   MODULES_SOURCE_E2E: "\
     flink-cdc-e2e-tests/flink-cdc-source-e2e-tests"

 jobs:
-  compile_and_test:
+  test:
     runs-on: ubuntu-latest
     timeout-minutes: 120
     strategy:
       fail-fast: false
       matrix:
-        java-version: ${{ fromJSON(inputs.java-version) }}
-        flink-version: ${{ fromJSON(inputs.flink-version) }}
+        java-version: ${{ fromJSON(inputs.java-versions) }}
+        flink-version: ${{ fromJSON(inputs.flink-versions) }}
+        module: ${{ fromJSON(inputs.modules) }}
     steps:
       - run: echo "Running CI pipeline for JDK version ${{ matrix.java-version }}"
       - name: Clean up disk space
@@ -149,25 +188,28 @@ jobs:
           maven-version: 3.8.6

       - name: Compile and test
-        timeout-minutes: 60
+        timeout-minutes: 90
         run: |
           . .github/workflows/utils.sh
           jvm_timezone=$(random_timezone)
           echo "JVM timezone is set to $jvm_timezone"
           set -o pipefail

-          case ${{ inputs.module }} in
+          case ${{ matrix.module }} in
             ("core")
               modules=${{ env.MODULES_CORE }}
               ;;
-            ("pipeline_connectors")
-              modules=${{ env.MODULES_PIPELINE_CONNECTORS }}
+            ("mysql-source")
+              modules=${{ env.MODULES_MYSQL_SOURCE }}
+              ;;
+            ("mysql-pipeline")
+              modules=${{ env.MODULES_MYSQL_PIPELINE }}
               ;;
-            ("mysql")
-              modules=${{ env.MODULES_MYSQL }}
+            ("postgres-source")
+              modules=${{ env.MODULES_POSTGRES_SOURCE }}
               ;;
-            ("postgres")
-              modules=${{ env.MODULES_POSTGRES }}
+            ("postgres-pipeline")
+              modules=${{ env.MODULES_POSTGRES_PIPELINE }}
               ;;
             ("oracle")
               modules=${{ env.MODULES_ORACLE }}
@@ -184,34 +226,61 @@ jobs:
             ("tidb")
               modules=${{ env.MODULES_TIDB }}
               ;;
-            ("oceanbase")
-              modules=${{ env.MODULES_OCEANBASE }}
+            ("oceanbase-source")
+              modules=${{ env.MODULES_OCEANBASE_SOURCE }}
+              ;;
+            ("oceanbase-pipeline")
+              modules=${{ env.MODULES_OCEANBASE_PIPELINE }}
               ;;
             ("db2")
               modules=${{ env.MODULES_DB2 }}
               ;;
             ("vitess")
               modules=${{ env.MODULES_VITESS }}
               ;;
+            ("doris")
+              modules=${{ env.MODULES_DORIS }}
+              ;;
+            ("elasticsearch")
+              modules=${{ env.MODULES_ELASTICSEARCH }}
+              ;;
+            ("iceberg")
+              modules=${{ env.MODULES_ICEBERG }}
+              ;;
+            ("kafka")
+              modules=${{ env.MODULES_KAFKA }}
+              ;;
+            ("maxcompute")
+              modules=${{ env.MODULES_MAXCOMPUTE }}
+              ;;
+            ("paimon")
+              modules=${{ env.MODULES_PAIMON }}
+              ;;
+            ("starrocks")
+              modules=${{ env.MODULES_STARROCKS }}
+              ;;
+            ("fluss")
+              modules=${{ env.MODULES_FLUSS }}
+              ;;
             ("pipeline_e2e")
-              compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL }},${{ env.MODULES_POSTGRES }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_PIPELINE_E2E }}"
+              compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL_SOURCE }},${{ env.MODULES_POSTGRES_SOURCE }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE_SOURCE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_PIPELINE_E2E }}"
               modules=${{ env.MODULES_PIPELINE_E2E }}
               ;;
             ("source_e2e")
-              compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL }},${{ env.MODULES_POSTGRES }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_SOURCE_E2E }}"
+              compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL_SOURCE }},${{ env.MODULES_POSTGRES_SOURCE }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE_SOURCE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_SOURCE_E2E }}"
               modules=${{ env.MODULES_SOURCE_E2E }}
               ;;
           esac

-          if [ ${{ inputs.module }} != "pipeline_e2e" ] && [ ${{ inputs.module }} != "source_e2e" ]; then
+          if [ ${{ matrix.module }} != "pipeline_e2e" ] && [ ${{ matrix.module }} != "source_e2e" ]; then
            compile_modules=$modules
          fi

          build_maven_parameter=""

-          if [ ${{ inputs.module }} == "mongodb6" ]; then
+          if [ ${{ matrix.module }} == "mongodb6" ]; then
            build_maven_parameter="-DspecifiedMongoVersion=6.0.16"
-          elif [ ${{ inputs.module }} == "mongodb7" ]; then
+          elif [ ${{ matrix.module }} == "mongodb7" ]; then
            build_maven_parameter="-DspecifiedMongoVersion=7.0.12"
          fi
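For orientation, each of the renamed plural inputs is a JSON-encoded list that the reusable workflow expands with fromJSON into a matrix dimension, so every java-version × flink-version × module combination becomes its own test job. A minimal sketch of a caller, under the defaults shown above (the job name and value lists here are illustrative only, not taken from this commit):

# Illustrative caller of the reusable workflow above; names and values are examples.
jobs:
  example-ut:
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
      java-versions: "[8]"                               # fromJSON -> matrix.java-version
      flink-versions: "['1.19.2', '1.20.1']"             # fromJSON -> matrix.flink-version
      modules: "['mysql-pipeline', 'postgres-pipeline']" # fromJSON -> matrix.module; 1 x 2 x 2 = 4 jobs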

.github/workflows/flink_cdc_ci.yml

Lines changed: 22 additions & 14 deletions

@@ -57,16 +57,24 @@ jobs:
         run: mvn --no-snapshot-updates -B package -DskipTests
       - name: Run license check
         run: gem install rubyzip -v 2.3.0 && ./tools/ci/license_check.rb
-  ut:
-    strategy:
-      fail-fast: false
-      matrix:
-        module: [ 'core', 'pipeline_connectors', 'mysql', 'postgres', 'oracle', 'mongodb6', 'mongodb7', 'sqlserver', 'tidb', 'oceanbase', 'db2', 'vitess' ]
-    name: Unit Tests
+  common:
+    name: Common Unit Tests
+    uses: ./.github/workflows/flink_cdc_base.yml
+    with:
+      java-versions: "[8]"
+      modules: "['core']"
+  pipeline-ut:
+    name: Pipeline Unit Tests
+    uses: ./.github/workflows/flink_cdc_base.yml
+    with:
+      java-versions: "[8]"
+      modules: "['mysql-pipeline', 'postgres-pipeline', 'oceanbase-pipeline', 'doris', 'elasticsearch', 'iceberg', 'kafka', 'maxcompute', 'paimon', 'starrocks', 'fluss']"
+  source-ut:
+    name: Source Unit Tests
     uses: ./.github/workflows/flink_cdc_base.yml
     with:
-      java-version: "[8]"
-      module: ${{ matrix.module }}
+      java-versions: "[8]"
+      modules: "['mysql-source', 'postgres-source', 'oracle', 'mongodb6', 'mongodb7', 'sqlserver', 'tidb', 'oceanbase-source', 'db2', 'vitess']"
   pipeline_e2e:
     strategy:
       fail-fast: false
@@ -75,14 +83,14 @@ jobs:
     name: Pipeline E2E Tests (${{ matrix.parallelism }}-Parallelism)
     uses: ./.github/workflows/flink_cdc_base.yml
     with:
-      java-version: "[8]"
-      flink-version: "['1.19.2', '1.20.1']"
-      module: pipeline_e2e
+      java-versions: "[8]"
+      flink-versions: "['1.19.2', '1.20.1']"
+      modules: "['pipeline_e2e']"
       parallelism: ${{ matrix.parallelism }}
   source_e2e:
     name: Source E2E Tests
     uses: ./.github/workflows/flink_cdc_base.yml
     with:
-      java-version: "[8]"
-      flink-version: "['1.19.2', '1.20.1']"
-      module: source_e2e
+      java-versions: "[8]"
+      flink-versions: "['1.19.2', '1.20.1']"
+      modules: "['source_e2e']"

.github/workflows/flink_cdc_ci_nightly.yml

Lines changed: 25 additions & 15 deletions

@@ -47,35 +47,45 @@ jobs:
         run: mvn --no-snapshot-updates -B package -DskipTests
       - name: Run license check
         run: gem install rubyzip -v 2.3.0 && ./tools/ci/license_check.rb
-  ut:
+  common:
     if: github.repository == 'apache/flink-cdc'
-    strategy:
-      fail-fast: false
-      matrix:
-        module: [ 'core', 'pipeline_connectors', 'mysql', 'postgres', 'oracle', 'mongodb6', 'mongodb7', 'sqlserver', 'tidb', 'oceanbase', 'db2', 'vitess' ]
-    name: Unit Tests
+    name: Common Unit Tests
+    uses: ./.github/workflows/flink_cdc_base.yml
+    with:
+      java-versions: "[11]"
+      modules: "['core']"
+  pipeline-ut:
+    if: github.repository == 'apache/flink-cdc'
+    name: Pipeline Unit Tests
+    uses: ./.github/workflows/flink_cdc_base.yml
+    with:
+      java-versions: "[11]"
+      modules: "['mysql-pipeline', 'postgres-pipeline', 'oceanbase-pipeline', 'doris', 'elasticsearch', 'iceberg', 'kafka', 'maxcompute', 'paimon', 'starrocks', 'fluss']"
+  source-ut:
+    if: github.repository == 'apache/flink-cdc'
+    name: Source Unit Tests
     uses: ./.github/workflows/flink_cdc_base.yml
     with:
-      java-version: "[11]"
-      module: ${{ matrix.module }}
+      java-versions: "[11]"
+      modules: "['mysql-source', 'postgres-source', 'oracle', 'mongodb6', 'mongodb7', 'sqlserver', 'tidb', 'oceanbase-source', 'db2', 'vitess']"
   pipeline_e2e:
     if: github.repository == 'apache/flink-cdc'
     strategy:
       fail-fast: false
       matrix:
         parallelism: [ 1, 4 ]
-    name: Pipeline E2E Tests (${{ matrix.parallelism }} Parallelism)
+    name: Pipeline E2E Tests (${{ matrix.parallelism }}-Parallelism)
     uses: ./.github/workflows/flink_cdc_base.yml
     with:
-      java-version: "[11]"
-      flink-version: "['1.19.2', '1.20.1']"
-      module: pipeline_e2e
+      java-versions: "[11]"
+      flink-versions: "['1.19.2', '1.20.1']"
+      modules: "['pipeline_e2e']"
       parallelism: ${{ matrix.parallelism }}
   source_e2e:
     if: github.repository == 'apache/flink-cdc'
     name: Source E2E Tests
     uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-version: "[11]"
-      flink-version: "['1.19.2', '1.20.1']"
-      module: source_e2e
+      java-versions: "[11]"
+      flink-versions: "['1.19.2', '1.20.1']"
+      modules: "['source_e2e']"
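The nightly workflow runs the same job set as the regular CI, but on JDK 11. Its trigger is not part of the hunks shown here; a nightly workflow of this kind is typically driven by a schedule, roughly like the hypothetical sketch below (the cron expression is illustrative, not taken from the file):

# Hypothetical trigger block for a nightly workflow; the real trigger in
# flink_cdc_ci_nightly.yml is outside the hunks shown in this commit.
on:
  schedule:
    - cron: "0 0 * * *"    # once per day, at midnight UTC
  workflow_dispatch: {}    # allow manual runs for debugging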

docs/content.zh/docs/connectors/flink-sources/mysql-cdc.md

Lines changed: 14 additions & 2 deletions

@@ -335,7 +335,7 @@ Flink SQL> SELECT * FROM orders;
       <td style="word-wrap: break-word;">1000</td>
       <td>Integer</td>
       <td>
-        During a snapshot, the connector queries each included table to produce a read event for every row in that table. This option determines whether the MySQL connection pulls all results for a table into memory (fast, but memory-hungry) or streams them instead (possibly slower, but suitable for very large tables). The value is the minimum number of rows a table must contain before the connector streams results; the default is 1000. Set it to `0` to skip all table size checks and always stream results during snapshots.</td>
+        Only supported in CDC 1.x. During a snapshot, the connector queries each included table to produce a read event for every row in that table. This option determines whether the MySQL connection pulls all results for a table into memory (fast, but memory-hungry) or streams them instead (possibly slower, but suitable for very large tables). The value is the minimum number of rows a table must contain before the connector streams results; the default is 1000. Set it to `0` to skip all table size checks and always stream results during snapshots.</td>
     </tr>
     <tr>
       <td>connect.timeout</td>
@@ -361,7 +361,7 @@ Flink SQL> SELECT * FROM orders;
     <tr>
       <td>jdbc.properties.*</td>
       <td>optional</td>
-      <td style="word-wrap: break-word;">20</td>
+      <td style="word-wrap: break-word;"></td>
       <td>String</td>
       <td>Option to pass custom JDBC URL properties. Users can pass custom properties such as 'jdbc.properties.useSSL' = 'false'.</td>
     </tr>
@@ -435,6 +435,18 @@ Flink SQL> SELECT * FROM orders;
         Warning: skipping backfill may lead to data inconsistency, because some binlog events produced during the snapshot phase may be replayed (only at-least-once is guaranteed).
         For example, re-updating a value already updated during the snapshot phase, or re-deleting data already deleted during it. Such replayed binlog events need special handling.
     </tr>
+    <tr>
+      <td>use.legacy.json.format</td>
+      <td>optional</td>
+      <td style="word-wrap: break-word;">true</td>
+      <td>Boolean</td>
+      <td>Whether to use the legacy JSON format when converting JSON-typed data from the binlog. <br>
+        If 'use.legacy.json.format' = 'true', spaces before values and after commas are removed when converting JSON-typed data from the binlog. For example,
+        the binlog JSON value {"key1": "value1", "key2": "value2"} is converted to {"key1":"value1","key2":"value2"}.
+        If 'use.legacy.json.format' = 'false', it is converted to {"key1": "value1", "key2": "value2"} instead, i.e. the spaces before keys and values are preserved.
+      </td>
+    </tr>
   </tbody>
 </table>
 </div>
