name: Matrix testing

on:
  push:
    branches: [ "OPENGPDB_STABLE", "OPENGPDB_6_27_STABLE", "MDB_6_25_STABLE_YEZZEY" ]
  pull_request:
    branches: [ "OPENGPDB_STABLE", "OPENGPDB_6_27_STABLE", "MDB_6_25_STABLE_YEZZEY" ]
  # Manual runs may pass a comma-separated test selection; the prepare-test-matrix
  # job below reads it as github.event.inputs.test_selection (empty or "all" runs everything).
  workflow_dispatch:
    inputs:
      test_selection:
        description: "Comma-separated list of test names to run, or 'all'"
        required: false
        default: "all"

jobs:
  ## ======================================================================
  ## Job: prepare-test-matrix
  ## ======================================================================
  prepare-test-matrix:
    runs-on: ubuntu-20.04
    outputs:
      test-matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - id: set-matrix
        run: |
          echo "=== Matrix Preparation Diagnostics ==="
          echo "Event type: ${{ github.event_name }}"
          echo "Test selection input: '${{ github.event.inputs.test_selection }}'"
          stat -fc %T /sys/fs/cgroup/

          # Define defaults
          DEFAULT_NUM_PRIMARY_MIRROR_PAIRS=3
          DEFAULT_ENABLE_CGROUPS=false
          DEFAULT_ENABLE_CORE_CHECK=true
          DEFAULT_PG_SETTINGS_OPTIMIZER=""

          # Define base test configurations
          ALL_TESTS='{
            "include": [
              {"test":"ic-good-opt-off",
               "make_configs":["src/test/regress:installcheck-good"],
               "pg_settings":{"optimizer":"off"}
              },
              {"test":"ic-good-opt-on",
               "make_configs":["src/test/regress:installcheck-good"],
               "pg_settings":{"optimizer":"on"}
              },
              {"test":"ic-resgroup",
               "make_configs":["src/test/isolation2:installcheck-resgroup"],
               "enable_cgroups":true
              },
              {"test":"ic-contrib",
               "make_configs":["contrib/auto_explain:installcheck",
                               "contrib/citext:installcheck",
                               "contrib/btree_gin:installcheck",
                               "contrib/file_fdw:installcheck",
                               "contrib/formatter_fixedwidth:installcheck",
                               "contrib/extprotocol:installcheck",
                               "contrib/dblink:installcheck",
                               "contrib/pg_trgm:installcheck",
                               "contrib/indexscan:installcheck",
                               "contrib/hstore:installcheck",
                               "contrib/pgcrypto:installcheck",
                               "contrib/tablefunc:installcheck",
                               "contrib/passwordcheck:installcheck",
                               "contrib/sslinfo:installcheck"]
              },
              {"test":"ic-parallel-retrieve-cursor",
               "make_configs":["src/test/isolation2:installcheck-parallel-retrieve-cursor"]
              },
              {"test":"ic-mirrorless",
               "make_configs":["src/test/isolation2:installcheck-mirrorless"]
              }
            ]
          }'

          # Function to apply defaults
          apply_defaults() {
            echo "$1" | jq --arg npm "$DEFAULT_NUM_PRIMARY_MIRROR_PAIRS" \
                           --argjson ec "$DEFAULT_ENABLE_CGROUPS" \
                           --argjson ecc "$DEFAULT_ENABLE_CORE_CHECK" \
                           --arg opt "$DEFAULT_PG_SETTINGS_OPTIMIZER" \
              'def get_defaults:
                {
                  num_primary_mirror_pairs: ($npm|tonumber),
                  enable_cgroups: $ec,
                  enable_core_check: $ecc,
                  pg_settings: {
                    optimizer: $opt
                  }
                };
                get_defaults * .'
          }
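
          # Illustration (not executed): for the "ic-good-opt-off" entry above,
          # apply_defaults produces roughly
          #   {"test":"ic-good-opt-off",
          #    "make_configs":["src/test/regress:installcheck-good"],
          #    "pg_settings":{"optimizer":"off"},
          #    "num_primary_mirror_pairs":3,
          #    "enable_cgroups":false,
          #    "enable_core_check":true}
          # because `get_defaults * .` deep-merges the defaults with the entry,
          # letting the entry's explicit fields win on conflicts.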

          # Extract all valid test names from ALL_TESTS
          VALID_TESTS=$(echo "$ALL_TESTS" | jq -r '.include[].test')

          # Parse input test selection
          IFS=',' read -ra SELECTED_TESTS <<< "${{ github.event.inputs.test_selection }}"

          # Default to all tests if selection is empty or 'all'
          if [[ "${SELECTED_TESTS[*]}" == "all" || -z "${SELECTED_TESTS[*]}" ]]; then
            mapfile -t SELECTED_TESTS <<< "$VALID_TESTS"
          fi

          # Validate and filter selected tests
          INVALID_TESTS=()
          FILTERED_TESTS=()
          for TEST in "${SELECTED_TESTS[@]}"; do
            TEST=$(echo "$TEST" | tr -d '[:space:]')  # Trim whitespace
            if echo "$VALID_TESTS" | grep -qx "$TEST"; then
              FILTERED_TESTS+=("$TEST")
            else
              INVALID_TESTS+=("$TEST")
            fi
          done

          # Handle invalid tests
          if [[ ${#INVALID_TESTS[@]} -gt 0 ]]; then
            echo "::error::Invalid test(s) selected: ${INVALID_TESTS[*]}"
            echo "Valid tests are: $(echo "$VALID_TESTS" | tr '\n' ',')"
            exit 1
          fi

          # Build result JSON with defaults applied
          RESULT='{"include":['
          FIRST=true
          for TEST in "${FILTERED_TESTS[@]}"; do
            CONFIG=$(jq -c --arg test "$TEST" '.include[] | select(.test == $test)' <<< "$ALL_TESTS")
            FILTERED_WITH_DEFAULTS=$(apply_defaults "$CONFIG")
            if [[ "$FIRST" == true ]]; then
              FIRST=false
            else
              RESULT="${RESULT},"
            fi
            RESULT="${RESULT}${FILTERED_WITH_DEFAULTS}"
          done
          RESULT="${RESULT}]}"

          # Output the matrix for GitHub Actions
          echo "Final matrix configuration:"
          echo "$RESULT" | jq .

          # Write the multiline matrix value to the step output using the
          # "name<<EOF ... EOF" block syntax expected by $GITHUB_OUTPUT
          {
            echo "matrix<<EOF"
            echo "$RESULT"
            echo "EOF"
          } >> "$GITHUB_OUTPUT"
          echo "=== Matrix Preparation Complete ==="

  ## ======================================================================
  ## Job: build
  ## ======================================================================
  build:
    env:
      JOB_TYPE: build
    runs-on: ubuntu-20.04
    timeout-minutes: 120
    outputs:
      build_timestamp: ${{ steps.set_timestamp.outputs.timestamp }}
    steps:
      - uses: actions/checkout@v4
      # The build_timestamp output above needs a step with id "set_timestamp";
      # record one here so the test job can use it in artifact names.
      - name: Set build timestamp
        id: set_timestamp
        run: echo "timestamp=$(date -u +'%Y%m%d%H%M%S')" >> "$GITHUB_OUTPUT"
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./docker/test/Dockerfile
          tags: test_test:1234
          outputs: type=docker,dest=/tmp/myimage.tar
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: myimage
          path: /tmp/myimage.tar

  ## ======================================================================
  ## Job: test
  ## ======================================================================
  test:
    name: ${{ matrix.test }}
    needs: [build, prepare-test-matrix]
    runs-on: ubuntu-20.04
    timeout-minutes: 120
    # actionlint-allow matrix[*].pg_settings
    strategy:
      fail-fast: false  # Continue with other tests if one fails
      matrix: ${{ fromJson(needs.prepare-test-matrix.outputs.test-matrix) }}
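    # Each "include" entry of the prepared matrix becomes one job instance;
    # its fields are read below as matrix.test, matrix.make_configs,
    # matrix.pg_settings and matrix.enable_cgroups.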
    steps:
      - name: Check disk space
        run: df . -h

      - name: Free disk space
        run: |
          sudo docker rmi $(docker image ls -aq) >/dev/null 2>&1 || true
          sudo rm -rf \
            /usr/share/dotnet /usr/local/lib/android /opt/ghc \
            /usr/local/share/powershell /usr/share/swift /usr/local/.ghcup \
            /usr/lib/jvm || true
          echo "some directories deleted"
          sudo apt install aptitude -y >/dev/null 2>&1
          sudo aptitude purge aria2 ansible azure-cli shellcheck rpm xorriso zsync \
            esl-erlang firefox gfortran-8 gfortran-9 google-chrome-stable \
            google-cloud-sdk imagemagick \
            libmagickcore-dev libmagickwand-dev libmagic-dev ant ant-optional kubectl \
            mercurial apt-transport-https mono-complete libmysqlclient \
            unixodbc-dev yarn chrpath libssl-dev libxft-dev \
            libfreetype6 libfreetype6-dev libfontconfig1 libfontconfig1-dev \
            snmp pollinate libpq-dev postgresql-client powershell ruby-full \
            sphinxsearch subversion mongodb-org azure-cli microsoft-edge-stable \
            -y -f >/dev/null 2>&1
          sudo aptitude purge google-cloud-sdk -f -y >/dev/null 2>&1
          sudo aptitude purge microsoft-edge-stable -f -y >/dev/null 2>&1 || true
          sudo apt purge microsoft-edge-stable -f -y >/dev/null 2>&1 || true
          sudo aptitude purge '~n ^mysql' -f -y >/dev/null 2>&1
          sudo aptitude purge '~n ^php' -f -y >/dev/null 2>&1
          sudo aptitude purge '~n ^dotnet' -f -y >/dev/null 2>&1
          sudo apt-get autoremove -y >/dev/null 2>&1
          sudo apt-get autoclean -y >/dev/null 2>&1
          echo "some packages purged"

      - name: Check disk space
        run: |
          sudo dpkg-query -Wf '${Installed-Size}\t${Package}\n' | sort -nr | head
          df . -h
          sudo du /usr/ -hx -d 4 --threshold=1G | sort -hr | head

      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: myimage
          path: /tmp

      - name: Load Docker image
        run: |
          docker image ls -a
          docker load --input /tmp/myimage.tar
          docker image ls -a

      - name: "Run Tests: ${{ matrix.test }}"
        run: |
          set -o pipefail

          # Initialize test status
          overall_status=0

          # Create logs directory structure
          mkdir -p build-logs/details

          # Create results directory structure
          mkdir -p build-results

          # Set PostgreSQL options if defined
          PG_OPTS=""
          if [[ "${{ matrix.pg_settings.optimizer != '' }}" == "true" ]]; then
            PG_OPTS="$PG_OPTS -c optimizer=${{ matrix.pg_settings.optimizer }}"
          fi
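          # For example (illustrative): with optimizer set to "off" this becomes
          #   PG_OPTS=" -c optimizer=off"
          # which is passed into the container as PGOPTIONS below.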

          # Set resgroup options if defined
          TEST_CGROUP=""
          if [ "${{ matrix.enable_cgroups }}" = "true" ]; then
            TEST_CGROUP="true"
          fi

          # Read configs into array
          IFS=' ' read -r -a configs <<< "${{ join(matrix.make_configs, ' ') }}"
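          # Illustration (not executed): for ic-good-opt-off the join() above expands to
          #   "src/test/regress:installcheck-good"
          # and each element is split below on ':' into a make directory and target.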
echo "=== Starting test execution for ${{ matrix.test }} ==="
echo "Number of configurations to execute: ${#configs[@]}"
echo ""
# Execute each config separately
for ((i=0; i<${#configs[@]}; i++)); do
config="${configs[$i]}"
IFS=':' read -r dir target <<< "$config"
echo "=== Executing configuration $((i+1))/${#configs[@]} ==="
echo "Make command: make -C $dir $target"
echo "Environment:"
echo "- PGOPTIONS: ${PG_OPTS}"
config_log="build-logs/details/make-${{ matrix.test }}-config$i.log"
docker run \
-e TEST_DIR="${dir}" \
-e TEST_TARGET="${target}" \
-e PGOPTIONS="${PG_OPTS}" \
-e TEST_CGROUP="${TEST_CGROUP}" \
--name test_test_container test_test:1234 \
--privileged \
2>&1 | tee "$config_log"
RESULTS_PATH="build-results/results-${{ matrix.test }}-${dir}"
mkdir -p $RESULTS_PATH
docker cp test_test_container:/home/gpadmin $RESULTS_PATH
docker rm test_test_container
done
echo "=== Test execution completed ==="
echo "Log files:"
ls -l build-logs/details/
# Store number of configurations for parsing step
echo "NUM_CONFIGS=${#configs[@]}" >> "$GITHUB_ENV"
# Report overall status
if [ $overall_status -eq 0 ]; then
echo "All test executions completed successfully"
else
echo "::warning::Some test executions failed, check individual logs for details"
fi
exit $overall_status

      - name: Check and Display Diffs
        if: always()
        run: |
          # Search for regression.diffs recursively
          found_file=$(find . -type f -name "regression.diffs" | head -n 1)
          if [[ -n "$found_file" ]]; then
            echo "Found regression.diffs at: $found_file"
            cat "$found_file"
          else
            echo "No regression.diffs file found in the hierarchy."
          fi

      - name: Upload test logs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: test-logs-${{ matrix.test }}-${{ needs.build.outputs.build_timestamp }}
          path: |
            build-logs/

      - name: Upload test results files
        uses: actions/upload-artifact@v4
        with:
          name: results-${{ matrix.test }}-${{ needs.build.outputs.build_timestamp }}
          path: |
            **/regression.out
            **/regression.diffs
            **/results/

      - name: Upload test regression logs
        if: failure() || cancelled()
        uses: actions/upload-artifact@v4
        with:
          name: regression-logs-${{ matrix.test }}-${{ needs.build.outputs.build_timestamp }}
          path: |
            **/regression.out
            **/regression.diffs
            **/results/
            gpAux/gpdemo/datadirs/standby/log/
            gpAux/gpdemo/datadirs/qddir/demoDataDir-1/log/
            gpAux/gpdemo/datadirs/dbfast1/demoDataDir0/log/
            gpAux/gpdemo/datadirs/dbfast2/demoDataDir1/log/
            gpAux/gpdemo/datadirs/dbfast3/demoDataDir2/log/
            gpAux/gpdemo/datadirs/dbfast_mirror1/demoDataDir0/log/
            gpAux/gpdemo/datadirs/dbfast_mirror2/demoDataDir1/log/
            gpAux/gpdemo/datadirs/dbfast_mirror3/demoDataDir2/log/

  ## ======================================================================
  ## Job: report
  ## ======================================================================
  report:
    name: Generate GPDB Build Report
    needs: [build, prepare-test-matrix, test]
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Generate Final Report
        run: |
          {
            echo "# Apache Cloudberry Build Pipeline Report"
            echo "## Job Status"
            echo "- Build Job: ${{ needs.build.result }}"
            echo "- Test Job: ${{ needs.test.result }}"
            echo "- Completion Time: $(date -u +'%Y-%m-%d %H:%M:%S UTC')"
            if [[ "${{ needs.build.result }}" == "success" && "${{ needs.test.result }}" == "success" ]]; then
              echo "✅ Pipeline completed successfully"
            else
              echo "⚠️ Pipeline completed with failures"
              if [[ "${{ needs.build.result }}" != "success" ]]; then
                echo "### Build Job Failure"
                echo "Check build logs for details"
              fi
              if [[ "${{ needs.test.result }}" != "success" ]]; then
                echo "### Test Job Failure"
                echo "Check test logs and regression files for details"
              fi
            fi
          } >> "$GITHUB_STEP_SUMMARY"

      - name: Notify on failure
        if: |
          (needs.build.result != 'success' || needs.test.result != 'success')
        run: |
          echo "::error::Build/Test pipeline failed! Check job summaries and logs for details"
          echo "Timestamp: $(date -u +'%Y-%m-%d %H:%M:%S UTC')"
          echo "Build Result: ${{ needs.build.result }}"
          echo "Test Result: ${{ needs.test.result }}"