Skip to content

Commit 4d7c34a

Browse files
committed
Merge remote-tracking branch 'origin/main' into feature/dependency-extraction-DYN-1235
2 parents 75fef9e + af7b73c commit 4d7c34a

File tree

251 files changed

+10184
-3019
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

251 files changed

+10184
-3019
lines changed

.coderabbit.yaml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,9 @@ reviews:
1919
auto_incremental_review: false
2020
suggested_labels: false
2121
suggested_reviewers: false
22+
sequence_diagrams: false
23+
related_issues: false
24+
related_prs: false
2225
finishing_touches:
2326
docstrings:
2427
enabled: false

.github/actions/docker-build/action.yml

Lines changed: 28 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,18 @@ inputs:
3434
aws_secret_access_key:
3535
description: 'AWS Secret Access Key'
3636
required: false
37+
base_image_tag:
38+
description: 'Optional override for base image tag passed to build.sh'
39+
required: false
40+
runtime_image_tag:
41+
description: 'Optional override for RUNTIME_IMAGE_TAG build-arg'
42+
required: false
43+
cuda_version:
44+
description: 'Optional override for CUDA_VERSION build-arg'
45+
required: false
46+
torch_backend:
47+
description: 'Optional override for TORCH_BACKEND build-arg (e.g., cu129)'
48+
required: false
3749

3850
outputs:
3951
image_tag:
@@ -81,14 +93,29 @@ runs:
8193
echo "BUILD_START_TIME=${BUILD_START_TIME}" >> $GITHUB_ENV
8294
8395
echo "image_tag=$IMAGE_TAG" >> $GITHUB_OUTPUT
96+
# Collect optional overrides provided by the workflow
97+
EXTRA_ARGS=""
98+
if [ -n "${{ inputs.base_image_tag }}" ]; then
99+
EXTRA_ARGS+=" --base-image-tag ${{ inputs.base_image_tag }}"
100+
fi
101+
if [ -n "${{ inputs.runtime_image_tag }}" ]; then
102+
EXTRA_ARGS+=" --build-arg RUNTIME_IMAGE_TAG=${{ inputs.runtime_image_tag }}"
103+
fi
104+
if [ -n "${{ inputs.cuda_version }}" ]; then
105+
EXTRA_ARGS+=" --build-arg CUDA_VERSION=${{ inputs.cuda_version }}"
106+
fi
107+
if [ -n "${{ inputs.torch_backend }}" ]; then
108+
EXTRA_ARGS+=" --build-arg TORCH_BACKEND=${{ inputs.torch_backend }}"
109+
fi
110+
84111
./container/build.sh --tag "$IMAGE_TAG" \
85112
--target ${{ inputs.target }} \
86113
--vllm-max-jobs 10 \
87114
--framework ${{ inputs.framework }} \
88115
--platform ${{ inputs.platform }} \
89116
--use-sccache \
90117
--sccache-bucket "$SCCACHE_S3_BUCKET" \
91-
--sccache-region "$AWS_DEFAULT_REGION"
118+
--sccache-region "$AWS_DEFAULT_REGION" $EXTRA_ARGS
92119
93120
BUILD_END_TIME=$(date -u +%Y-%m-%dT%H:%M:%SZ)
94121
echo "🕐 Build ended at: ${BUILD_END_TIME}"

.github/actions/pytest/action.yml

Lines changed: 82 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,37 @@ inputs:
1212
description: 'Maximum number of cores available to docker'
1313
required: false
1414
default: '10'
15+
framework:
16+
description: 'Framework name for test metrics'
17+
required: false
18+
default: 'unknown'
19+
test_type:
20+
description: 'Test type (unit, e2e, integration)'
21+
required: false
22+
default: 'e2e'
23+
platform_arch:
24+
description: 'Platform architecture (amd64, arm64)'
25+
required: false
26+
default: 'amd64'
1527

1628

1729
runs:
1830
using: "composite"
1931
steps:
32+
- name: Setup Test Environment
33+
shell: bash
34+
run: |
35+
# Setup test directories
36+
mkdir -p test-results
37+
38+
# Set platform architecture from input
39+
PLATFORM_ARCH="${{ inputs.platform_arch }}"
40+
if [[ -z "${PLATFORM_ARCH}" ]]; then
41+
PLATFORM_ARCH="amd64"
42+
fi
43+
echo "PLATFORM_ARCH=${PLATFORM_ARCH}" >> $GITHUB_ENV
44+
echo "🏗️ Platform architecture: ${PLATFORM_ARCH}"
45+
2046
- name: Run tests
2147
shell: bash
2248
env:
@@ -25,9 +51,64 @@ runs:
2551
PYTEST_XML_FILE: pytest_test_report.xml
2652
HF_HOME: /runner/_work/_temp
2753
run: |
54+
# Run pytest with detailed output and JUnit XML
55+
set +e # Don't exit on test failures
56+
2857
docker run --runtime=nvidia --rm --gpus all -w /workspace \
2958
--cpus=${NUM_CPUS} \
3059
--network host \
3160
--name ${{ env.CONTAINER_ID }}_pytest \
61+
-v "$(pwd)/test-results:/test-results" \
3262
${{ inputs.image_tag }} \
33-
bash -c "pytest -xsv --basetemp=/tmp --junitxml=${{ env.PYTEST_XML_FILE }} -m \"${{ inputs.pytest_marks }}\""
63+
bash -c "pytest -v --tb=short --basetemp=/tmp --junitxml=/test-results/${{ env.PYTEST_XML_FILE }} --durations=10 -m \"${{ inputs.pytest_marks }}\""
64+
65+
TEST_EXIT_CODE=$?
66+
echo "TEST_EXIT_CODE=${TEST_EXIT_CODE}" >> $GITHUB_ENV
67+
echo "🧪 Tests completed with exit code: ${TEST_EXIT_CODE}"
68+
69+
# Always continue to results processing
70+
exit 0
71+
72+
- name: Process Test Results
73+
shell: bash
74+
run: |
75+
76+
# Check for JUnit XML file and determine test status
77+
JUNIT_FILE="test-results/pytest_test_report.xml"
78+
79+
if [[ -f "$JUNIT_FILE" ]]; then
80+
echo "✅ JUnit XML generated successfully"
81+
# Extract basic test counts for status determination
82+
TOTAL_TESTS=$(grep -o 'tests="[0-9]*"' "$JUNIT_FILE" | grep -o '[0-9]*' | head -1 || echo "0")
83+
FAILED_TESTS=$(grep -o 'failures="[0-9]*"' "$JUNIT_FILE" | grep -o '[0-9]*' | head -1 || echo "0")
84+
ERROR_TESTS=$(grep -o 'errors="[0-9]*"' "$JUNIT_FILE" | grep -o '[0-9]*' | head -1 || echo "0")
85+
echo "📊 ${TOTAL_TESTS} tests completed (${FAILED_TESTS} failed, ${ERROR_TESTS} errors)"
86+
87+
# Create metadata file with step context information
88+
METADATA_FILE="test-results/test_metadata.json"
89+
echo '{' > "$METADATA_FILE"
90+
echo ' "job_name": "${{ github.job }}",' >> "$METADATA_FILE"
91+
echo ' "framework": "${{ inputs.framework }}",' >> "$METADATA_FILE"
92+
echo ' "test_type": "${{ inputs.test_type }}",' >> "$METADATA_FILE"
93+
echo ' "platform_arch": "${{ inputs.platform_arch }}",' >> "$METADATA_FILE"
94+
echo ' "junit_xml_file": "pytest_test_report.xml",' >> "$METADATA_FILE"
95+
echo ' "step_name": "Run ${{ inputs.test_type }} tests"' >> "$METADATA_FILE"
96+
echo '}' >> "$METADATA_FILE"
97+
echo "📝 Created test metadata file"
98+
else
99+
echo "⚠️ JUnit XML file not found - test results may not be available for upload"
100+
TOTAL_TESTS=0
101+
FAILED_TESTS=1 # Treat missing XML as failure
102+
ERROR_TESTS=0
103+
fi
104+
105+
# Exit with original test result to maintain workflow behavior
106+
exit ${TEST_EXIT_CODE}
107+
108+
- name: Upload Test Results
109+
uses: actions/upload-artifact@v4
110+
if: always() # Always upload test results, even if tests failed
111+
with:
112+
name: test-results-${{ inputs.framework }}-${{ inputs.test_type }}-${{ env.PLATFORM_ARCH }}
113+
path: test-results/${{ env.PYTEST_XML_FILE }}
114+
retention-days: 7

0 commit comments

Comments
 (0)