Use self-hosted runner on compute-01 for full GitOps

2026-02-02 23:46:14 +00:00
parent db0bd65281
commit 11da494a4f


@@ -17,7 +17,7 @@ env:
 jobs:
   compile-and-deploy:
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, kubeflow]
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
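
This change only takes effect once a runner carrying the `kubeflow` label is actually registered on compute-01. A minimal sketch of the registration, assuming the stock actions-runner package (the owner/repo and registration token are placeholders):

    # On compute-01, inside the unpacked actions-runner directory.
    ./config.sh \
      --url https://github.com/<OWNER>/<REPO> \
      --token <REGISTRATION_TOKEN> \
      --name compute-01 \
      --labels kubeflow \
      --unattended
    # Run as a systemd service so the runner survives reboots.
    sudo ./svc.sh install
    sudo ./svc.sh start

With both `self-hosted` and `kubeflow` in `runs-on`, jobs are routed only to runners that carry both labels.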
@@ -30,7 +30,7 @@ jobs:
           python-version: '3.11'
       - name: Install kfp
-        run: pip install kfp
+        run: pip install kfp --quiet
       - name: Get changed pipeline files
         id: changed
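
`--quiet` only trims pip's log output. Since compiled YAML can vary across SDK releases, pinning the kfp version may also be worth it; a sketch (the exact pin is a placeholder, match it to the cluster's KFP release):

      - name: Install kfp
        run: pip install 'kfp==2.*' --quiet  # placeholder pin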
@@ -38,48 +38,42 @@ jobs:
           if [ -n "${{ github.event.inputs.pipeline_path }}" ]; then
             echo "files=${{ github.event.inputs.pipeline_path }}" >> $GITHUB_OUTPUT
           else
-            CHANGED=$(git diff --name-only HEAD~1 HEAD -- 'pipelines/**/*.py' | tr '\n' ' ')
+            CHANGED=$(git diff --name-only HEAD~1 HEAD -- 'pipelines/**/*.py' 2>/dev/null || find pipelines -name "*.py" | head -5)
             echo "files=$CHANGED" >> $GITHUB_OUTPUT
           fi
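
The new `2>/dev/null || find` fallback is what makes this work on a shallow clone: `actions/checkout@v4` fetches depth 1 by default, so `HEAD~1` may not exist and the `git diff` fails. Note the `head -5` cap means the fallback re-deploys at most the first five pipeline files it finds. An alternative sketch that keeps diff-based detection by fetching one extra commit:

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 2  # makes HEAD~1 resolvable for git diff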
       - name: Compile and upload pipelines
         if: steps.changed.outputs.files != ''
+        env:
+          KFP_ENDPOINT: ${{ env.KUBEFLOW_ENDPOINT }}
         run: |
           for file in ${{ steps.changed.outputs.files }}; do
             if [ -f "$file" ]; then
-              echo "Processing $file..."
+              echo "🔧 Processing $file..."
               # Extract pipeline name from filename
               PIPELINE_NAME=$(basename "$file" .py | tr '_' '-')
               YAML_FILE="${PIPELINE_NAME}.yaml"
               # Compile pipeline
-              echo "Compiling $file to $YAML_FILE..."
+              echo "📦 Compiling $file..."
               kfp dsl compile --py "$file" --output "$YAML_FILE"
-              # Upload to Kubeflow (requires auth configured)
-              # Note: This requires Kubeflow to be accessible from GHA runners
-              # For private clusters, use self-hosted runners or Tailscale
-              echo "Compiled: $YAML_FILE"
-              echo "Upload manually or configure runner access to Kubeflow"
+              # Upload to Kubeflow
+              echo "🚀 Uploading $PIPELINE_NAME to Kubeflow..."
+              kfp --endpoint ${{ env.KUBEFLOW_ENDPOINT }} pipeline create \
+                -p "$PIPELINE_NAME" \
+                "$YAML_FILE" || echo "Pipeline may already exist, trying upload..."
+              echo "✅ Done: $PIPELINE_NAME"
             fi
           done
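
One caveat: the `|| echo "Pipeline may already exist, trying upload..."` branch only logs; nothing is retried or uploaded when `pipeline create` fails on a name collision. A hedged sketch of an explicit existence check, using the step-level `KFP_ENDPOINT` variable added above (grepping `kfp pipeline list` output is an assumption about the CLI's output format; verify before relying on it):

    # Inside the same loop body, replacing the bare create call.
    # Grep-on-list is an assumption about how the CLI prints names.
    if kfp --endpoint "$KFP_ENDPOINT" pipeline list | grep -qw "$PIPELINE_NAME"; then
      echo "$PIPELINE_NAME already exists, skipping create"
    else
      kfp --endpoint "$KFP_ENDPOINT" pipeline create -p "$PIPELINE_NAME" "$YAML_FILE"
    fi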
-      - name: Upload compiled pipelines as artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: compiled-pipelines
-          path: '*.yaml'
-          retention-days: 7
       - name: Summary
         run: |
-          echo "## Compiled Pipelines" >> $GITHUB_STEP_SUMMARY
+          echo "## 🎯 Pipeline Deployment Summary" >> $GITHUB_STEP_SUMMARY
           echo "" >> $GITHUB_STEP_SUMMARY
-          for f in *.yaml; do
-            [ -f "$f" ] && echo "- $f" >> $GITHUB_STEP_SUMMARY
+          echo "**Endpoint:** ${{ env.KUBEFLOW_ENDPOINT }}" >> $GITHUB_STEP_SUMMARY
+          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "### Processed Files:" >> $GITHUB_STEP_SUMMARY
+          for f in ${{ steps.changed.outputs.files }}; do
+            [ -f "$f" ] && echo "- ✅ $f" >> $GITHUB_STEP_SUMMARY
           done
-          echo "" >> $GITHUB_STEP_SUMMARY
-          echo "Download artifacts and upload to Kubeflow, or configure self-hosted runner with Tailscale access." >> $GITHUB_STEP_SUMMARY