Add GHA workflow for pipeline compilation

2026-02-02 23:41:49 +00:00
parent 09ee583d6e
commit db0bd65281
2 changed files with 213 additions and 0 deletions

.github/workflows/deploy-pipelines.yaml (new file, 85 lines added)

@@ -0,0 +1,85 @@
name: Deploy Kubeflow Pipelines
on:
push:
branches: [master, main]
paths:
- 'pipelines/**/*.py'
workflow_dispatch:
inputs:
pipeline_path:
description: 'Path to specific pipeline (e.g., pipelines/examples/hello_world.py)'
required: false
default: ''
env:
KUBEFLOW_ENDPOINT: https://kubeflow.walleye-frog.ts.net/pipeline
jobs:
compile-and-deploy:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install kfp
run: pip install kfp
- name: Get changed pipeline files
id: changed
run: |
if [ -n "${{ github.event.inputs.pipeline_path }}" ]; then
echo "files=${{ github.event.inputs.pipeline_path }}" >> $GITHUB_OUTPUT
else
CHANGED=$(git diff --name-only HEAD~1 HEAD -- 'pipelines/**/*.py' | tr '\n' ' ')
echo "files=$CHANGED" >> $GITHUB_OUTPUT
fi
- name: Compile and upload pipelines
if: steps.changed.outputs.files != ''
env:
KFP_ENDPOINT: ${{ env.KUBEFLOW_ENDPOINT }}
run: |
for file in ${{ steps.changed.outputs.files }}; do
if [ -f "$file" ]; then
echo "Processing $file..."
# Extract pipeline name from filename
PIPELINE_NAME=$(basename "$file" .py | tr '_' '-')
YAML_FILE="${PIPELINE_NAME}.yaml"
# Compile pipeline
echo "Compiling $file to $YAML_FILE..."
kfp dsl compile --py "$file" --output "$YAML_FILE"
# Upload to Kubeflow (requires auth configured)
# Note: This requires Kubeflow to be accessible from GHA runners
# For private clusters, use self-hosted runners or Tailscale
echo "Compiled: $YAML_FILE"
echo "Upload manually or configure runner access to Kubeflow"
fi
done
- name: Upload compiled pipelines as artifacts
uses: actions/upload-artifact@v4
with:
name: compiled-pipelines
path: '*.yaml'
retention-days: 7
- name: Summary
run: |
echo "## Compiled Pipelines" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
for f in *.yaml; do
[ -f "$f" ] && echo "- $f" >> $GITHUB_STEP_SUMMARY
done
echo "" >> $GITHUB_STEP_SUMMARY
echo "Download artifacts and upload to Kubeflow, or configure self-hosted runner with Tailscale access." >> $GITHUB_STEP_SUMMARY

hello_world.yaml (new file, 128 lines added)

@@ -0,0 +1,128 @@
# PIPELINE DEFINITION
# Name: hello-world-pipeline
# Description: A simple hello world pipeline to test Kubeflow setup
# Inputs:
# name: str [Default: 'Kubeflow User']
components:
comp-process-greeting:
executorLabel: exec-process-greeting
inputDefinitions:
parameters:
greeting:
parameterType: STRING
outputDefinitions:
parameters:
Output:
parameterType: STRING
comp-say-hello:
executorLabel: exec-say-hello
inputDefinitions:
parameters:
name:
parameterType: STRING
outputDefinitions:
parameters:
Output:
parameterType: STRING
deploymentSpec:
executors:
exec-process-greeting:
container:
args:
- --executor_input
- '{{$}}'
- --function_to_execute
- process_greeting
command:
- sh
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.15.2'\
\ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\
$0\" \"$@\"\n"
- sh
- -ec
- 'program_path=$(mktemp -d)
printf "%s" "$0" > "$program_path/ephemeral_component.py"
_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@"
'
- "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\
\ *\n\ndef process_greeting(greeting: str) -> str:\n \"\"\"Process the\
\ greeting message.\"\"\"\n processed = greeting.upper()\n print(f\"\
Processed: {processed}\")\n return processed\n\n"
image: python:3.11-slim
exec-say-hello:
container:
args:
- --executor_input
- '{{$}}'
- --function_to_execute
- say_hello
command:
- sh
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.15.2'\
\ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\
$0\" \"$@\"\n"
- sh
- -ec
- 'program_path=$(mktemp -d)
printf "%s" "$0" > "$program_path/ephemeral_component.py"
_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@"
'
- "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\
\ *\n\ndef say_hello(name: str) -> str:\n \"\"\"Simple component that\
\ returns a greeting.\"\"\"\n message = f\"Hello, {name}! Welcome to\
\ Kubeflow Pipelines.\"\n print(message)\n return message\n\n"
image: python:3.11-slim
pipelineInfo:
description: A simple hello world pipeline to test Kubeflow setup
name: hello-world-pipeline
root:
dag:
tasks:
process-greeting:
cachingOptions:
enableCache: true
componentRef:
name: comp-process-greeting
dependentTasks:
- say-hello
inputs:
parameters:
greeting:
taskOutputParameter:
outputParameterKey: Output
producerTask: say-hello
taskInfo:
name: process-greeting
say-hello:
cachingOptions:
enableCache: true
componentRef:
name: comp-say-hello
inputs:
parameters:
name:
componentInputParameter: name
taskInfo:
name: say-hello
inputDefinitions:
parameters:
name:
defaultValue: Kubeflow User
isOptional: true
parameterType: STRING
schemaVersion: 2.1.0
sdkVersion: kfp-2.15.2
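
For reference, the compiled spec above embeds the component sources, so the Python file it came from can be reconstructed fairly closely. A sketch, assuming pipelines/examples/hello_world.py looks roughly like this: the component bodies, base image, pipeline name, description, and default value are taken from the spec, while the decorator arguments and the __main__ block are assumptions.

# hello_world.py -- sketch reconstructed from the compiled spec above.
from kfp import compiler, dsl


@dsl.component(base_image="python:3.11-slim")
def say_hello(name: str) -> str:
    """Simple component that returns a greeting."""
    message = f"Hello, {name}! Welcome to Kubeflow Pipelines."
    print(message)
    return message


@dsl.component(base_image="python:3.11-slim")
def process_greeting(greeting: str) -> str:
    """Process the greeting message."""
    processed = greeting.upper()
    print(f"Processed: {processed}")
    return processed


@dsl.pipeline(name="hello-world-pipeline",
              description="A simple hello world pipeline to test Kubeflow setup")
def hello_world_pipeline(name: str = "Kubeflow User") -> None:
    # say-hello feeds its output into process-greeting, matching the dag above.
    hello_task = say_hello(name=name)
    process_greeting(greeting=hello_task.output)


if __name__ == "__main__":
    # Local compile; the workflow does the equivalent via "kfp dsl compile".
    compiler.Compiler().compile(hello_world_pipeline, "hello_world.yaml")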