make_devcontainer support
alliepiper committed Apr 23, 2024
1 parent 0baa8e4 commit 7dcbea9
Showing 4 changed files with 92 additions and 47 deletions.
10 changes: 6 additions & 4 deletions .devcontainer/make_devcontainers.sh
@@ -74,6 +74,7 @@ while [[ $# -gt 0 ]]; do
done

MATRIX_FILE="../ci/matrix.yaml"
COMPUTE_MATRIX="../ci/compute-matrix.py"

# Enable verbose mode if requested
if [ "$VERBOSE" = true ]; then
@@ -82,16 +83,17 @@ if [ "$VERBOSE" = true ]; then
fi

# Read matrix.yaml and convert it to json
matrix_json=$(yq -o json ${MATRIX_FILE})
matrix_json=$(python3 ${COMPUTE_MATRIX} ${MATRIX_FILE} --devcontainer-info)

# Exclude Windows environments
readonly matrix_json=$(echo "$matrix_json" | jq 'del(.pull_request.nvcc[] | select(.os | contains("windows")))')
if [ "$VERBOSE" = true ]; then
echo "$matrix_json"
fi

# Get the devcontainer image version and define image tag root
readonly DEVCONTAINER_VERSION=$(echo "$matrix_json" | jq -r '.devcontainer_version')

# Get unique combinations of cuda version, compiler name/version, and Ubuntu version
readonly combinations=$(echo "$matrix_json" | jq -c '[.pull_request.nvcc[] | {cuda: .cuda, compiler_name: .compiler.name, compiler_exe: .compiler.exe, compiler_version: .compiler.version, os: .os}] | unique | .[]')
readonly combinations=$(echo "$matrix_json" | jq -c '.combinations[]')

# Update the base devcontainer with the default values
# The root devcontainer.json file is used as the default container as well as a template for all
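For context, a minimal sketch of the JSON shape the updated script consumes; the field names match what print_devcontainer_info in ci/compute-matrix.py emits, while the version, CUDA, compiler, and OS values below are purely illustrative:

# Sketch only: hypothetical values, not taken from ci/matrix.yaml.
example_json='{
  "devcontainer_version": "24.04",
  "combinations": [
    {"cuda": "12.4", "compiler_name": "gcc", "compiler_version": "12",
     "compiler_exe": "g++", "os": "ubuntu22.04"}
  ]
}'
echo "$example_json" | jq -r '.devcontainer_version'   # extracts the version string
echo "$example_json" | jq -c '.combinations[]'         # one compact JSON object per combination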
2 changes: 1 addition & 1 deletion .github/workflows/nightly.yml
@@ -59,7 +59,7 @@ jobs:
      - name: Compute matrix outputs
        id: compute-matrix
        run: |
          ci/compute-matrix.py ci/matrix.yaml ${{ github.workflow }} --dirty-projects ${{ steps.inspect-changes.outputs.DIRTY_PROJECTS }}
          ci/compute-matrix.py ci/matrix.yaml --workflow ${{ github.workflow }} --dirty-projects ${{ steps.inspect-changes.outputs.DIRTY_PROJECTS }}

  dispatch-groups:
    name: ${{ matrix.name }}
16 changes: 8 additions & 8 deletions .github/workflows/pr.yml
@@ -57,7 +57,7 @@ jobs:
      - name: Compute matrix outputs
        id: compute-matrix
        run: |
          ci/compute-matrix.py ci/matrix.yaml ${{ github.workflow }} --dirty-projects ${{ steps.inspect-changes.outputs.DIRTY_PROJECTS }}
          ci/compute-matrix.py ci/matrix.yaml --workflow ${{ github.workflow }} --dirty-projects ${{ steps.inspect-changes.outputs.DIRTY_PROJECTS }}

  dispatch-groups:
    name: ${{ matrix.name }}
@@ -75,12 +75,12 @@ jobs:
      name: ${{ matrix.name }}
      jobs: ${{ toJSON(fromJSON(needs.compute-matrix.outputs.WORKFLOW)[ matrix.name ]) }}

  # verify-devcontainers:
  # name: Verify Dev Containers
  # permissions:
  # id-token: write
  # contents: read
  # uses: ./.github/workflows/verify-devcontainers.yml
  verify-devcontainers:
    name: Verify Dev Containers
    permissions:
      id-token: write
      contents: read
    uses: ./.github/workflows/verify-devcontainers.yml

  # This job is the final job that runs after all other jobs and is used for branch protection status checks.
  # See: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks
@@ -91,7 +91,7 @@ jobs:
    if: ${{ always() }} # need to use always() instead of !cancelled() because skipped jobs count as success
    needs:
      - dispatch-groups
      # - verify-devcontainers
      - verify-devcontainers
    steps:
      - name: Check status of all precursor jobs
        if: >-
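Both workflows now pass the workflow name through the new --workflow flag rather than a positional argument. A hedged usage sketch of the two modes from a local checkout (the project names given to --dirty-projects are placeholders):

# GHA mode: emit the workflow JSON for a named workflow, optionally filtered to dirty projects.
ci/compute-matrix.py ci/matrix.yaml --workflow pull_request --dirty-projects projectA projectB

# Devcontainer mode: emit devcontainer_version plus the unique CUDA/compiler/OS combinations,
# as consumed by .devcontainer/make_devcontainers.sh.
ci/compute-matrix.py ci/matrix.yaml --devcontainer-info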
111 changes: 77 additions & 34 deletions ci/compute-matrix.py
@@ -476,14 +476,78 @@ def natural_sort_key(key):
    return workflow_dispatch_groups


def print_gha_workflow(args):
    matrix_jobs = preprocess_matrix_jobs(matrix_yaml['workflows'][args.workflow])

    # print("::group::Matrix Jobs", file=sys.stderr)
    # print("Matrix Jobs:", file=sys.stderr)
    # for matrix_job in matrix_jobs:
    #     print(json.dumps(matrix_job, indent=None, separators=(',', ':')), file=sys.stderr)
    # print("::end-group::", file=sys.stderr)

    if args.dirty_projects:
        matrix_jobs = filter_projects(matrix_jobs, args.dirty_projects)

    workflow_dispatch_groups = {}
    for matrix_job in matrix_jobs:
        merge_dispatch_groups(workflow_dispatch_groups, matrix_job_to_dispatch_group(matrix_job))

    final_workflow = finalize_workflow_dispatch_groups(workflow_dispatch_groups)

    # Pretty print the workflow json to stderr:
    print("::group::Final Workflow", file=sys.stderr)
    print(json.dumps(final_workflow, indent=2), file=sys.stderr)
    print("::end-group::", file=sys.stderr)

    # Print a single-line, compact version of the workflow json to stdout:
    write_output("WORKFLOW", json.dumps(final_workflow, indent=None, separators=(',', ':')))
    # Print the list of key (dispatch group) names to stdout in a single line as a json list:
    write_output("WORKFLOW_KEYS", json.dumps(list(final_workflow.keys()), indent=None, separators=(',', ':')))


def print_devcontainer_info(args):
    devcontainer_version = matrix_yaml['devcontainer_version']

    matrix_jobs = []
    for workflow in matrix_yaml['workflows']:
        matrix_jobs.extend(matrix_yaml['workflows'][workflow])
    matrix_jobs = preprocess_matrix_jobs(matrix_jobs)

    # Remove all but the following keys from the matrix jobs:
    keep_keys = ['ctk', 'host_compiler', 'os']
    combinations = [{key: job[key] for key in keep_keys} for job in matrix_jobs]

    # Remove duplicates and filter out windows jobs:
    unique_combinations = []
    for combo in combinations:
        if not is_windows(combo) and combo not in unique_combinations:
            unique_combinations.append(combo)

    for combo in unique_combinations:
        combo['compiler_name'] = combo['host_compiler']['name']
        combo['compiler_version'] = combo['host_compiler']['version']
        combo['compiler_exe'] = combo['host_compiler']['exe']
        del combo['host_compiler']

        combo['cuda'] = combo['ctk']
        del combo['ctk']

    devcontainer_json = {'devcontainer_version': devcontainer_version, 'combinations': unique_combinations}

    # Pretty print the devcontainer json to stdout:
    print(json.dumps(devcontainer_json, indent=2))


def main():
    global matrix_yaml

    parser = argparse.ArgumentParser(description='Compute matrix for workflow')
    parser.add_argument('matrix_file', help='Path to the matrix YAML file')
    parser.add_argument('workflow', help='Name of the workflow')
    parser.add_argument('--dirty-projects', nargs='*', dest='dirty_projects',
                        help='Project(s) to rerun', default=[])
    parser_mode = parser.add_mutually_exclusive_group(required=True)
    parser_mode.add_argument('--workflow', help='Print GHA workflow [pull_request, nightly, weekly, etc]')
    parser_mode.add_argument('--devcontainer-info', action='store_true',
                             help='Print devcontainer info instead of GHA workflows.')
    parser.add_argument('--dirty-projects', nargs='*', help='Filter jobs to only these projects')
    args = parser.parse_args()

    # Check if the matrix file exists
@@ -495,7 +559,7 @@ def main():
        matrix_yaml = yaml.safe_load(f)

    # Check if the workflow is valid
    if args.workflow and args.workflow not in matrix_yaml['workflows']:
        print(f"Error: Workflow 'workflows.{args.workflow}' does not exist in the matrix YAML.")
        sys.exit(1)

@@ -504,41 +568,20 @@ def main():
        parser.print_usage()
        sys.exit(1)

    # Print the arguments to stderr:
    print("Arguments:", file=sys.stderr)
    print(args, file=sys.stderr)

    # print("Arguments:", file=sys.stderr)
    # print(args, file=sys.stderr)
    # print("::group::Matrix YAML", file=sys.stderr)
    # print("Matrix YAML:", file=sys.stderr)
    # print(matrix_yaml, file=sys.stderr)
    # print("::end-group::", file=sys.stderr)

    matrix_jobs = preprocess_matrix_jobs(matrix_yaml['workflows'][args.workflow])

    # print("::group::Matrix Jobs", file=sys.stderr)
    # print("Matrix Jobs:", file=sys.stderr)
    # for matrix_job in matrix_jobs:
    #     print(json.dumps(matrix_job, indent=None, separators=(',', ':')), file=sys.stderr)
    # print("::end-group::", file=sys.stderr)

    if args.dirty_projects:
        matrix_jobs = filter_projects(matrix_jobs, args.dirty_projects)

    workflow_dispatch_groups = {}
    for matrix_job in matrix_jobs:
        merge_dispatch_groups(workflow_dispatch_groups, matrix_job_to_dispatch_group(matrix_job))

    final_workflow = finalize_workflow_dispatch_groups(workflow_dispatch_groups)

    # Pretty print the workflow json to stderr:
    print("::group::Final Workflow", file=sys.stderr)
    print(json.dumps(final_workflow, indent=2), file=sys.stderr)
    print("::end-group::", file=sys.stderr)

    # Print a single-line, compact version of the workflow json to stdout:
    write_output("WORKFLOW", json.dumps(final_workflow, indent=None, separators=(',', ':')))
    # Print the list of key (dispatch group) names to stdout in a single line as a json list:
    write_output("WORKFLOW_KEYS", json.dumps(list(final_workflow.keys()), indent=None, separators=(',', ':')))
    if args.workflow:
        print_gha_workflow(args)
    elif args.devcontainer_info:
        print_devcontainer_info(args)
    else:
        print("Error: Either --workflow WORKFLOW or --devcontainer-info must be specified.", file=sys.stderr)
        sys.exit(1)


if __name__ == '__main__':
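Because --workflow and --devcontainer-info live in a required, mutually exclusive argparse group, exactly one mode must be selected per invocation. A quick behavioral sketch (the workflow name is illustrative):

ci/compute-matrix.py ci/matrix.yaml --workflow nightly                      # GHA workflow mode
ci/compute-matrix.py ci/matrix.yaml --devcontainer-info                     # devcontainer mode
ci/compute-matrix.py ci/matrix.yaml --workflow nightly --devcontainer-info  # rejected: flags are mutually exclusive
ci/compute-matrix.py ci/matrix.yaml                                         # rejected: one of the two modes is required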
