#!/usr/bin/env python3
# ‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹
# SPDX-License-Identifier: GPL-2.0
# Copyright (c) 2023 Ricardo Pardini <ricardo@pardini.net>
# This file is a part of the Armbian Build Framework https://github.com/armbian/build/
# ‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹‹
import json
import logging
import os
import sys

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from common import armbian_utils
from common import gha
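
# This script reads the artifacts+images info JSON produced by the build framework
# (first argument) and renders a complete GitHub Actions workflow as YAML (second
# argument): one "prepare" job, one job per artifact, and one job per image.
# Usage: output-gha-workflow.py <info.json> <workflow.yml>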
# Prepare logging
armbian_utils.setup_logging()
log: logging.Logger = logging.getLogger("output-gha-matrix")


class BuildJob(gha.BaseWorkflowJob):
    def __init__(self, id: str, name: str):
        super().__init__(id, name)
        self.add_default_envs()
        self.add_ghcr_login_step()

    def add_default_envs(self):
        self.envs["OCI_TARGET_BASE"] = "ghcr.io/${{ github.repository }}/"  # This is picked up by the Docker launcher automatically
        self.envs["DOCKER_ARMBIAN_BASE_COORDINATE_PREFIX"] = "ghcr.io/${{ github.repository }}:armbian-next-"  # Use Docker image in the same repo
        # Do not apt update/install/requirements/etc during the Dockerfile build; trust that DOCKER_ARMBIAN_BASE_COORDINATE_PREFIX's images are up-to-date
        self.envs["DOCKER_SKIP_UPDATE"] = "yes"

    def add_ghcr_login_step(self):
        # Login to ghcr.io; we're gonna do a lot of OCI lookups.
        login_step = self.add_step("docker-login-ghcr", "Docker Login to GitHub Container Registry")
        login_step.uses = "docker/login-action@v2"
        login_step.withs["registry"] = "ghcr.io"
        login_step.withs["username"] = "${{ github.repository_owner }}"  # GitHub username or org
        login_step.withs["password"] = "${{ secrets.GITHUB_TOKEN }}"  # GitHub Actions builtin token; the repo has to have package access.
        # The step above renders to the following workflow YAML:
        # - name: Docker Login to GitHub Container Registry
        #   uses: docker/login-action@v2
        #   with:
        #     registry: ghcr.io
        #     username: ${{ github.repository_owner }} # GitHub username or org
        #     password: ${{ secrets.GITHUB_TOKEN }} # GitHub Actions builtin token; the repo has to have package access.
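
# ArtifactJob, ImageJob and PrepareJob below are thin subclasses: each inherits the
# default envs and the ghcr.io login step from BuildJob's constructor.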

class ArtifactJob(BuildJob):
    def __init__(self, id: str, name: str):
        super().__init__(id, name)


class ImageJob(BuildJob):
    def __init__(self, id: str, name: str):
        super().__init__(id, name)


class PrepareJob(BuildJob):
    def __init__(self, id: str, name: str):
        super().__init__(id, name)

    def add_initial_checkout(self):
        # Checkout the build repo
        checkout_step = self.add_step("checkout-build-repo", "Checkout build repo")
        checkout_step.uses = "actions/checkout@v3"
        checkout_step.withs["repository"] = "${{ github.repository_owner }}/armbian-build"
        checkout_step.withs["ref"] = "extensions"
        checkout_step.withs["fetch-depth"] = 1
        checkout_step.withs["clean"] = "false"
        # Now grab the SHA1 from the checked-out copy
        grab_sha1_step = self.add_step("git-info", "Grab SHA1")
        grab_sha1_step.run = 'echo "sha1=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT'
        self.add_job_output_from_step(grab_sha1_step, "sha1")

    def add_cache_restore_step(self):
        # Restore the cache
        restore_cache_step = self.add_step("restore-cache", "Restore cache")
        restore_cache_step.uses = "actions/cache@v3"
        restore_cache_step.withs["path"] = "cache/memoize\ncache/oci/positive"
        restore_cache_step.withs["key"] = '${{ runner.os }}-cache-${{ github.sha }}-${{ steps.git-info.outputs.sha1 }}'
        restore_cache_step.withs["restore-keys"] = '${{ runner.os }}-matrix-cache-'
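        # Note: the "restore-keys" prefix ("-matrix-cache-") differs from the save "key"
        # prefix ("-cache-"), so restores presumably hit caches saved by the sibling
        # matrix workflow rather than by this step itself.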

    def add_cache_chown_step(self):
        # chown the cache back to the normal user
        chown_cache_step = self.add_step("chown-cache", "Chown cache")
        chown_cache_step.run = 'sudo chown -R $USER:$USER cache/memoize cache/oci/positive'
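        # (Assumption: the build leaves root-owned files in the cache, e.g. from running
        # under sudo/Docker; actions/cache needs them readable by the runner user to save.)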

    def prepare_gh_releases_step(self):
        # @TODO: this is outdated and needs replacement. It also deletes the release if one already exists,
        # which is not what we want; it might be necessary to move the tag instead.
        gh_releases_step = self.add_step("gh-releases", "Prepare GitHub Releases")
        gh_releases_step.uses = "marvinpinto/action-automatic-releases@latest"
        gh_releases_step.withs["repo_token"] = "${{ secrets.GITHUB_TOKEN }}"
        gh_releases_step.withs["automatic_release_tag"] = "latest-images"
        gh_releases_step.withs["prerelease"] = "false"
        gh_releases_step.withs["title"] = "Latest images"

# Read the outdated artifacts+images info JSON file passed as the first argument
with open(sys.argv[1]) as f:
    info = json.load(f)

# Create a WorkflowFactory
wfFactory: gha.WorkflowFactory = gha.WorkflowFactory()

# Create the prepare job
pJob: PrepareJob = PrepareJob("prepare", "prepare all")
pJob.set_runs_on(["self-hosted", "Linux", "matrix-prepare"])  # @TODO: de-hardcode?
pJob.add_initial_checkout()
pJob.add_cache_restore_step()
pJobUpToDateStep = pJob.add_step("check-up-to-date", "Check up to date")
pJobUpToDateStep.run = 'rm -rfv output/info; bash ./compile.sh workflow rpardini-generic # DEBUG=yes'
# The outputs are added later, for each artifact.
pJob.add_cache_chown_step()
pJob.prepare_gh_releases_step()
wfFactory.add_job(pJob)
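
# Bookkeeping for wiring image jobs to their artifact jobs:
# artifact_id -> ArtifactJob, and artifact_id -> its "up-to-date-artifact" job output.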
all_artifact_jobs = {}
u2date_artifact_outputs = {}

for artifact_id in info["artifacts"]:
    artifact = info["artifacts"][artifact_id]
    skip = not not artifact["oci"]["up-to-date"]  # coerce to a real bool
    # if skip:
    #     continue
    artifact_name = artifact['in']['artifact_name']
    # desc = f"{artifact['out']['artifact_final_file_basename']}"
    desc = f"{artifact['out']['artifact_name']}"
    # runs_on = ["self-hosted", "Linux", 'armbian', f"artifact-{artifact_name}"]
    runs_on = "fast"
    # @TODO: externalize this logic.
    # rootfs's for arm64 are built on self-hosted runners tagged with "rootfs-<arch>"
    if artifact_name in ["rootfs"]:
        rootfs_arch = artifact['in']['inputs']['ARCH']  # @TODO: we should resolve arch _much_ earlier in the pipeline and make it standard
        if rootfs_arch in ["arm64"]:  # (future: add armhf)
            runs_on = ["self-hosted", "Linux", f"rootfs-{rootfs_arch}"]
    # all kernels are built on self-hosted runners.
    if artifact_name in ["kernel"]:
        runs_on = ["self-hosted", "Linux", "alfa"]
    inputs = artifact['in']['original_inputs']
    cmds = (["artifact"] + armbian_utils.map_to_armbian_params(inputs["vars"]) + inputs["configs"])
    invocation = " ".join(cmds)
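    # The invocation ends up as something like "artifact BOARD=xxx BRANCH=yyy <configs...>";
    # a hypothetical example only, the exact parameters depend on the artifact's original inputs.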
    item = {"desc": desc, "runs_on": runs_on, "invocation": invocation}  # note: currently unused

    aJob: ArtifactJob = ArtifactJob(f"artifact-{artifact_id}", desc)
    aJob.set_runs_on(runs_on)
    build_step = aJob.add_step("build-artifact", f"Build artifact {desc}")
    build_step.run = f'echo "fake artifact: {invocation}"'
    # Add output to the prepare job... & set the GHA output, right here. Hey us, it's us from the future. We're so smart.
    # Write to a GitHub Actions output variable, using the filesystem.
    gha.set_gha_output(f"u2d-{artifact_id}", ("yes" if skip else "no"))
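    # (In effect this script runs twice: once to generate the workflow, and again inside
    # the generated prepare job via "compile.sh workflow", where set_gha_output above
    # presumably writes the u2d-* values, via $GITHUB_OUTPUT, that the generated jobs read.)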
    output: gha.WorkflowJobOutput = pJob.add_job_output_from_step(pJobUpToDateStep, f"u2d-{artifact_id}")
    input: gha.WorkflowJobInput = aJob.add_job_input_from_needed_job_output(output)
    aJob.add_condition_from_input(input, "== 'no'")
    u2date_output: gha.WorkflowJobOutput = aJob.add_job_output_from_input("up-to-date-artifact", input)
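    # Wiring recap: the prepare job exposes "u2d-<artifact_id>"; this artifact job only
    # runs when that output is 'no' (not up-to-date), and re-exports the value as
    # "up-to-date-artifact" so image jobs can be gated on it below.
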
    all_artifact_jobs[artifact_id] = aJob
    u2date_artifact_outputs[artifact_id] = u2date_output
    wfFactory.add_job(aJob)

# Ok, now the images...
for image_id in info["images"]:
    image = info["images"][image_id]
    # skip = image["outdated_artifacts_count"] == 0
    # if skip:
    #     continue
    desc = f"{image['image_file_id']} {image_id}"
    runs_on = "fast"
    image_arch = image['out']['ARCH']
    if image_arch in ["arm64"]:  # , "armhf"
        runs_on = ["self-hosted", "Linux", f"image-{image_arch}"]
    inputs = image['in']
    cmds = (armbian_utils.map_to_armbian_params(inputs["vars"]) + inputs["configs"])  # image build is the "build" command, which is omitted here
    invocation = " ".join(cmds)
    iJob: ImageJob = ImageJob(f"image-{image_id}", desc)
    iJob.set_runs_on(runs_on)
    build_step = iJob.add_step("build-image", f"Build image {desc}")
    build_step.run = f'echo "fake image: {invocation}"'
    # Make the image job consume the outputs of the artifacts it needs
    for artifact_id in image["artifact_ids"]:
        log.info(f"Image {image_id} wants artifact {artifact_id}")
        aJob = all_artifact_jobs[artifact_id]
        aJobU2dOutput = u2date_artifact_outputs[artifact_id]
        u2dinput = iJob.add_job_input_from_needed_job_output(aJobU2dOutput)
        iJob.add_condition_from_input(u2dinput, "== 'no'")
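        # Each needed artifact contributes a needs-edge plus a condition; how the gha
        # module combines multiple conditions (AND vs OR) determines whether the image
        # is rebuilt when any, or only when all, of its artifacts were outdated.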
    wfFactory.add_job(iJob)

# Convert the workflow to YAML
gha_workflow_yaml = armbian_utils.to_yaml(wfFactory.render_yaml())

# Write the YAML to the target file
with open(sys.argv[2], "w") as f:
    f.write(gha_workflow_yaml)
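
# For reference, the rendered workflow is roughly shaped like this (a hand-written
# sketch of the intent, not actual output of this script):
#
# jobs:
#   prepare:
#     runs-on: [self-hosted, Linux, matrix-prepare]
#     outputs:
#       u2d-<artifact_id>: ${{ steps.check-up-to-date.outputs.u2d-<artifact_id> }}
#   artifact-<artifact_id>:
#     needs: [prepare]
#     if: ${{ needs.prepare.outputs.u2d-<artifact_id> == 'no' }}
#   image-<image_id>:
#     needs: [artifact-<artifact_id>, ...]
#     if: ${{ needs.artifact-<artifact_id>.outputs.up-to-date-artifact == 'no' }}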