Uploads a local folder to Google Cloud Storage and creates a StorageSource object pointing to it, for use as a Cloud Build source
cr_build_upload_gcs(
local,
remote = paste0(local, format(Sys.time(), "%Y%m%d%H%M%S"), ".tar.gz"),
bucket = cr_bucket_get(),
predefinedAcl = "bucketOwnerFullControl",
deploy_folder = "deploy"
)
cr_buildstep_source_move(deploy_folder)
The local directory containing the Dockerfile and other files you wish to deploy
The name of the folder in your bucket
The Google Cloud Storage bucket to upload to
The ACL rules for the uploaded object. Set to "bucketLevel" for buckets with bucket-level access enabled
Which folder to deploy from — by default the uploaded files will be available on Cloud Build in /workspace/deploy/
A Source object
cr_build_upload_gcs
copies the files into the deploy_folder
in your working directory, then tars them for upload. The files will be available on Cloud Build at /workspace/deploy_folder/*.
cr_buildstep_source_move
is a way to move the StorageSource files in /workspace/deploy_folder/*
into the root /workspace/*
location, which is more consistent with RepoSource objects or GitHub build triggers created using cr_buildtrigger_repo. This means the same runtime code can run for both sources.
Other Cloud Build functions:
Build()
,
RepoSource()
,
Source()
,
StorageSource()
,
cr_build_artifacts()
,
cr_build_list()
,
cr_build_logs()
,
cr_build_make()
,
cr_build_status()
,
cr_build_targets()
,
cr_build_wait()
,
cr_build_write()
,
cr_build_yaml_artifact()
,
cr_build_yaml_secrets()
,
cr_build_yaml()
,
cr_build()
if (FALSE) {
cr_project_set("my-project")
cr_bucket_set("my-bucket")
my_gcs_source <- cr_build_upload_gcs("my_folder")
build1 <- cr_build("cloudbuild.yaml", source = my_gcs_source)
}
cr_buildstep_source_move("deploy")
#> [[1]]
#> ==cloudRunnerBuildStep==
#> name: ubuntu
#> args:
#> - bash
#> - -c
#> - ls -R /workspace/; [ -d "/workspace/deploy" ] && cd /workspace/deploy && mv * ../;
#> ls -R /workspace/
#> id: move source files
#>