# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for updating goma configurations."""
from string import Template
from recipe_engine.recipe_api import Property
DEPS = [
"fuchsia/gcloud",
"fuchsia/gerrit",
"fuchsia/git",
"fuchsia/kubectl",
"fuchsia/status_check",
"fuchsia/yaml",
"recipe_engine/context",
"recipe_engine/file",
"recipe_engine/json",
"recipe_engine/path",
"recipe_engine/properties",
"recipe_engine/raw_io",
"recipe_engine/step",
"recipe_engine/time",
]
PROPERTIES = {
"repository": Property(
kind=str,
help="repository that hold the goma configurations",
default="https://fuchsia.googlesource.com/infra/config",
),
"config_root": Property(
kind=str,
help="root directory in repository that stores goma configurations",
default="goma",
),
"toolchain_project": Property(
kind=str, help="project name that hosts goma toolchains", default="goma-fuchsia"
),
"cluster_project": Property(
kind=str, help="project name that hosts goma cluster", default="goma-fuchsia"
),
"cluster": Property(kind=str, help="the name of the cluster", default="rbe-dev"),
"tag": Property(
kind=str, help="container tag for gomatools containers", default="latest"
),
"dry_run": Property(
kind=bool, help="dry_run without landing changes to repository", default=True
),
}
COMMIT_MESSAGE = """[goma] Update config for {project}/{cluster}:
Using gomatools tag: {tag}
Exempt-From-Owner-Approval: Roller.
"""
YAML_TEMPLATE_TEST_DATA = """# Copyright 2020 Google Inc. All Rights Reserved.
apiVersion: apps/v1beta2
kind: Deployment
metadata:
labels:
app: auth-server
annotations:
imagetag: $IMAGETAG
buildtag: $TAG
name: auth-server
spec:
replicas: 2
selector:
matchLabels:
app: auth-server
template:
metadata:
labels:
app: auth-server
spec:
containers:
- command:
- /opt/goma/bin/auth_server
- --project-id=$PROJECT_ID
name: auth-server
image: gcr.io/$CONTAINER_PROJECT_ID/auth-server:$IMAGETAG
ports:
- containerPort: 5050
protocol: TCP
- containerPort: 8081
protocol: TCP
resources:
limits:
cpu: 1500m
memory: 1500Mi
requests:
cpu: 100m
memory: 100Mi
# following could be configured by PodPreset?
livenessProbe:
httpGet:
path: /healthz
port: 8081
scheme: HTTP
initialDelaySeconds: 3
periodSeconds: 5
readinessProbe:
httpGet:
path: /healthz
port: 8081
scheme: HTTP
initialDelaySeconds: 3
periodSeconds: 5"""
def generate_time_stamp(api):
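    # Produces a compact UTC timestamp, e.g. "20200102_030405".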
return "{:%Y%m%d_%H%M%S}".format(api.time.utcnow())
def get_region_for_cluster(api, goma_config_dir, cluster):
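    # The storage config lives at <config_root>/gke-res/<cluster>/storage.yaml,
    # e.g. goma/gke-res/rbe-dev/storage.yaml for the default cluster.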
storage_yaml = goma_config_dir.join("gke-res", cluster, "storage.yaml")
if not api.path.exists(storage_yaml):
        raise api.step.StepFailure(
            "unknown cluster name %s" % cluster
        )  # pragma: no cover
return api.yaml.retrieve_field(storage_yaml, "region")
def gen_configmap_memorystore(api, config_dir, cluster_project, cluster):
"""Generate the content of memorystore yaml file for a goma k8s cluster.
This function mocks the behavior of 'gen-configmap-memorystore.sh' from
'cloudbuild/k8s'.
Args:
* cluster (str) - The name of the k8s cluster.
"""
region = get_region_for_cluster(api, config_dir, cluster)
if region == "":
raise api.step.StepFailure(
"region config not found in file"
) # pragma: no cover
host = api.gcloud(
"redis",
"instances",
"describe",
"%s-memorystore" % cluster,
"--project",
cluster_project,
"--region",
region,
"--format",
"get(host)",
step_name="retrieve host info",
stdout=api.raw_io.output(),
).stdout.strip()
port = api.gcloud(
"redis",
"instances",
"describe",
"%s-memorystore" % cluster,
"--project",
cluster_project,
"--region",
region,
"--format",
"get(port)",
step_name="retrieve port info",
stdout=api.raw_io.output(),
).stdout.strip()
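    # Wire the Memorystore (redis) endpoint retrieved above into a ConfigMap
    # consumed by the goma cluster.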
return """apiVersion: v1
kind: ConfigMap
metadata:
name: memorystore
data:
REDISHOST: "{}"
REDISPORT: "{}"
""".format(
host, port
)
def patch_yaml(api, input_yaml, patch_file, test_data=""):
"""Patch a k8s yaml configuration file using kubectl patch.
This function mocks the behavior of 'apply_patch' from
'cloudbuild/k8s/config.sh'.
Args:
* input_yaml (Path) - The path to the yaml that needs to be patched.
* patch_file (Path) - The path to the patch file.
* test_data (string) - The test data content of a patch file.
"""
ptype = str(patch_file)[str(patch_file).rfind(".") + 1 :]
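    # The patch type is taken from the file's last extension, e.g.
    # "deploy_exec-server.yaml.replica.strategic" yields --type=strategic.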
patch_data = api.file.read_text(
"read patch %s" % patch_file, patch_file, test_data=test_data
)
patched_yaml = api.kubectl(
"patch",
"-f",
input_yaml,
"--local=true",
"--type=%s" % ptype,
"--patch",
patch_data,
"-o",
"yaml",
step_name="patch %s" % input_yaml,
stdout=api.raw_io.output(),
).stdout
api.file.write_text("write patched yaml %s" % input_yaml, input_yaml, patched_yaml)
def template_to_output(api, template_file, templates_root, yaml_root):
"""Generate the file name of generated k8s configuration yaml from a template.
Args:
* template_file (Path) - The path to the template file.
* templates_root (Path) - The path to the root directory of the templates.
* yaml_root (Path) - The path to the root directory that holds generated
k8s configuration yamls.
"""
template_file_rel = api.path.relpath(str(template_file), str(templates_root))
target_file = api.path.join(str(yaml_root), template_file_rel)
if target_file.endswith(".in"):
target_file = target_file[: len(target_file) - len(".in")]
if target_file.endswith(".custom"):
target_file = target_file[: len(target_file) - len(".custom")]
return api.path.abs_to_path(target_file)
def generate_k8s_yaml_from_template(
api,
input_yaml,
output_yaml,
cluster_project,
cluster,
container_project,
rbe_instance_prefix,
cmd_files_bucket,
toolchain_config_bucket,
cache_bucket_prefix,
imagetag="latest",
opt_pre_shared_cert="",
tag="",
test_data="",
):
"""Generate k8s yaml configuration files from templates.
This function mocks the behavior of 'fix.sh' in 'cloudbuild/k8s'.
Args:
* input_yaml (Path) - The path to the template file.
* output_yaml (Path) - The path to the generated yaml file.
    * cluster_project (str) - The project id of the gcloud project hosting k8s
      clusters.
* cluster (str) - The name of the k8s cluster.
* container_project (str) - The project id of the gcloud project
hosting goma docker images.
* rbe_instance_prefix (str) - The instance prefix for RBE workers.
* cmd_files_bucket (str) - The GCS bucket name for Goma config files.
* toolchain_config_bucket (str) - The GCS bucket of Goma toolchain config.
* cache_bucket_prefix (str) - The GCS bucket name for toolchain caches.
* imagetag (str) - The container tag name for Goma GCP images.
* opt_pre_shared_cert (str) - The optional path to pre shared SSL certs.
* tag (str) - The time stamp tag.
* test_data (str) - Test data for template.
"""
replace_dict = {
"PROJECT_ID": cluster_project,
"CLUSTER": cluster,
"CONTAINER_PROJECT_ID": container_project,
"RBE_INSTANCE_PREFIX": rbe_instance_prefix,
"CMD_FILES_BUCKET": cmd_files_bucket,
"TOOLCHAIN_CONFIG_BUCKET": toolchain_config_bucket,
"CACHE_BUCKET_PREFIX": cache_bucket_prefix,
"TAG": tag,
}
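    # These keys correspond to the $PLACEHOLDER tokens in the template yamls
    # (see YAML_TEMPLATE_TEST_DATA for an example template).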
if opt_pre_shared_cert:
# Not currently used by Fuchsia goma.
replace_dict["OPT_PRE_SHARED_CERT"] = opt_pre_shared_cert # pragma no cover
infile = api.file.read_text(
"read input template %s" % input_yaml, input_yaml, test_data=test_data
)
outfile = ""
    # First pass: find the image URL and retrieve its digest.
for curline in infile.splitlines(True):
if "image:" in curline:
image_url = curline[
curline.find("image:") + len("image:") : curline.find(":$IMAGETAG")
].strip()
image_url_temp = Template(image_url)
image_url = image_url_temp.substitute(replace_dict)
# SHA256 can be retrieved through gcloud container images list-tags gcr.io/goma-fuchsia/auth-server --filter "tags: \"latest\"" --limit 1 --format='get(digest)'
digest = api.gcloud(
"container",
"images",
"list-tags",
image_url,
"--filter",
"tags: %s" % imagetag,
"--limit",
"1",
"--format=get(digest)",
step_name="retrieve digest for %s" % image_url,
stdout=api.raw_io.output(),
).stdout.strip()
break
    # Second pass: replace the placeholders with the actual image spec data.
replace_dict["IMAGETAG"] = imagetag
for curline in infile.splitlines(True):
if ":$IMAGETAG" in curline:
curline = curline.replace(":$IMAGETAG", "@" + digest)
curline_temp = Template(curline)
curline = curline_temp.substitute(replace_dict)
outfile += curline
api.file.write_text("write gke yaml %s" % output_yaml, output_yaml, outfile)
def generate_k8s_yaml_from_template_on_directory(
api,
templates_root,
k8s_config_root,
project_id,
container_project_id,
cluster,
rbe_instance_prefix,
cmd_files_bucket,
toolchain_config_bucket,
imagetag,
cache_bucket_prefix,
tag,
):
"""Generate the k8s configuration yamls from a template directory.
Args:
* api (RecipeApi) - The RecipeApi object.
* templates_root (Path) - The path to the root directory of the templates.
* k8s_config_root (Path) - The path to the root directory that holds generated
k8s configuration yamls.
    * project_id (str) - The project id of the gcloud project hosting k8s
      clusters.
    * container_project_id (str) - The project id of the gcloud project hosting
      goma docker images.
* cluster (str) - The name of the k8s cluster.
* rbe_instance_prefix (str) - The instance prefix for RBE workers.
* cmd_files_bucket (str) - The GCS bucket name for Goma config files.
* toolchain_config_bucket (str) - The GCS bucket of Goma toolchain config.
* imagetag (str) - The container tag name for Goma GCP images.
* cache_bucket_prefix (str) - The GCS bucket name for toolchain caches.
* tag (str) - The time stamp tag.
"""
with api.step.nest("generate yaml from template directory %s" % templates_root):
for item in api.file.glob_paths(
"glob template dir %s" % str(templates_root),
templates_root,
"*/*yaml*",
test_data=[
templates_root.join("goma", "deploy_auth-server.yaml.custom.in"),
templates_root.join("goma", "deploy_cmd-cache-server.yaml"),
],
):
output_file = template_to_output(api, item, templates_root, k8s_config_root)
api.file.ensure_directory("ensure directory", api.path.dirname(output_file))
if str(item).endswith(".yaml") or str(item).endswith(".yaml.custom"):
api.file.remove("remove %s" % output_file, output_file)
api.file.copy("copy %s" % item, item, output_file)
continue
            with api.step.nest(
                "generate yaml {} from template {}".format(output_file, item)
            ):
generate_k8s_yaml_from_template(
api,
item,
output_file,
project_id,
cluster,
container_project_id,
rbe_instance_prefix,
cmd_files_bucket,
toolchain_config_bucket,
cache_bucket_prefix,
imagetag=imagetag,
tag=tag,
test_data=YAML_TEMPLATE_TEST_DATA,
)
def config_cluster(
api, config_dir, cluster_project, toolchain_project, cluster, tag, timestamp
):
"""Generate Goma k8s cluster configurations.
This function mocks the behaviors of `./build.sh k8s config $CLUSTER`. It
only supports Goma GCP with RBE.
Args:
    * api (RecipeApi) - The RecipeApi object.
    * config_dir (Path) - The path to the goma configuration directory.
    * cluster_project (str) - The project id of the gcloud project hosting k8s
      clusters.
    * toolchain_project (str) - The project id of the gcloud project hosting
      goma toolchain images.
    * cluster (str) - The name of the cluster.
    * tag (str) - The container tag name for Goma GCP images.
* timestamp (str) - The timestamp tag.
"""
rbe_instance_prefix = "projects/%s/instances" % cluster_project
cmd_files_bucket = "%s-files" % toolchain_project
toolchain_config_bucket = "%s-toolchain-config" % toolchain_project
cache_bucket_prefix = "{}-{}".format(cluster_project, cluster)
# We only support RBE
cluster_template = "rbe"
k8s_config_root = config_dir.join("k8s").join(cluster)
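    # Generated configs land under <config_root>/k8s/<cluster>, e.g. goma/k8s/rbe-dev.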
with api.step.nest("Remove existing k8s configurations"):
for item in api.file.glob_paths(
"glob existing configurations",
            k8s_config_root,
"*/*.yaml",
test_data=[
                k8s_config_root.join("goma", "configmap_nginx-extra-conf.yaml")
],
):
api.file.remove("remove %s" % item, item)
templates_root = config_dir.join("k8s").join("templates-%s" % cluster_template)
cluster_templates_root = config_dir.join("k8s").join(cluster, "templates")
project_templates_root = config_dir.join("k8s").join(
cluster, "templates-%s" % cluster_project
)
with api.step.nest("process yamls"):
        # Process yaml from "${templates}"/*/*yaml*
generate_k8s_yaml_from_template_on_directory(
api,
templates_root,
k8s_config_root,
cluster_project,
toolchain_project,
cluster,
rbe_instance_prefix,
cmd_files_bucket,
toolchain_config_bucket,
            imagetag=tag,
            cache_bucket_prefix=cache_bucket_prefix,
tag=timestamp,
)
# Process yaml from "${cluster}/templates/"*/*yaml*
generate_k8s_yaml_from_template_on_directory(
api,
cluster_templates_root,
k8s_config_root,
cluster_project,
toolchain_project,
cluster,
rbe_instance_prefix,
cmd_files_bucket,
toolchain_config_bucket,
            imagetag=tag,
            cache_bucket_prefix=cache_bucket_prefix,
tag=timestamp,
)
# Process yaml from "${cluster}/templates-${project}/"*/*yaml*
generate_k8s_yaml_from_template_on_directory(
api,
project_templates_root,
k8s_config_root,
cluster_project,
toolchain_project,
cluster,
rbe_instance_prefix,
cmd_files_bucket,
toolchain_config_bucket,
            imagetag=tag,
            cache_bucket_prefix=cache_bucket_prefix,
tag=timestamp,
)
# Patch yaml files
with api.step.nest("patch yaml files"):
for item in api.file.glob_paths(
"glob yaml dir %s" % str(k8s_config_root),
k8s_config_root,
"*/*.yaml",
test_data=[k8s_config_root.join("goma", "deploy_exec-server.yaml")],
):
item_rel = api.path.relpath(item, k8s_config_root)
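            # Patches for <subdir>/<name>.yaml live under
            # patches/<subdir>/<name>.yaml.<patch-type>.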
for patch_file in api.file.glob_paths(
"glob patch dir %s" % str(k8s_config_root.join("patches")),
k8s_config_root.join("patches"),
item_rel + ".*",
test_data=[
k8s_config_root.join(
"patches",
"goma",
"deploy_exec-server.yaml.replica.strategic",
)
],
):
patch_yaml(api, item, patch_file)
    # Generate the memorystore configmap.
with api.step.nest("generate memory store"):
api.file.write_text(
"write configmap-memorystore.yaml file",
k8s_config_root.join("goma", "configmap-memorystore.yaml"),
gen_configmap_memorystore(api, config_dir, cluster_project, cluster),
)
def RunSteps(
api,
repository,
config_root,
toolchain_project,
cluster_project,
cluster,
tag,
dry_run,
):
timestamp = generate_time_stamp(api)
# checkout
infra_config_dir = api.path["start_dir"].join("config")
goma_config_dir = infra_config_dir.join(config_root)
# for recipe tests, add mock files.
api.path.mock_add_paths(goma_config_dir.join("gke", "rbe-dev", "cluster.yaml"))
api.path.mock_add_paths(goma_config_dir.join("gke-res", "rbe-dev", "storage.yaml"))
api.git.checkout(
url=repository, path=infra_config_dir, submodules=False, cache=False
)
with api.step.nest("configurate goma GCP backend"):
api.path.mock_add_paths(goma_config_dir.join("gke", cluster, "cluster.yaml"))
config_cluster(
api,
goma_config_dir,
cluster_project,
toolchain_project,
cluster,
tag,
timestamp,
)
# Push changes to infra/config.
with api.context(cwd=infra_config_dir):
# Calculate the Change ID for Gerrit.
api.git.add(add_all=True, intent_to_add=True)
diff_step = api.git(
"git diff",
"diff",
stdout=api.raw_io.output(),
step_test_data=lambda: api.raw_io.test_api.stream_output("a diff"),
)
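        # Hashing the diff yields a deterministic Change-Id, so re-running with
        # identical content targets the same Gerrit change.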
hash_step = api.git.hash_object(
diff_step.stdout,
step_test_data=lambda: api.raw_io.test_api.stream_output("abc123"),
)
change_id = "I%s" % hash_step.stdout.strip()
message = COMMIT_MESSAGE.format(
project=cluster_project, cluster=cluster, tag=tag,
) + ("\nChange-Id: %s\n" % change_id)
api.git.commit(
message=message, all_files=True,
)
diff_step = api.step(
"diff", ["git", "diff", "HEAD^"], stdout=api.raw_io.output()
)
diff_step.presentation.logs["diff"] = diff_step.stdout.splitlines()
push_step = api.git.push("HEAD:refs/for/master", ok_ret="any")
if push_step.retcode != 0:
# Maybe caused by change ID collision.
push_step.presentation.step_summary_text = "rejected by gerrit"
push_step.presentation.step_text = "\nChange is identical to a previous CL"
return
gerrit_host = api.gerrit.host_from_remote_url(repository)
if dry_run:
api.gerrit.abandon(
"abandon the change", change_id, message="dry run", host=gerrit_host
)
else:
labels = {"Code-Review": 2}
api.gerrit.set_review(
"submit to commit queue", change_id, labels=labels, host=gerrit_host
)
api.gerrit.submit(name="submit", change_id=change_id, host=gerrit_host)
def GenTests(api):
dry_run_properties = api.properties(
repository="https://fuchsia.googlesource.com/infra/config",
config_root="goma",
toolchain_project="goma-fuchsia",
cluster_project="goma-fuchsia",
tag="latest",
dry_run=True,
)
default_properties = api.properties(
repository="https://fuchsia.googlesource.com/infra/config",
config_root="goma",
toolchain_project="goma-fuchsia",
cluster_project="goma-fuchsia",
tag="latest",
dry_run=False,
)
region_step_data = api.step_data(
"configurate goma GCP backend.generate memory store.load yaml [START_DIR]/config/goma/gke-res/rbe-dev/storage.yaml",
stdout=api.json.output({"region": "us-central"}),
)
yield api.status_check.test("dry_run") + dry_run_properties + region_step_data
yield api.status_check.test("default") + default_properties + region_step_data
yield api.status_check.test(
"change_id_collision"
) + default_properties + api.step_data("git push", retcode=1) + region_step_data