Address more comments about deploy production_build command
CB-49
Change-Id: I68924069b627e3bf7e547dfe3c2e25a800041cfe
diff --git a/cobaltb.py b/cobaltb.py
index 9b6b2ca..00d3b0b 100755
--- a/cobaltb.py
+++ b/cobaltb.py
@@ -31,6 +31,7 @@
import tools.golint as golint
import tools.process_starter as process_starter
import tools.test_runner as test_runner
+import tools.production_util as production_util
from tools.test_runner import E2E_TEST_ANALYZER_PUBLIC_KEY_PEM
from tools.test_runner import E2E_TEST_SHUFFLER_PUBLIC_KEY_PEM
@@ -536,11 +537,11 @@
args.cloud_project_name, args.cluster_zone)
def _deploy_build(args):
- if _parse_bool(args.is_production_cluster):
- print("Production configs must be built using './cobaltb.py deploy "
+ if args.production_dir:
+ print("Production configs should be built using './cobaltb.py deploy "
"production_build' which will build a clean version of the binaries, then "
"build the docker images in one step.")
- answer = raw_input("Continue? (y/N) ")
+ answer = raw_input("Continue anyway? (y/N) ")
if not _parse_bool(answer):
return
container_util.build_all_docker_images(
@@ -551,8 +552,7 @@
"config using the './cobaltb.py update_config' command.")
def _deploy_production_build(args):
- full_ref = container_util.build_and_push_production_docker_images(
- args.cloud_project_prefix,
+ full_ref = production_util.build_and_push_production_docker_images(
args.cloud_project_name,
args.production_dir,
args.git_revision)
@@ -560,7 +560,8 @@
if full_ref:
print
print
- print 'To enable this build, copy this json blob into versions.json:'
+ print('Done pushing the new build. To set this as the default build, copy '
+ 'the following json blob into the versions.json.')
print
print '{'
print ' "shuffler": "%s",' % full_ref
@@ -837,10 +838,6 @@
help='The zone in which your GKE "container cluster" is located. '
'Default=%s' % cluster_settings['cluster_zone'],
default=cluster_settings['cluster_zone'])
- parser.add_argument('--is_production_cluster',
- help='True if we are deploying to a production cluster '
- 'Default=%s' % cluster_settings['is_production_cluster'],
- default=cluster_settings['is_production_cluster'])
def _add_static_ip_args(parser, cluster_settings):
parser.add_argument('--shuffler_static_ip',
@@ -863,10 +860,7 @@
_add_cloud_access_args(parser, cluster_settings)
_add_static_ip_args(parser, cluster_settings)
-def _add_deploy_start_stop_args(parser, cluster_settings):
- parser.add_argument('--job',
- help='The job you wish to start or stop. Valid choices are "shuffler", '
- '"analyzer-service", "report-master". Required.')
+def _add_deploy_start_args(parser, cluster_settings):
parser.add_argument('--bigtable_instance_id',
help='Specify a Cloud Bigtable instance within the specified Cloud '
'project that the Analyzer should connect to. This is required '
@@ -956,7 +950,6 @@
'shuffler_static_ip' : '',
'shuffler_use_memstore' : '',
'use_tls': '',
- 'is_production_cluster': 'false',
}
if production_cluster_json_file:
_cluster_settings_from_json(cluster_settings, production_cluster_json_file)
@@ -1623,19 +1616,27 @@
parents=[parent_parser], help='Start one of the jobs on GKE.')
sub_parser.set_defaults(func=_deploy_start)
_add_gke_deployment_args(sub_parser, cluster_settings)
- _add_deploy_start_stop_args(sub_parser, cluster_settings)
+ sub_parser.add_argument('--job',
+ help='The job you wish to start. Valid choices are "shuffler", '
+ '"analyzer-service", "report-master". Required.')
+ _add_deploy_start_args(sub_parser, cluster_settings)
sub_parser = deploy_subparsers.add_parser('stop',
parents=[parent_parser], help='Stop one of the jobs on GKE.')
sub_parser.set_defaults(func=_deploy_stop)
_add_gke_deployment_args(sub_parser, cluster_settings)
- _add_deploy_start_stop_args(sub_parser, cluster_settings)
+ sub_parser.add_argument('--job',
+ help='The job you wish to stop. Valid choices are "shuffler", '
+ '"analyzer-service", "report-master". Required.')
+ _add_deploy_start_args(sub_parser, cluster_settings)
sub_parser = deploy_subparsers.add_parser('stopstart',
parents=[parent_parser], help='Stop and start a job on GKE.')
sub_parser.set_defaults(func=_deploy_stopstart)
_add_gke_deployment_args(sub_parser, cluster_settings)
- _add_deploy_start_stop_args(sub_parser, cluster_settings)
+  sub_parser.add_argument('--job',
+      help='The job you wish to stop then start. Valid choices are "shuffler", '
+      '"analyzer-service", "report-master". Required.')
+  _add_deploy_start_args(sub_parser, cluster_settings)
sub_parser = deploy_subparsers.add_parser('upload_secret_keys',
parents=[parent_parser], help='Creates |secret| objects in the '
diff --git a/production/fuchsia-cobalt-us-central1-c/cluster.json b/production/fuchsia-cobalt-us-central1-c/cluster.json
index 137c779..8b5570b 100644
--- a/production/fuchsia-cobalt-us-central1-c/cluster.json
+++ b/production/fuchsia-cobalt-us-central1-c/cluster.json
@@ -13,6 +13,5 @@
"shuffler_preferred_address": "shuffler.cobalt-api.fuchsia.com:443",
"shuffler_static_ip" : "",
"shuffler_use_memstore" : "true",
- "use_tls": "true",
- "is_production_cluster": "true"
+ "use_tls": "true"
}
diff --git a/tools/container_util.py b/tools/container_util.py
index eb39747..b9b5960 100755
--- a/tools/container_util.py
+++ b/tools/container_util.py
@@ -150,8 +150,6 @@
# //kubernetes/report_master/Dockerfile
REPORT_MASTER_GCS_SERVICE_ACCOUNT_JSON_FILE_NAME = "gcs_service_account.json"
-COBALT_REPO_CLONE_URL = "https://fuchsia.googlesource.com/cobalt"
-
def _ensure_dir(dir_path):
"""Ensures that the directory at |dir_path| exists. If not it is created.
@@ -221,94 +219,6 @@
SHUFFLER_DOCKER_BUILD_DIR,
extra_args=["--build-arg", "config_file=%s"%config_file_name])
-
-def _select_git_revision():
- tags = subprocess.check_output(['git', 'tag', '-l',
- '--sort=-version:refname']).strip().split('\n')[:5]
- tags.append('HEAD')
-
- while True:
- print
- print
- print('Which version would you like to build for?')
- for i, tag in enumerate(tags):
- print('({}) {}'.format(i, tag))
-
- selection = raw_input('? ')
- try:
- selection = int(selection)
- if selection < len(tags):
- return tags[selection]
- except:
- print("Invalid selection")
-
-
-def build_and_push_production_docker_images(cloud_project_prefix,
- cloud_project_name, production_dir, git_revision):
- """ Builds and pushes production-ready docker images from a clean git repo.
- cloud_project_prefix {sring}: For example "google.com"
- cloud_project_name {sring}: For example "shuffler-test". The prefix and
- name are used when forming the URI to the image in the registry and
- also the bigtable project name.
- production_dir {string}: The directory of the production config files.
- git_revision {string}: A git revision passed in from the command line, if none
- is provided, the user will be prompted to select one.
- latest will be used.
- """
-
- clean_repo_dir = tempfile.mkdtemp('-cobalt-production-build')
-
- try:
- subprocess.check_call(['git', 'clone', COBALT_REPO_CLONE_URL,
- clean_repo_dir])
-
- wd = os.getcwd()
- os.chdir(clean_repo_dir)
-
- if not git_revision:
- git_revision = _select_git_revision()
-
- if git_revision is 'HEAD':
- git_revision = subprocess.check_output(['git', 'rev-parse',
- 'HEAD']).strip()
-
- subprocess.check_call(['git', 'checkout', git_revision])
- describe = subprocess.check_output(['git', 'describe']).strip()
- full_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
-
- subprocess.check_call(['./cobaltb.py', 'setup'])
- subprocess.check_call(['./cobaltb.py', 'clean', '--full'])
- subprocess.check_call(['./cobaltb.py', 'build'])
- p = subprocess.Popen(['./cobaltb.py', 'deploy', 'build',
- '--production_dir=%s' % production_dir],
- stdin=subprocess.PIPE)
- p.communicate('yes')
-
- tags_to_apply = ['latest', full_rev]
- if describe is not git_revision:
- tags_to_apply.append(describe)
- subrev = ''
- for part in git_revision.split('.'):
- if subrev is not '':
- subrev += '.'
- subrev += part
- tags_to_apply.append(subrev)
-
- for tag in tags_to_apply:
- push_shuffler_to_container_registry(cloud_project_prefix,
- cloud_project_name, tag)
- push_analyzer_service_to_container_registry(cloud_project_prefix,
- cloud_project_name, tag)
- push_report_master_to_container_registry(cloud_project_prefix,
- cloud_project_name, tag)
-
- os.chdir(wd)
-
- return full_rev
- finally:
- print("Cleaning up")
- shutil.rmtree(clean_repo_dir)
-
def _image_registry_uri(cloud_project_prefix, cloud_project_name, image_name,
tag='latest'):
if not cloud_project_prefix:
@@ -526,7 +436,7 @@
cluster_zone, cluster_name,
bigtable_instance_id,
static_ip_address,
- version):
+ docker_tag):
""" Starts the analyzer-service deployment and service.
cloud_project_prefix {sring}: For example "google.com"
cloud_project_name {sring}: For example "shuffler-test". The prefix and
@@ -536,11 +446,11 @@
within the specified project to be used by the Analyzer Service.
static_ip_address {string}: A static IP address that has already been
reserved on the GKE cluster.
- version {string}: The version of the docker image to use. If none is provided,
- latest will be used.
+ docker_tag {string}: The docker_tag of the docker image to use. If none is
+ provided, latest will be used.
"""
image_uri = _image_registry_uri(cloud_project_prefix, cloud_project_name,
- ANALYZER_SERVICE_IMAGE_NAME, version)
+ ANALYZER_SERVICE_IMAGE_NAME, docker_tag)
bigtable_project_name = compound_project_name(cloud_project_prefix,
cloud_project_name)
@@ -574,7 +484,7 @@
cluster_zone, cluster_name,
bigtable_instance_id,
static_ip_address,
- version,
+ docker_tag,
enable_report_scheduling=False):
""" Starts the report-master deployment and service.
cloud_project_prefix {string}: For example "google.com"
@@ -585,12 +495,12 @@
within the specified project to be used by the Report Master.
static_ip_address {string}: A static IP address that has already been
reserved on the GKE cluster.
- version {string}: The version of the docker image to use. If none is provided,
- latest will be used.
+ docker_tag {string}: The docker_tag of the docker image to use. If none is
+ provided, latest will be used.
enable_report_scheduling {bool}: Should report scheduling be enabled?
"""
image_uri = _image_registry_uri(cloud_project_prefix, cloud_project_name,
- REPORT_MASTER_IMAGE_NAME, version)
+ REPORT_MASTER_IMAGE_NAME, docker_tag)
bigtable_project_name = compound_project_name(cloud_project_prefix,
cloud_project_name)
@@ -638,7 +548,7 @@
cluster_zone, cluster_name,
gce_pd_name,
static_ip_address,
- version,
+ docker_tag,
use_memstore=False,
danger_danger_delete_all_data_at_startup=False):
""" Starts the shuffler deployment and service.
@@ -650,11 +560,11 @@
storage so that the data persists between Shuffler updates.
static_ip_address {string}: A static IP address that has already been
reserved on the GKE cluster.
- version {string}: The version of the docker image to use. If none is provided,
- latest will be used.
+ docker_tag {string}: The docker_tag of the docker image to use. If none is
+ provided, latest will be used.
"""
image_uri = _image_registry_uri(cloud_project_prefix, cloud_project_name,
- SHUFFLER_IMAGE_NAME, version)
+ SHUFFLER_IMAGE_NAME, docker_tag)
# These are the token replacements that must be made inside the deployment
# template file.
use_memstore_string = 'false'
diff --git a/tools/production_util.py b/tools/production_util.py
new file mode 100644
index 0000000..3564901
--- /dev/null
+++ b/tools/production_util.py
@@ -0,0 +1,114 @@
+# Copyright 2018 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains utilities for producing clean production builds of
+# Cobalt from a fresh clone of the repository and for pushing the
+# resulting docker images to the container registry with version-based
+# tags.
+
+"""A library to help with building/deploying production cobalt packages."""
+
+import subprocess
+import tempfile
+import shutil
+import os
+
+import container_util
+
+COBALT_REPO_CLONE_URL = "https://fuchsia.googlesource.com/cobalt"
+
+def _cobaltb(*args):
+ cmd = ['./cobaltb.py']
+ cmd.extend(args)
+ subprocess.check_call(cmd)
+
+def _select_git_revision():
+ tags = subprocess.check_output(['git', 'tag', '-l',
+ '--sort=-version:refname']).strip().split('\n')[:5]
+ tags.append('HEAD')
+
+ while True:
+ print
+ print
+ print('Which version would you like to build for?')
+ for i, tag in enumerate(tags):
+ print('({}) {}'.format(i, tag))
+
+ selection = raw_input('? ')
+    try:
+      selection = int(selection)
+      if 0 <= selection < len(tags):
+        return tags[selection]
+    except ValueError:
+      print("Invalid selection")
+
+def build_and_push_production_docker_images(cloud_project_name, production_dir,
+ git_revision):
+ """ Builds and pushes production-ready docker images from a clean git repo.
+  cloud_project_name {string}: For example "fuchsia-cobalt". The name is used
+ when forming the URI to the image in the registry and also the bigtable
+ project name.
+ production_dir {string}: The directory of the production config files.
+  git_revision {string}: A git revision passed in from the command line. If
+    none is provided, the user will be prompted to select one of the most
+    recent version tags (or HEAD).
+ """
+
+ clean_repo_dir = tempfile.mkdtemp('-cobalt-production-build')
+
+ try:
+ subprocess.check_call(['git', 'clone', COBALT_REPO_CLONE_URL,
+ clean_repo_dir])
+
+ wd = os.getcwd()
+ os.chdir(clean_repo_dir)
+
+ if not git_revision:
+ git_revision = _select_git_revision()
+
+    if git_revision == 'HEAD':
+ git_revision = subprocess.check_output(['git', 'rev-parse',
+ 'HEAD']).strip()
+
+ subprocess.check_call(['git', 'checkout', git_revision])
+ describe = subprocess.check_output(['git', 'describe']).strip()
+ full_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
+
+ _cobaltb('setup')
+ _cobaltb('clean', '--full')
+ _cobaltb('build')
+ _cobaltb('test')
+
+ p = subprocess.Popen(['./cobaltb.py', 'deploy', 'build',
+ '--production_dir=%s' % production_dir],
+ stdin=subprocess.PIPE)
+ p.communicate('yes')
+
+ tags_to_apply = ['latest', full_rev]
+    if describe != git_revision:
+ tags_to_apply.append(describe)
+ subrev = ''
+ # This will construct a series of tags based on the version. e.g. if the
+ # version is v1.2.3, it would create the tags 'v1', 'v1.2', and 'v1.2.3'
+ for part in git_revision.split('.'):
+      if subrev:
+ subrev += '.'
+ subrev += part
+ tags_to_apply.append(subrev)
+
+ for tag in tags_to_apply:
+      container_util.push_shuffler_to_container_registry('',
+          cloud_project_name, tag)
+      container_util.push_analyzer_service_to_container_registry('',
+          cloud_project_name, tag)
+      container_util.push_report_master_to_container_registry('',
+          cloud_project_name, tag)
+
+ os.chdir(wd)
+
+ return full_rev
+ finally:
+ raw_input("Press enter to finish build and delete directory...")
+ print("Cleaning up")
+ shutil.rmtree(clean_repo_dir)