#!/bin/bash
# Copyright 2021 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# In Python this is a triple-quoted string, but when this script is run by Bash,
# it ignores the four quotes (interpreting them as two single-quoted empty
# strings) and re-execs the script using the prebuilt Python interpreter,
# ensuring that the script won't fail if run on a machine that has an outdated
# version of Python.
#
# TODO(fxbug.dev/79905): Remove this workaround once `fx` itself uses
# hermetic-env to exec subcommands.
# pylint: disable=pointless-string-statement
''''exec "$FUCHSIA_DIR/scripts/hermetic-env" python3 -u -- "$0" ${1+"$@"} # '''
#### CATEGORY=Source tree
### Sync the local Fuchsia source tree to a given state
import argparse
import enum
import functools
import json
import os
import pathlib
import re
import shutil
import subprocess
import sys
import tempfile
from typing import List, Union
import urllib.parse
HELP = """\
## usage: fx sync-to [-h|--help] [-n|--dry-run] [-f|--force] <STATE>
##
## To reproduce builds at a given repo state, or bisect bugs among a series of
## checkins, this command synchronizes the local Fuchsia source tree to a given
## state. The state can be specified in one of the following ways:
##
## BUILD_ID: a large number like '8938070794014050064' (optionally preceded
## by a 'b'), which is the "Build #" in a builder's "Ended builds" page,
## for example:
## https://luci-milo.appspot.com/p/fuchsia/builders/try/core.x64-asan
##
## RELEASE_TAG: a string like "releases/0.20190927.1.1", representing a
## git tag in the //integration repository. To find available tags, run
## `git tag -l` in your local integration repository.
##
## BRANCH_NAME: a string like "refs/heads/releases/f1", representing a
## git branch (head) in the //integration repository.
##
## JIRI_HISTORY_TIMESTAMP: a timestamp like "2019-03-21T15:30:00-07:00".
## This is local to your tree, and represents a moment where you
## previously ran `jiri update`. To find available timestamps, look at
## files in ${FUCHSIA_DIR}/.jiri_root/update_history/
##
## INTEGRATION_GIT_COMMIT: a 3-40 character commit hash like "e9d97d1" in the
## integration repo. Can be optionally prefixed with 'git:' (e.g.
## "git:e9d97d1") to disambiguate from BUILD_ID. To find valid commits,
## look at your integration commit history:
## git -C ${FUCHSIA_DIR}/integration log --oneline
##
## "reset":
## Use "reset" to return to the top of the tree. This is equivalent to:
## git -C ${FUCHSIA_DIR}/integration checkout JIRI_HEAD && jiri update -gc
##
## Known limitations:
## - Does not work on CI builds triggered on repos other than integration (very
## rare). It works on all CQ builds and all CI builds triggered by
## integration.git commits.
## - Does not respect `attributes`. Local attributes will not be overridden (so
## the checkout may contain some extra repositories), and
## attributes used by an infra build will not be reproduced.
##
## Examples:
##
## # Sync to the source used by build https://ci.chromium.org/b/8835832080588336881
## fx sync-to 8835832080588336881
##
## # Sync to the source tagged as release 0.20210822.2.5:
## fx sync-to releases/0.20210822.2.5
##
## # Sync to the tree state recorded by the local update at
## # 2021-08-28T14:26:22-07:00 (these timestamps are specific to your checkout -
## # look for them in your own .jiri_root/update_history directory):
## fx sync-to 2021-08-28T14:26:22-07:00
##
## # Sync to integration commit 901ed5b
## # (https://fuchsia.googlesource.com/integration/+/901ed5bf7db253bb6feb4832ac1a752248e2361d):
## fx sync-to 901ed5b
##
## # Sync to release branch f1:
## fx sync-to refs/heads/releases/f1
##
## # Restore your source to the top of the tree:
## fx sync-to reset
##
"""
# The name of the temporary integration.git branch that we'll use for checking
# out an integration revision to sync to.
TEMP_BRANCH_NAME = "_fx-sync-to"
# Matches release branch names in the integration repository.
RELEASE_BRANCH_REGEX = r"^refs/heads/([/0-9.A-Za-z-]+)$"
# Matches release tags in the integration repository.
RELEASE_TAG_REGEX = r"^releases\/[0-9.A-Z]+$"
# Matches a timestamp of the form that Jiri uses for names of snapshot files in
# the .jiri_root/update_history dir.
TIMESTAMP_REGEX = r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9:]+[-+][0-9:]+$"
# Matches a git revision, with an optional "git:" prefix to distinguish an
# all-digit revision from a LUCI build ID.
GIT_REVISION_REGEX = r"^(git:)?[0-9a-f]{3,40}$"
# Matches a LUCI build ID, with an optional leading "b" as included in LUCI
# build URLs.
BUILD_ID_REGEX = r"^b?[0-9]+$"
class Error(Exception):
"""Raise this type to present a user-friendly error instead of a stacktrace.
By default, causes the program to return a nonzero return code. To cause the
program to emit a return code of zero, pass `fail=False`.
"""
def __init__(self, msg: str, fail: bool = True):
self.msg = msg
self.fail = fail
class Color(enum.Enum):
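    """ANSI terminal color escape codes."""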
RESET = "\33[0m"
RED = "\33[31m"
GREEN = "\33[32m"
YELLOW = "\33[33m"
def colorize(text: str, color: Color) -> str:
"""Wrap `text` in terminal color directives.
The return value will show up as the given color when printed in a terminal.
"""
return f"{color.value}{text}{Color.RESET.value}"
class SyncToCommand:
def __init__(self, args: argparse.Namespace, fuchsia_dir: str):
self.fuchsia_dir = pathlib.Path(fuchsia_dir)
self.help: bool = args.help
self.state: str = args.state
self.dry_run: bool = args.dry_run
self.force: bool = args.force
@property
def integration_dir(self) -> pathlib.Path:
return self.fuchsia_dir.joinpath("integration")
@functools.cached_property
def gsutil_path(self) -> str:
gsutil = shutil.which("gsutil")
if not gsutil:
raise Error(
"Cannot find gsutil. Please install gcloud and run 'gcloud init'.\n"
"See https://cloud.google.com/storage/docs/gsutil_install")
return gsutil
def run(self) -> None:
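        """Print help if requested, otherwise sync to the requested state."""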
if self.help:
for line in HELP.splitlines():
print(line[3:])
return
if not self.state:
raise Error("A single positional argument is required.")
if self.dry_run:
print(
colorize(
"Running in dry-run mode. No changes will be made to your checkout.",
Color.GREEN))
self.confirm_ok_to_change_source_code()
        # Always update the integration repo so we don't miss recently pushed
        # refs. This should be a safe operation, and it prevents falsely
        # concluding that a requested ref doesn't exist.
print("Fetching refs in the integration repo...")
self.run_command(
"git",
"-C",
self.integration_dir,
"fetch",
"--quiet",
"origin",
stderr=subprocess.PIPE,
dry_run_safe=True)
# Force-checkout every repository at JIRI_HEAD to ensure subsequent
# `jiri update` calls won't skip repos not at JIRI_HEAD. This will
# intentionally fail if there are any uncommitted changes (which should
# have been detected by `confirm_ok_to_change_source_code` anyway) to
# avoid permanently deleting code.
self.jiri(
"runp", f"-exit-on-error={str(not self.force).lower()}",
"git checkout --quiet JIRI_HEAD")
if self.state == "reset":
self.reset_checkout()
elif re.match(RELEASE_BRANCH_REGEX, self.state):
self.sync_to_release_branch()
elif re.match(RELEASE_TAG_REGEX, self.state):
self.sync_to_release_tag()
elif re.match(TIMESTAMP_REGEX, self.state):
self.sync_to_timestamp()
elif not self.state.startswith("git:") and re.match(BUILD_ID_REGEX,
self.state):
self.sync_to_build_id()
# Git short SHA-1s can be any length up to 40, but assume at least three
# characters.
elif re.match(GIT_REVISION_REGEX, self.state):
self.sync_to_integration_commit()
else:
raise Error(
f"Unsupported state definition format: {self.state!r}.\n"
"Use -h for help.")
if not self.dry_run:
self.print_synced_revision()
def reset_checkout(self) -> None:
"""Sync all repos back to JIRI_HEAD."""
# TODO(olivernewman): Have `fx sync-to` create a snapshot of the
# checkout prior to syncing and have `reset` restore the exact contents
# of the old checkout instead of always going back to JIRI_HEAD.
print("Resetting the tree to the latest...")
self.jiri("update", "-gc")
def sync_to_release_branch(self) -> None:
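        """Sync to an integration branch, e.g. 'refs/heads/releases/f1'."""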
print(f"{self.state!r} looks like a branch name")
branch = "origin/" + remove_prefix(self.state, "refs/heads/")
if not self.is_valid_integration_rev(branch):
raise Error(
f"Invalid remote branch, is {self.state!r} a valid head in"
" an integration repo other than the one your source is using?\n\n"
"See valid branches by running:\n\n"
" git -C ${FUCHSIA_DIR}/integration ls-remote --heads")
self.sync_to_integration_ref(branch)
def sync_to_release_tag(self) -> None:
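        """Sync to an integration release tag, e.g. 'releases/0.20210822.2.5'."""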
print(f"{self.state!r} looks like a release tag")
if not self.is_valid_integration_rev(self.state):
raise Error(
f"Invalid release tag, is {self.state!r} a tag from"
" an integration repo other than the one your source is using?\n\n"
"See valid tags by running:\n\n"
" git -C ${FUCHSIA_DIR}/integration tag -l")
self.sync_to_integration_ref(f"tags/{self.state}")
def sync_to_integration_commit(self) -> None:
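        """Sync to an integration commit, stripping any 'git:' prefix first."""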
print(f"{self.state!r} looks like an integration git commit")
revision = remove_prefix(self.state, "git:")
if not self.is_valid_integration_rev(revision):
raise Error(
f"Commit not found, is {self.state!r} a commit from a repo other"
" than your local integration repo?"
"See valid commits by running:\n"
" git -C ${FUCHSIA_DIR}/integration log --oneline")
self.sync_to_integration_ref(revision)
def sync_to_timestamp(self) -> None:
"""Sync the checkout based on a Jiri snapshot XML file from a past timestamp."""
print(f"{self.state!r} looks like a local Jiri update timestamp")
update_history_dir = self.fuchsia_dir.joinpath(
".jiri_root", "update_history")
snapshot_file = update_history_dir.joinpath(self.state)
        if not os.path.exists(snapshot_file):
            raise Error(
                "Invalid Jiri history timestamp. See valid options by running:\n"
                f" ls {update_history_dir}")
        self.sync_to_snapshot(snapshot_file)
def sync_to_snapshot(self, snapshot_file: pathlib.Path) -> None:
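        """Sync the checkout to the state recorded in a Jiri snapshot file.

        Raises an Error if the snapshot's integration remote doesn't match
        this checkout's integration remote.
        """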
integration_remote = self.jiri(
"manifest",
"-element=integration",
"-template={{.Remote}}",
snapshot_file,
dry_run_safe=True,
stdout=subprocess.PIPE,
).stdout.strip()
self.assert_correct_integration_repo(integration_remote)
print(f"Syncing to Jiri snapshot {str(snapshot_file)!r}")
self.jiri("update", "-local-manifest", "-gc", snapshot_file)
def sync_to_integration_ref(self, ref: str) -> None:
"""Sync to a branch, tag, or revision of the integration repo."""
print(f"Checking out integration repo at {ref}")
self.run_command(
"git",
"-C",
self.integration_dir,
"checkout",
# Create a new branch; if we checked out without branching, `jiri
# update` would reset integration back to JIRI_HEAD.
"-B",
TEMP_BRANCH_NAME,
# Don't auto-track any remote branch. Otherwise `fx sync-to reset`
# might fail to switch off a branch that's behind the remote branch
# it's tracking.
"--no-track",
ref,
)
try:
self.jiri("update", "-local-manifest", "-gc")
except subprocess.CalledProcessError:
print("Sync failed, restoring the integration repo to JIRI_HEAD")
self.run_command(
"git", "-C", self.integration_dir, "checkout", "JIRI_HEAD")
raise
def is_valid_integration_rev(self, rev: str) -> bool:
"""Determine whether `rev` exists in the local integration repo."""
return (
self.run_command(
"git",
"-C",
self.integration_dir,
"rev-parse",
"--verify",
"-q",
rev,
check=False,
dry_run_safe=True,
).returncode == 0)
def assert_correct_integration_repo(self, remote: str) -> None:
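        """Raise an Error if `remote` doesn't match this checkout's integration remote."""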
jiri_manifest = self.fuchsia_dir.joinpath(".jiri_manifest")
local_remote = self.jiri(
"manifest",
"-element=integration",
"-template={{.Remote}}",
jiri_manifest,
dry_run_safe=True,
stdout=subprocess.PIPE,
).stdout.strip()
if local_remote != remote:
raise Error(
f"Integration remote {remote!r} does not match local"
f" checkout remote {local_remote!r}."
f"\nIf syncing to a public build ID, use a public checkout."
f" You can create a public checkout in a separate directory"
f" by following the directions here:"
f"\nhttps://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source#download-the-fuchsia-source-code"
f"\n"
f"\nThen cd into the public checkout directory and run:"
f"\n ./scripts/fx sync-to {self.state}"
)
def assert_correct_integration_manifest(self, checkout_info: dict) -> None:
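        """Raise an Error if the build's manifest doesn't match this checkout's manifest."""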
jiri_manifest = self.fuchsia_dir.joinpath(".jiri_manifest")
local_manifest = self.jiri(
"manifest",
"-element=integration",
"-template={{.Manifest}}",
jiri_manifest,
dry_run_safe=True,
stdout=subprocess.PIPE,
).stdout.strip()
manifest = checkout_info["manifest"]
if local_manifest != manifest:
workaround = (
f"create a separate Fuchsia checkout that uses the {manifest!r} manifest and "
f"re-run your command there."
)
if not checkout_info.get("patches"):
rev = checkout_info["base_manifest_revision"]
workaround += (
f"\nAlternatively, you can ignore this error by running `fx sync-to {rev}`, "
f"which will sync to the correct revision but may not include the same "
f"repositories and packages used by the build."
)
raise Error(
f"Manifest {manifest!r} does not match local manifest {local_manifest!r}, so "
f"unable to reproduce the build's checkout.\n"
f"As a workaround, you can {workaround}"
)
def sync_to_build_id(self) -> None:
"""Sync to the checkout version used by an infra build."""
print(f"{self.state!r} looks like a build ID")
build_id = remove_prefix(self.state, "b")
print(
f"Syncing to state used by build https://ci.chromium.org/b/{build_id}"
)
bb = self.fuchsia_dir.joinpath("prebuilt", "tools", "buildbucket", "bb")
proc = self.run_command(
bb,
"auth-info",
dry_run_safe=True,
check=False,
# Silence stdout to avoid printing the auth info. We
# only care whether the command passes or fails.
stdout=subprocess.DEVNULL)
if proc.returncode:
print("Please login to Buildbucket first.")
self.run_command(bb, "auth-login")
proc = self.run_command(
bb,
"get",
build_id,
"--json",
"--fields",
"status,builder,output.properties",
dry_run_safe=True,
capture_output=True,
)
build = json.loads(proc.stdout)
checkout_info = build["output"]["properties"].get("checkout_info")
if not checkout_info:
            # `checkout_info` may not be available if the build ran against a
# release branch that used an old version of recipes that didn't
# emit the `checkout_info` property. So fall back to legacy behavior
# of downloading a Jiri snapshot from GCS.
self.sync_to_legacy_build_id(build_id)
return
integration_remote = checkout_info["manifest_remote"]
self.assert_correct_integration_repo(integration_remote)
self.assert_correct_integration_manifest(checkout_info)
self.sync_to_integration_ref(checkout_info["base_manifest_revision"])
has_integration_patch = False
for patch in checkout_info.get("patches", []):
project = patch["project"]
if project == "integration":
has_integration_patch = True
args = ["patch"]
# Some old versions of recipes didn't emit `base_revision`. If no
# `base_revision` is available then we can't reproduce the checkout
# exactly (the build's checkout may have included some extra commits
# between the commit pinned in integration and the patched commit)
# but patching on top of the integration-pinned commit is probably
# close enough in most cases.
if "base_revision" in patch:
args.extend(
["-rebase", "-rebase-revision", patch["base_revision"]])
else:
warning = colorize("WARNING", Color.YELLOW)
print(
f"{warning}: No base revision available for project {project},"
" rebasing on current main branch instead.")
args.extend(
[
"-host",
patch["host"],
"-project",
project,
patch["ref"],
])
self.jiri(*args)
# If we patched in a change to the integration repo, we have to `jiri
# update` again to make sure we respect any project/package pin updates.
if has_integration_patch:
self.jiri("update", "-local-manifest", "-rebase-tracked", "-gc")
def sync_to_legacy_build_id(self, build_id: str) -> None:
"""Sync to a build's Jiri snapshot from GCS.
This is necessary for builds on old release branches that don't emit the
`checkout_info` output property.
"""
print("Build did not emit checkout info, checking GCS for snapshots...")
tempdir = tempfile.TemporaryDirectory()
try:
snapshot_file = self.download_build_snapshot(
pathlib.Path(tempdir.name), build_id)
except Exception:
tempdir.cleanup()
raise
try:
self.sync_to_snapshot(snapshot_file)
except Exception:
print(
f"Preserving the snapshot directory for debugging: {tempdir.name}"
)
raise
tempdir.cleanup()
def download_build_snapshot(
self, dest_dir: pathlib.Path, build_id: str) -> pathlib.Path:
"""Download a Jiri snapshot produced by an infra build from GCS.
Depending on the nature of the infra build, the snapshot file may come
from one of several buckets that have different schemas.
"""
possible_urls = [
# Buckets where each build ID file is the snapshot file itself.
f"gs://fuchsia-snapshots/{build_id}",
] + [
# Buckets where each build is a folder with a jiri_snapshot.xml
# inside.
f"gs://{bucket}/builds/{build_id}/jiri_snapshot.xml" for bucket in [
"fuchsia-artifacts-release",
"fuchsia-artifacts-internal",
"fuchsia-artifacts",
]
]
snapshot_url = ""
for url in possible_urls:
proc = self.gsutil(
"ls",
url,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=False,
dry_run_safe=True)
if proc.returncode == 0:
# Found the bucket that contains the snapshot, so no need to
# check the remaining buckets.
snapshot_url = url
break
if not snapshot_url:
locations = "\n".join(f" {url}" for url in possible_urls)
raise Error(
f"Cannot find a valid snapshot in any of the following locations:\n{locations}\n"
f"Possible reasons:\n"
f" - You are not logged in to gcloud. Try running 'gcloud auth list' and,\n"
f" if necessary, 'gcloud auth login'\n"
f" - The build {build_id} may have ended prematurely, so the step that\n"
f" uploads its artifacts to GCS did not execute.\n"
f" Compare https://ci.chromium.org/b/{build_id} with a successful\n"
f" build of the same builder and see if the failed build stopped before\n"
f" running a step like 'upload artifacts'")
bucket = urllib.parse.urlparse(snapshot_url).hostname
print(f"Downloading Jiri snapshot from {bucket}...")
snapshot_file = dest_dir.joinpath("jiri_snapshot.xml")
self.gsutil(
"-q", # Quiet (no progress indicator)
"-m", # Download files in parallel
"cp",
snapshot_url,
snapshot_file,
dry_run_safe=True)
return snapshot_file
def print_synced_revision(self) -> None:
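        """Print the integration commit that the checkout is now synced to."""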
print()
if self.state == "reset":
print("Reset success! You are now tracking JIRI_HEAD.")
else:
print("Success! To switch back to the latest, use `fx sync-to reset`.")
print("//integration HEAD is now at:")
self.run_command(
"git",
"-C",
self.integration_dir,
"--no-pager",
"show",
"--no-patch",
"--decorate",
"--format=format: %h - (%ar) %an\n %s\n",
dry_run_safe=True,
log=False,
)
def run_command(
self,
*args: Union[str, pathlib.Path],
dry_run_safe: bool = False,
check: bool = True,
log: bool = True,
**kwargs,
) -> subprocess.CompletedProcess:
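        """Run a command as a subprocess.

        Commands not marked `dry_run_safe` are skipped in dry-run mode and
        reported as successful. If `check` is set, a nonzero exit status is
        converted into a user-friendly Error; if `log` is set, the command is
        printed before it runs.
        """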
cmd = [str(a) for a in args] # Convert any Path objects to strings.
cmd_str = " ".join(cmd)
if self.dry_run and not dry_run_safe:
print("Dry-run: %s" % colorize(cmd_str, Color.GREEN))
return subprocess.CompletedProcess(args=cmd, returncode=0)
if log:
print("Running: %s" % colorize(" ".join(cmd), Color.YELLOW))
try:
return subprocess.run(cmd, check=check, text=True, **kwargs)
except subprocess.CalledProcessError as e:
# Override CalledProcessError to make a more user-friendly error
# message and hide the stacktrace.
raise Error(
f"Command {cmd_str!r} returned non-zero exit status {e.returncode}."
)
def gsutil(
self,
*args: Union[str, pathlib.Path],
**kwargs,
) -> subprocess.CompletedProcess:
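        """Run a gsutil command, using the gsutil binary found by `gsutil_path`."""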
return self.run_command(self.gsutil_path, *args, **kwargs)
def jiri(
self,
*args: Union[str, pathlib.Path],
**kwargs,
) -> subprocess.CompletedProcess:
# Prefer to run Jiri via $PATH so that logs are concise and don't
# include the full path to Jiri, but fall back to using Jiri from the
# local checkout if it's not on $PATH.
jiri_path = "jiri"
if not shutil.which(jiri_path):
jiri_path = os.path.join(self.fuchsia_dir, ".jiri_root", "bin", "jiri")
return self.run_command(jiri_path, "-color=always", *args, **kwargs)
def confirm_ok_to_change_source_code(self) -> None:
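        """Get the user's confirmation before modifying the checkout.

        Skips all checks in dry-run mode. Raises an Error if any repo has
        uncommitted changes (unless --force was passed), and prompts the user
        to confirm before proceeding.
        """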
if self.dry_run:
# In dry-run mode we're not actually going to change the source
# code, so we shouldn't bother getting the user's approval or doing
# any safety checks.
return
# Check if any repo has uncommitted changes and exit early if so,
# because we can't sync without discarding the uncommitted changes,
# which could be bad.
#
# First we must update all indexes; otherwise `diff-index` might report
# false positive diffs.
self.jiri("runp", "git update-index -q --refresh")
output = self.jiri(
"runp", "git diff-index --quiet HEAD",
capture_output=True).stdout or ""
dirty_repos = self.jiri_runp_failures(output)
if dirty_repos:
repos_list = "\n".join(f" - {repo}" for repo in dirty_repos)
            if self.force:
                warning = colorize("WARNING", Color.YELLOW)
                print(
                    f"{warning}: The following projects have uncommitted"
                    f" changes that will be discarded:\n{repos_list}")
else:
discard_cmd = f"jiri runp 'git checkout -f HEAD'"
force_cmd = f"fx sync-to --force {self.state!r}"
# Suggest running `git checkout JIRI_HEAD` *after* rerunning fx
# sync-to instead of automatically discarding changes before,
# because the workarounds for some Jiri bugs require making
# temporary local modifications before syncing.
# TODO(olivernewman): Run this automatically if the sync fails
# with an "untracked changes" error.
fixup_cmd = "jiri runp -uncommitted=true 'git checkout --force JIRI_HEAD'"
raise Error(
f"Cannot sync to a different version as the following projects"
f" have uncommitted changes:\n{repos_list}"
f"\nCommit or discard these changes and try again."
f"\n To discard changes in all projects, run the following and then"
f" retry `fx sync-to`:"
f"\n {colorize(discard_cmd, Color.RED)}"
f"\nOr if that still doesn't work, run:"
f"\n {colorize(force_cmd, Color.RED)}"
f"\n {colorize(fixup_cmd, Color.RED)}")
print("Dry-run: nothing will be changed on your local repo.")
while True: # Loop until we get a valid input.
            if self.force:
                uncommitted_info = (
                    f"Because you set --force, "
                    f"{colorize('ANY UNCOMMITTED CHANGES WILL BE DISCARDED', Color.RED)}."
                )
            else:
                uncommitted_info = (
                    "No untracked changes will be discarded, but any"
                    " repos not currently on JIRI_HEAD will not be"
                    " automatically restored to their original revisions.")
            print(
                f"I'm about to change the state of your source code.\n{uncommitted_info}"
            )
yn = input("Are you sure you want to continue [y/n]? ")
if yn.startswith(("y", "Y")):
return
elif yn.startswith(("n", "N")):
raise Error("Aborting.", fail=False)
else:
print(colorize(f"Invalid choice: {yn!r}", Color.RED))
def jiri_runp_failures(self, output: str) -> List[str]:
"""Return the projects on which `jiri runp` failed."""
failed_prefix = "FAILED: "
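        # `jiri runp` is assumed to print a line of the form "FAILED: <project>=..."
        # for each project whose command exited nonzero; extract the project names.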
return [
remove_prefix(line, failed_prefix).split("=")[0]
for line in output.strip().splitlines()
if line.startswith(failed_prefix)
]
def remove_prefix(s: str, prefix: str) -> str:
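    """Return `s` with `prefix` removed from its start, if present."""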
if s.startswith(prefix):
return s[len(prefix):]
return s
def parse_args() -> argparse.Namespace:
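    """Parse command-line flags. -h/--help is handled manually via the HELP text."""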
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-h", "--help", action="store_true")
parser.add_argument("-n", "--dry-run", dest="dry_run", action="store_true")
parser.add_argument("-f", "--force", dest="force", action="store_true")
parser.add_argument("state", nargs="?", default="")
return parser.parse_args()
def main() -> None:
fuchsia_dir = os.getenv("FUCHSIA_DIR")
if fuchsia_dir is None:
raise Error("FUCHSIA_DIR must be set")
os.chdir(fuchsia_dir)
args = parse_args()
SyncToCommand(args, fuchsia_dir).run()
if __name__ == "__main__":
try:
main()
except Error as e:
if e.fail:
prefix = colorize("ERROR", Color.RED)
print(f"{prefix}: {e.msg}")
sys.exit(1)
else:
print(e.msg)