Update Go DP Lib and Privacy on Beam dependencies

Go DP Lib:
* Update dependencies

Privacy on Beam:
* Update dependencies

Clustering:
* Add mechanism calibration option.
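
  A minimal usage sketch of the new option, pieced together from the demo
  and test changes below (the dataset values and the default multipliers
  are illustrative only):

      import numpy as np

      from clustering import clustering_algorithm
      from clustering import clustering_params

      data = clustering_params.Data(
          datapoints=np.array([[0.3, 0.2], [3.0, 4.0], [6.0, 8.0]]),
          radius=1.0)
      privacy_param = clustering_params.DifferentialPrivacyParam(
          epsilon=1.0, delta=1e-6)

      # Passing multipliers switches noise calibration from the privacy
      # budget split to mechanism calibration; privacy_budget_split is
      # then ignored.
      result = clustering_algorithm.private_lsh_clustering(
          2, data, privacy_param,
          multipliers=clustering_params.PrivacyCalculatorMultiplier())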

Accounting:
* More precise error messages when an accountant cannot process an event.

Change-Id: I74da9701ea35931fbecfc6da0c930c21327ab0e3
GitOrigin-RevId: 0ad3bc29eb78a24cc17bcab4113d3683622f2ace
diff --git a/cc/algorithms/BUILD b/cc/algorithms/BUILD
index 9a9ffe3..79b7d8c 100644
--- a/cc/algorithms/BUILD
+++ b/cc/algorithms/BUILD
@@ -378,7 +378,6 @@
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/status:statusor",
         "@com_google_absl//absl/strings",
-        "@com_google_absl//absl/types:optional",
         "@com_google_cc_differential_privacy//base:status",
     ],
 )
diff --git a/cc/algorithms/util.cc b/cc/algorithms/util.cc
index 6656af5..fb6dfe9 100644
--- a/cc/algorithms/util.cc
+++ b/cc/algorithms/util.cc
@@ -298,7 +298,7 @@
 absl::Status ValidateMaxPartitionsContributed(
     absl::optional<double> max_partitions_contributed) {
   return ValidateIsPositive(max_partitions_contributed,
-                            "Maximum number of partitoins that can be "
+                            "Maximum number of partitions that can be "
                             "contributed to (i.e., L0 sensitivity)");
 }
 
@@ -308,6 +308,12 @@
                             "Maximum number of contributions per partition");
 }
 
+absl::Status ValidateMaxContributions(absl::optional<int> max_contributions) {
+  return ValidateIsPositive(
+      max_contributions,
+      "Maximum number of contributions (i.e., L1 sensitivity)");
+}
+
 absl::Status ValidateTreeHeight(absl::optional<int> tree_height) {
   return ValidateIsGreaterThanOrEqualTo(tree_height, /*lower_bound=*/1,
                                         "Tree Height");
diff --git a/cc/algorithms/util.h b/cc/algorithms/util.h
index 5705578..9aeb72c 100644
--- a/cc/algorithms/util.h
+++ b/cc/algorithms/util.h
@@ -523,6 +523,7 @@
     absl::optional<double> max_partitions_contributed);
 absl::Status ValidateMaxContributionsPerPartition(
     absl::optional<double> max_contributions_per_partition);
+absl::Status ValidateMaxContributions(absl::optional<int> max_contributions);
 
 // Validates common tree parameters.
 absl::Status ValidateTreeHeight(absl::optional<int> tree_height);
diff --git a/cc/algorithms/util_test.cc b/cc/algorithms/util_test.cc
index 4642317..8802f64 100644
--- a/cc/algorithms/util_test.cc
+++ b/cc/algorithms/util_test.cc
@@ -1116,6 +1116,17 @@
               StatusIs(absl::StatusCode::kOk));
 }
 
+TEST(ValidateTest, ValidateMaxContributions) {
+  EXPECT_THAT(
+      ValidateMaxContributions(-1),
+      StatusIs(absl::StatusCode::kInvalidArgument, HasSubstr("positive")));
+  EXPECT_THAT(
+      ValidateMaxContributions(0),
+      StatusIs(absl::StatusCode::kInvalidArgument, HasSubstr("positive")));
+  EXPECT_THAT(ValidateMaxContributions(10),
+              StatusIs(absl::StatusCode::kOk));
+}
+
 TEST(ValidateTest, ValidateMaxContributionsPerPartitionFailsForNonPositive) {
   EXPECT_THAT(
       ValidateMaxContributionsPerPartition(-1),
diff --git a/examples/zetasql/README.md b/examples/zetasql/README.md
index 65f95c5..1ae626d 100644
--- a/examples/zetasql/README.md
+++ b/examples/zetasql/README.md
@@ -62,7 +62,7 @@
 * The last argument should be the SQL query to execute on the ```data_set```.
 This query must specify values for the DP parameters ```epsilon```, ```delta```,
 and ```kappa```<sup>[1](#params)</sup> (see the
-[ZetaSQL documentation](https://github.com/google/zetasql/blob/master/docs/anonymization_syntax.md#kappa)
+[ZetaSQL documentation](https://github.com/google/zetasql/blob/master/docs/differential-privacy.md#kappa)
 for more information). In queries that contain a GROUP BY clause, ```kappa```
 is the maximum number of different groups (i.e., partitions) to which each user
 may contribute data. See the [codelab](codelab.md) for additional information
diff --git a/go/go.mod b/go/go.mod
index 6d22fe8..1e7ef47 100644
--- a/go/go.mod
+++ b/go/go.mod
@@ -4,12 +4,9 @@
 
 require (
 	github.com/golang/glog v1.0.0
-	github.com/google/go-cmp v0.5.6
+	github.com/google/go-cmp v0.5.9
 	github.com/grd/stat v0.0.0-20130623202159-138af3fd5012
-	gonum.org/v1/gonum v0.8.2
+	gonum.org/v1/gonum v0.12.0
 )
 
-require (
-	golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2 // indirect
-	golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 // indirect
-)
+require golang.org/x/exp v0.0.0-20191002040644-a1355ae1e2c3 // indirect
diff --git a/go/go_differential_privacy_deps.bzl b/go/go_differential_privacy_deps.bzl
index 7482fbe..3c853bf 100644
--- a/go/go_differential_privacy_deps.bzl
+++ b/go/go_differential_privacy_deps.bzl
@@ -27,8 +27,14 @@
     go_repository(
         name = "com_github_ajstarks_svgo",
         importpath = "github.com/ajstarks/svgo",
-        sum = "h1:wVe6/Ea46ZMeNkQjjBW6xcqyQA/j5e0D6GytH95g0gQ=",
-        version = "v0.0.0-20180226025133-644b8db467af",
+        sum = "h1:slYM766cy2nI3BwyRiyQj/Ud48djTMtMebDqepE95rw=",
+        version = "v0.0.0-20211024235047-1546f124cd8b",
+    )
+    go_repository(
+        name = "com_github_burntsushi_xgb",
+        importpath = "github.com/BurntSushi/xgb",
+        sum = "h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=",
+        version = "v0.0.0-20160522181843-27f122750802",
     )
 
     go_repository(
@@ -37,6 +43,30 @@
         sum = "h1:WXb3TSNmHp2vHoCroCIB1foO/yQ36swABL8aOVeDpgg=",
         version = "v1.2.1-0.20190220221249-0403632d5b90",
     )
+    go_repository(
+        name = "com_github_go_fonts_liberation",
+        importpath = "github.com/go-fonts/liberation",
+        sum = "h1:jAkAWJP4S+OsrPLZM4/eC9iW7CtHy+HBXrEwZXWo5VM=",
+        version = "v0.2.0",
+    )
+    go_repository(
+        name = "com_github_go_gl_glfw",
+        importpath = "github.com/go-gl/glfw",
+        sum = "h1:QbL/5oDUmRBzO9/Z7Seo6zf912W/a6Sr4Eu0G/3Jho0=",
+        version = "v0.0.0-20190409004039-e6da0acd62b1",
+    )
+    go_repository(
+        name = "com_github_go_latex_latex",
+        importpath = "github.com/go-latex/latex",
+        sum = "h1:6zl3BbBhdnMkpSj2YY30qV3gDcVBGtFgVsV3+/i+mKQ=",
+        version = "v0.0.0-20210823091927-c0d11ff05a81",
+    )
+    go_repository(
+        name = "com_github_go_pdf_fpdf",
+        importpath = "github.com/go-pdf/fpdf",
+        sum = "h1:MlgtGIfsdMEEQJr2le6b/HNr1ZlQwxyWr77r2aj2U/8=",
+        version = "v0.6.0",
+    )
 
     go_repository(
         name = "com_github_golang_freetype",
@@ -55,8 +85,8 @@
     go_repository(
         name = "com_github_google_go_cmp",
         importpath = "github.com/google/go-cmp",
-        sum = "h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=",
-        version = "v0.5.6",
+        sum = "h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=",
+        version = "v0.5.9",
     )
 
     go_repository(
@@ -72,6 +102,18 @@
         sum = "h1:PJr+ZMXIecYc1Ey2zucXdR73SMBtgjPgwa31099IMv0=",
         version = "v1.0.3-0.20190309125859-24315acbbda5",
     )
+    go_repository(
+        name = "com_shuralyov_dmitri_gpu_mtl",
+        importpath = "dmitri.shuralyov.com/gpu/mtl",
+        sum = "h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY=",
+        version = "v0.0.0-20190408044501-666a987793e9",
+    )
+    go_repository(
+        name = "ht_sr_git_sbinet_gg",
+        importpath = "git.sr.ht/~sbinet/gg",
+        sum = "h1:LNhjNn8DerC8f9DHLz6lS0YYul/b602DUxDgGkd/Aik=",
+        version = "v0.3.1",
+    )
 
     go_repository(
         name = "io_rsc_pdf",
@@ -79,40 +121,82 @@
         sum = "h1:k1MczvYDUvJBe93bYd7wrZLLUEcLZAuF824/I4e5Xr4=",
         version = "v0.1.1",
     )
+    go_repository(
+        name = "org_golang_x_crypto",
+        importpath = "golang.org/x/crypto",
+        sum = "h1:iMGN4xG0cnqj3t+zOM8wUB0BiPKHEwSxEZCvzcbZuvk=",
+        version = "v0.0.0-20190510104115-cbcb75029529",
+    )
 
     go_repository(
         name = "org_golang_x_exp",
         importpath = "golang.org/x/exp",
-        sum = "h1:y102fOLFqhV41b+4GPiJoa0k/x+pJcEi2/HB1Y5T6fU=",
-        version = "v0.0.0-20190125153040-c74c464bbbf2",
+        sum = "h1:n9HxLrNxWWtEb1cA950nuEEj3QnKbtsCJ6KjcgisNUs=",
+        version = "v0.0.0-20191002040644-a1355ae1e2c3",
     )
 
     go_repository(
         name = "org_golang_x_image",
         importpath = "golang.org/x/image",
-        sum = "h1:00VmoueYNlNz/aHIilyyQz/MHSqGoWJzpFv/HW8xpzI=",
-        version = "v0.0.0-20180708004352-c73c2afc3b81",
+        sum = "h1:TcHcE0vrmgzNH1v3ppjcMGbhG5+9fMuvOmUYwNEF4q4=",
+        version = "v0.0.0-20220302094943-723b81ca9867",
+    )
+    go_repository(
+        name = "org_golang_x_mobile",
+        importpath = "golang.org/x/mobile",
+        sum = "h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs=",
+        version = "v0.0.0-20190719004257-d2bd2a29d028",
+    )
+    go_repository(
+        name = "org_golang_x_mod",
+        importpath = "golang.org/x/mod",
+        sum = "h1:sfUMP1Gu8qASkorDVjnMuvgJzwFbTZSeXFiGBYAVdl4=",
+        version = "v0.1.0",
+    )
+    go_repository(
+        name = "org_golang_x_net",
+        importpath = "golang.org/x/net",
+        sum = "h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=",
+        version = "v0.0.0-20190620200207-3b0461eec859",
+    )
+    go_repository(
+        name = "org_golang_x_sync",
+        importpath = "golang.org/x/sync",
+        sum = "h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=",
+        version = "v0.0.0-20190423024810-112230192c58",
+    )
+    go_repository(
+        name = "org_golang_x_sys",
+        importpath = "golang.org/x/sys",
+        sum = "h1:+R4KGOnez64A81RvjARKc4UT5/tI9ujCIVX+P5KiHuI=",
+        version = "v0.0.0-20190412213103-97732733099d",
+    )
+    go_repository(
+        name = "org_golang_x_text",
+        importpath = "golang.org/x/text",
+        sum = "h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=",
+        version = "v0.3.7",
     )
 
     go_repository(
         name = "org_golang_x_tools",
         importpath = "golang.org/x/tools",
-        sum = "h1:Io7mpb+aUAGF0MKxbyQ7HQl1VgB+cL6ZJZUFaFNqVV4=",
-        version = "v0.0.0-20190206041539-40960b6deb8e",
+        sum = "h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8=",
+        version = "v0.1.9",
     )
 
     go_repository(
         name = "org_golang_x_xerrors",
         importpath = "golang.org/x/xerrors",
-        sum = "h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=",
-        version = "v0.0.0-20191204190536-9bdfabe68543",
+        sum = "h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=",
+        version = "v0.0.0-20190717185122-a985d3407aa7",
     )
 
     go_repository(
         name = "org_gonum_v1_gonum",
         importpath = "gonum.org/v1/gonum",
-        sum = "h1:CCXrcPKiGGotvnN6jfUsKk4rRqm7q09/YbKb5xCEvtM=",
-        version = "v0.8.2",
+        sum = "h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o=",
+        version = "v0.12.0",
     )
 
     go_repository(
@@ -125,6 +209,6 @@
     go_repository(
         name = "org_gonum_v1_plot",
         importpath = "gonum.org/v1/plot",
-        sum = "h1:Qh4dB5D/WpoUUp3lSod7qgoyEHbDGPUWjIbnqdqqe1k=",
-        version = "v0.0.0-20190515093506-e2840ee46a6b",
+        sum = "h1:dnifSs43YJuNMDzB7v8wV64O4ABBHReuAVAoBxqBqS4=",
+        version = "v0.10.1",
     )
diff --git a/learning/clustering/BUILD.bazel b/learning/clustering/BUILD.bazel
index ae3db46..85d2192 100644
--- a/learning/clustering/BUILD.bazel
+++ b/learning/clustering/BUILD.bazel
@@ -216,6 +216,9 @@
         ":clustering_params",
         "@com_google_python_dp_accounting//dp_accounting:dp_event",
         "@com_google_python_dp_accounting//dp_accounting:dp_event_builder",
+        "@com_google_python_dp_accounting//dp_accounting:mechanism_calibration",
+        "@com_google_python_dp_accounting//dp_accounting/pld:accountant",
+        "@com_google_python_dp_accounting//dp_accounting/pld:common",
         "@com_google_python_dp_accounting//dp_accounting/pld:pld_privacy_accountant",
         requirement("absl-py"),
         requirement("numpy"),
@@ -224,6 +227,7 @@
 
 py_test(
     name = "privacy_calculator_test",
+    size = "large",
     srcs = ["privacy_calculator_test.py"],
     python_version = "PY3",
     srcs_version = "PY3",
@@ -232,6 +236,8 @@
         ":clustering_params",
         ":privacy_calculator",
         "@com_google_python_dp_accounting//dp_accounting:dp_event",
+        "@com_google_python_dp_accounting//dp_accounting:mechanism_calibration",
+        "@com_google_python_dp_accounting//dp_accounting/pld:accountant",
         requirement("absl-py"),
         requirement("numpy"),
     ],
diff --git a/learning/clustering/README.md b/learning/clustering/README.md
index 4096157..da507cd 100644
--- a/learning/clustering/README.md
+++ b/learning/clustering/README.md
@@ -52,6 +52,11 @@
 `clustering_params.PrivacyBudgetSplit`. Note that the private count is also used
 in generating the tree itself.
 
+> **Note:** An experimental option to use
+[mechanism calibration](https://github.com/google/differential-privacy/blob/main/python/dp_accounting/mechanism_calibration.py)
+in place of the privacy budget split is under evaluation; it can be enabled by
+passing in a `clustering_params.PrivacyCalculatorMultiplier`.
+
 ## Usage
 
 The entry point to our algorithm is `clustering.private_lsh_clustering()`.
@@ -166,6 +171,9 @@
 *   `epsilon_to_try`: list of `epsilon` values to use when experimenting with
     varying `epsilon`.
 
+*   `use_mechanism_calibration`: whether to use the experimental mechanism
+    calibration for noise parameters.
+
 ## Benchmark Comparisons
 
 These benchmarks compare the normalized k-means objective (average squared
diff --git a/learning/clustering/clustering_algorithm.py b/learning/clustering/clustering_algorithm.py
index 4803b2e..a0f2e8d 100644
--- a/learning/clustering/clustering_algorithm.py
+++ b/learning/clustering/clustering_algorithm.py
@@ -195,6 +195,8 @@
     privacy_budget_split: typing.Optional[
         clustering_params.PrivacyBudgetSplit] = None,
     tree_param: typing.Optional[clustering_params.TreeParam] = None,
+    multipliers: typing.Optional[
+        clustering_params.PrivacyCalculatorMultiplier] = None,
     short_description: str = "CoresetParam") -> ClusteringResult:
   """Clusters data into k clusters.
 
@@ -207,6 +209,9 @@
       the clustering algorithm for fine-tuning.
     tree_param: Optional tree parameters for generating the LSH net tree for
       fine-tuning.
+    multipliers: Optional multipliers to determine the ratio between noise
+      parameters for different operations in the clustering algorithm for
+      fine-tuning. When set, privacy_budget_split is ignored.
     short_description: Optional description to identify this parameter
       configuration.
 
@@ -223,12 +228,16 @@
     max_depth = tree_param.max_depth
 
   # Initialize the parameters.
-  if privacy_budget_split is None:
-    privacy_budget_split = clustering_params.PrivacyBudgetSplit()
-  pcalc = privacy_calculator.PrivacyCalculator.from_budget_split(
-      privacy_param, privacy_budget_split, data.radius, max_depth)
+  if multipliers is not None:
+    pcalc = privacy_calculator.PrivacyCalculator.from_mechanism_calibration(
+        privacy_param, data.radius, max_depth, multipliers)
+  else:
+    if privacy_budget_split is None:
+      privacy_budget_split = clustering_params.PrivacyBudgetSplit()
+    pcalc = privacy_calculator.PrivacyCalculator.from_budget_split(
+        privacy_param, privacy_budget_split, data.radius, max_depth)
 
-  logging.info("Privacy calculator: %s", pcalc)
+  logging.debug("Privacy calculator: %s", pcalc)
   pcalc.validate_accounting(privacy_param, max_depth)
 
   private_count = None
diff --git a/learning/clustering/clustering_algorithm_test.py b/learning/clustering/clustering_algorithm_test.py
index d891183..241abb4 100644
--- a/learning/clustering/clustering_algorithm_test.py
+++ b/learning/clustering/clustering_algorithm_test.py
@@ -14,14 +14,14 @@
 """Tests for clustering_algorithm."""
 
 from absl.testing import absltest
-
+from absl.testing import parameterized
 import numpy as np
 
 from clustering import clustering_algorithm
 from clustering import clustering_params
 
 
-class ClusteringTest(absltest.TestCase):
+class ClusteringTest(parameterized.TestCase):
 
   def test_clustering_result_value_errors_unequal_dim(self):
     centers = np.array([[0, 0], [100, 100]])
@@ -79,7 +79,10 @@
     self.assertListEqual(list(clustering_result.labels), [0, 1, 0])
     self.assertAlmostEqual(clustering_result.loss, 37)
 
-  def test_clipped_data_used_for_clustering_and_not_result_calculation(self):
+  @parameterized.named_parameters(('privacy_budget_split', False),
+                                  ('mechanism_calibration', True))
+  def test_clipped_data_used_for_clustering_and_not_result_calculation(
+      self, use_mechanism_calibration):
     # Clipped datapoints (radius=1): [[0.3, 0.2], [0.6, 0.8], [0.6, 0.8]]
     datapoints = np.array([[0.3, 0.2], [3, 4], [6, 8]])
     # Very small radius means the datapoint will be clipped for the center
@@ -90,7 +93,12 @@
     # No branching, the coreset will just be the average of the points
     tree_param = clustering_params.TreeParam(1, 1, 0)
     clustering_result = clustering_algorithm.private_lsh_clustering(
-        3, data, privacy_param, tree_param=tree_param)
+        3,
+        data,
+        privacy_param,
+        tree_param=tree_param,
+        multipliers=clustering_params.PrivacyCalculatorMultiplier()
+        if use_mechanism_calibration else None)
 
     # Center should be calculated using the clipped data.
     expected_center = np.array([0.5, 0.6])
@@ -143,7 +151,7 @@
     self.assertAlmostEqual(clustering_metrics.false_match_frac, 1 / 9)
 
 
-class ClusteringEdgeCaseTest(absltest.TestCase):
+class ClusteringEdgeCaseTest(parameterized.TestCase):
   baseline_k: int
   baseline_privacy_param: clustering_params.DifferentialPrivacyParam
 
@@ -152,12 +160,18 @@
     self.baseline_k = 2
     self.baseline_privacy_param = clustering_params.DifferentialPrivacyParam()
 
-  def test_small_dataset(self):
+  @parameterized.named_parameters(('privacy_budget_split', False),
+                                  ('mechanism_calibration', True))
+  def test_small_dataset(self, use_mechanism_calibration):
     datapoints = np.array([[0.3, 0.2]])
     data = clustering_params.Data(datapoints=datapoints, radius=1)
     self.assertIsNotNone(
         clustering_algorithm.private_lsh_clustering(
-            self.baseline_k, data, self.baseline_privacy_param))
+            self.baseline_k,
+            data,
+            self.baseline_privacy_param,
+            multipliers=clustering_params.PrivacyCalculatorMultiplier()
+            if use_mechanism_calibration else None))
 
 
 if __name__ == '__main__':
diff --git a/learning/clustering/clustering_params.py b/learning/clustering/clustering_params.py
index 4100ea7..af7dc05 100644
--- a/learning/clustering/clustering_params.py
+++ b/learning/clustering/clustering_params.py
@@ -16,8 +16,8 @@
 import dataclasses
 import enum
 import typing
-from absl import logging
 
+from absl import logging
 import numpy as np
 
 
@@ -61,6 +61,44 @@
 
 
 @dataclasses.dataclass
+class PrivacyCalculatorMultiplier():
+  """Multipliers to be used by mechanism calibration."""
+  gaussian_std_dev_multiplier: float = 1.0
+  laplace_param_multiplier: float = 20.0
+
+  def get_gaussian_std_dev(self, alpha: float, sensitivity: float) -> float:
+    """Returns gaussian standard deviation based on alpha.
+
+    Args:
+      alpha: parameter varied in mechanism calibration.
+      sensitivity: sensitivity of the dataset for the sum operations.
+    """
+    return self.gaussian_std_dev_multiplier * alpha * sensitivity
+
+  def get_alpha(self, gaussian_std_dev: float, sensitivity: float) -> float:
+    """Returns alpha based on gaussian standard deviation and sensitivity.
+
+    This must be the inverse of get_gaussian_std_dev with respect to alpha.
+
+    Args:
+      gaussian_std_dev: standard deviation to calculate alpha for.
+      sensitivity: sensitivity of the dataset for the sum operations.
+    """
+    return gaussian_std_dev / (sensitivity * self.gaussian_std_dev_multiplier)
+
+  def get_laplace_param(self, alpha: float) -> float:
+    """Returns laplace parameter based on alpha.
+
+    Args:
+      alpha: parameter varied in mechanism calibration.
+    """
+    inverse_laplace_param = self.laplace_param_multiplier * alpha
+    # Laplace param increases as noise decreases, so we scale it inversely with
+    # alpha.
+    return 1.0 / inverse_laplace_param
+
+
+@dataclasses.dataclass
 class TreeParam():
   """Thresholds for constructing a tree.
 
diff --git a/learning/clustering/clustering_params_test.py b/learning/clustering/clustering_params_test.py
index df54ab7..1737fbc 100644
--- a/learning/clustering/clustering_params_test.py
+++ b/learning/clustering/clustering_params_test.py
@@ -114,6 +114,16 @@
     self.assertSequenceAlmostEqual(
         clipped_datapoints[4], [4.46949207, 4.81329915, 5.15710623, 5.50091331])
 
+  def test_privacy_calculator_multiplier(self):
+    multiplier = clustering_params.PrivacyCalculatorMultiplier(
+        gaussian_std_dev_multiplier=4.2, laplace_param_multiplier=5.1)
+    alpha = 3.0
+    sensitivity = 1.4
+    std_dev = multiplier.get_gaussian_std_dev(alpha, sensitivity)
+    self.assertEqual(std_dev, 17.64)
+    self.assertEqual(multiplier.get_alpha(std_dev, sensitivity), alpha)
+    self.assertEqual(multiplier.get_laplace_param(alpha), 1.0 / 15.3)
+
 
 if __name__ == "__main__":
   absltest.main()
diff --git a/learning/clustering/demo/clustering_demo.py b/learning/clustering/demo/clustering_demo.py
index a7d9cdd..ca39f90 100644
--- a/learning/clustering/demo/clustering_demo.py
+++ b/learning/clustering/demo/clustering_demo.py
@@ -25,49 +25,55 @@
 
 FLAGS = flags.FLAGS
 
-flags.DEFINE_integer('num_points', 100000,
-                     'Number of points in synthetic dataset.')
-flags.DEFINE_integer('dim', 100, 'Dimension of points in synthetic dataset.')
-flags.DEFINE_integer('num_clusters', 64,
-                     'Number of clusters in synthetic dataset.')
-flags.DEFINE_float(
+_NUM_POINTS = flags.DEFINE_integer('num_points', 100000,
+                                   'Number of points in synthetic dataset.')
+_DIM = flags.DEFINE_integer('dim', 100,
+                            'Dimension of points in synthetic dataset.')
+_NUM_CLUSTERS = flags.DEFINE_integer(
+    'num_clusters', 64, 'Number of clusters in synthetic dataset.')
+_CLUSTER_RATIO = flags.DEFINE_float(
     'cluster_ratio', 8.0,
     'Parameter controlling the ratio of distances between points in different '
     'vs. same cluster.')
-flags.DEFINE_float(
+_RADIUS = flags.DEFINE_float(
     'radius', 1.0,
     'Radius of ball in which all points in synthetic dataset lie.')
 
-flags.DEFINE_float(
+_FIXED_EPS = flags.DEFINE_float(
     'fixed_epsilon', 1.0,
     'Value of epsilon to use when experimenting with varying k.')
-flags.DEFINE_list('k_to_try', '2, 4, 8, 16, 32, 64',
-                  'List of k values to use when experimenting with varying k.')
-flags.DEFINE_integer('fixed_k', 64,
-                     'Value of k when experimenting with varying epsilon.')
-flags.DEFINE_list(
+_K_TO_TRY = flags.DEFINE_list(
+    'k_to_try', '2, 4, 8, 16, 32, 64',
+    'List of k values to use when experimenting with varying k.')
+_FIXED_K = flags.DEFINE_integer(
+    'fixed_k', 64, 'Value of k when experimenting with varying epsilon.')
+_EPS_TO_TRY = flags.DEFINE_list(
     'epsilon_to_try', '0.1, 0.5, 1.0, 5.0, 10.0, 50.0, 100.0, inf',
     'List of epsilon values to use when experimenting with varying epsilon.')
 
+_USE_MECHANISM_CALIBRATION = flags.DEFINE_bool(
+    'use_mechanism_calibration', False,
+    'Runs the demo with mechanism calibration instead of the budget split.')
+
 
 def main(argv: Sequence[str]) -> None:
   if len(argv) > 1:
     raise app.UsageError('Too many command-line arguments.')
 
   data: clustering_params.Data = data_generation.generate_synthetic_dataset(
-      FLAGS.num_points, FLAGS.dim, FLAGS.num_clusters, FLAGS.cluster_ratio,
-      FLAGS.radius)
+      _NUM_POINTS.value, _DIM.value, _NUM_CLUSTERS.value, _CLUSTER_RATIO.value,
+      _RADIUS.value)
   print('==== Synthetic Dataset Information ====\n'
-        f'Number of datapoints: {FLAGS.num_points}\n'
-        f'Dimensions: {FLAGS.dim}\n'
-        f'Number of clusters: {FLAGS.num_clusters}\n'
-        f'Radius: {FLAGS.radius}\n'
+        f'Number of datapoints: {_NUM_POINTS.value}\n'
+        f'Dimensions: {_DIM.value}\n'
+        f'Number of clusters: {_NUM_CLUSTERS.value}\n'
+        f'Radius: {_RADIUS.value}\n'
         f'Cluster centers drawn from: Uniform over ball of '
-        f'radius={FLAGS.radius * (1 - 1 / float(FLAGS.cluster_ratio)):.4}\n'
+        f'radius={_RADIUS.value * (1 - 1 / float(_CLUSTER_RATIO.value)):.4}\n'
         f'Each cluster drawn from: '
         f'N(cluster_center, '
-        f'{FLAGS.radius / FLAGS.cluster_ratio / np.sqrt(FLAGS.dim):.4} * I) '
-        f'clipped to ball of radius {FLAGS.radius}')
+        f'{_RADIUS.value / _CLUSTER_RATIO.value / np.sqrt(_DIM.value):.4} * I) '
+        f'clipped to ball of radius {_RADIUS.value}')
 
   eval_head = ('|  k | epsilon | clustering loss |    dominant label accuracy '
                '   | false match fraction | true non-match fraction |')
@@ -76,30 +82,36 @@
     privacy_param = clustering_params.DifferentialPrivacyParam(
         epsilon=eps, delta=1e-6)
     clustering_result: clustering_algorithm.ClusteringResult = (
-        clustering_algorithm.private_lsh_clustering(k, data, privacy_param))
+        clustering_algorithm.private_lsh_clustering(
+            k,
+            data,
+            privacy_param,
+            multipliers=clustering_params.PrivacyCalculatorMultiplier()
+            if _USE_MECHANISM_CALIBRATION.value else None))
     clustering_metrics: clustering_algorithm.ClusteringMetrics = (
         clustering_result.get_clustering_metrics())
     correct_pred = clustering_metrics.dominant_label_correct_count
     accuracy = clustering_metrics.dominant_label_accuracy
     false_match_frac = clustering_metrics.false_match_frac
     true_nonmatch_frac = clustering_metrics.true_nonmatch_frac
-    print(f'| {k:>2} | {eps:>7} '
-          f'| {clustering_result.loss:>15.8} '
-          f'| {accuracy:>6.2} ({correct_pred:>6} out of {FLAGS.num_points:>6}) '
-          f'| {false_match_frac:>20.4} '
-          f'| {true_nonmatch_frac:>23.4} |')
+    print(
+        f'| {k:>2} | {eps:>7} '
+        f'| {clustering_result.loss:>15.8} '
+        f'| {accuracy:>6.2} ({correct_pred:>6} out of {_NUM_POINTS.value:>6}) '
+        f'| {false_match_frac:>20.4} '
+        f'| {true_nonmatch_frac:>23.4} |')
 
-  print(f'\n# Evaluation with epsilon = {FLAGS.fixed_epsilon} and '
-        f'varying k in {list(map(int, FLAGS.k_to_try))}')
+  print(f'\n# Evaluation with epsilon = {_FIXED_EPS.value} and '
+        f'varying k in {list(map(int, _K_TO_TRY.value))}')
   print(eval_head)
-  for k in list(map(int, FLAGS.k_to_try)):
-    run_clustering(k, FLAGS.fixed_epsilon)
+  for k in list(map(int, _K_TO_TRY.value)):
+    run_clustering(k, _FIXED_EPS.value)
 
-  print(f'\n# Evaluation with k = {FLAGS.fixed_k} and '
-        f'varying epsilon in {list(map(float, FLAGS.epsilon_to_try))}')
+  print(f'\n# Evaluation with k = {_FIXED_K.value} and '
+        f'varying epsilon in {list(map(float, _EPS_TO_TRY.value))}')
   print(eval_head)
-  for epsilon in list(map(float, FLAGS.epsilon_to_try)):
-    run_clustering(FLAGS.fixed_k, epsilon)
+  for epsilon in list(map(float, _EPS_TO_TRY.value)):
+    run_clustering(_FIXED_K.value, epsilon)
 
   print('Note: all computations apart from cluster centers, such as loss, '
         'label accuracy, etc. above are not differentially private.')
diff --git a/learning/clustering/privacy_calculator.py b/learning/clustering/privacy_calculator.py
index d535afa..2682930 100644
--- a/learning/clustering/privacy_calculator.py
+++ b/learning/clustering/privacy_calculator.py
@@ -14,6 +14,7 @@
 """Calculates average and count privacy params."""
 
 import dataclasses
+import functools
 from typing import Type
 
 from absl import logging
@@ -23,6 +24,9 @@
 from clustering import clustering_params
 from dp_accounting import dp_event
 from dp_accounting import dp_event_builder
+from dp_accounting import mechanism_calibration
+from dp_accounting.pld import accountant
+from dp_accounting.pld import common
 from dp_accounting.pld import pld_privacy_accountant
 
 
@@ -47,6 +51,53 @@
   return builder.build()
 
 
+def make_clustering_event_from_param(
+    multipliers: clustering_params.PrivacyCalculatorMultiplier,
+    sensitivity: float, max_depth: int, alpha: float) -> dp_event.DpEvent:
+  """Returns a DpEvent for clustering with the parameter alpha.
+
+  Args:
+    multipliers: multipliers to calculate the noise parameters given alpha.
+    sensitivity: sensitivity of the dataset for the sum operations.
+    max_depth: max depth of the prefix tree for generating the coreset.
+    alpha: parameter varied in mechanism calibration.
+  """
+  logging.debug('Mechanism Calibration: Testing param alpha = %s', alpha)
+  return make_clustering_event(
+      sum_std_dev=multipliers.get_gaussian_std_dev(alpha, sensitivity),
+      count_laplace_param=multipliers.get_laplace_param(alpha),
+      sensitivity=sensitivity,
+      max_depth=max_depth)
+
+
+def get_alpha_interval(
+    privacy_param: clustering_params.DifferentialPrivacyParam, radius: float,
+    multipliers: clustering_params.PrivacyCalculatorMultiplier
+) -> mechanism_calibration.BracketInterval:
+  """Returns an interval for alpha used in mechanism calibration.
+
+  Args:
+    privacy_param: privacy parameters, epsilon must not be infinite, and delta
+      must be less than 1.
+    radius: radius of the dataset.
+    multipliers: multipliers for noise parameters.
+  """
+  if privacy_param.epsilon == np.inf or privacy_param.delta >= 1:
+    raise ValueError(
+        'get_alpha_interval should not be called for nonprivate parameters.')
+
+  # To pick a lower bound, check what the gaussian std dev would be if we
+  # used the entire privacy budget on the gaussian operation.
+  all_eps_std_dev = accountant.get_smallest_gaussian_noise(
+      privacy_parameters=common.DifferentialPrivacyParameters(
+          privacy_param.epsilon, privacy_param.delta),
+      num_queries=1,
+      sensitivity=radius)
+  lower_bound_alpha = multipliers.get_alpha(all_eps_std_dev, radius)
+  return mechanism_calibration.LowerEndpointAndGuess(lower_bound_alpha,
+                                                     2 * lower_bound_alpha)
+
+
 @dataclasses.dataclass
 class PrivacyCalculator():
   """Calculates and returns privacy parameters."""
@@ -87,11 +138,43 @@
     calculated_epsilon = acct.get_epsilon(privacy_param.delta)
     calculated_delta = acct.get_delta(privacy_param.epsilon)
 
-    logging.info('Accounted epsilon: %s', calculated_epsilon)
-    logging.info('Accounted delta: %s', calculated_delta)
+    logging.debug('Accounted epsilon: %s', calculated_epsilon)
+    logging.debug('Accounted delta: %s', calculated_delta)
 
     if (calculated_epsilon > privacy_param.epsilon or
         calculated_delta > privacy_param.delta):
       raise ValueError('Accounted privacy params greater than allowed: '
                        f'({calculated_epsilon}, {calculated_delta}) > '
                        f'({privacy_param.epsilon}, {privacy_param.delta})')
+
+  @classmethod
+  def from_mechanism_calibration(
+      cls: Type['PrivacyCalculator'],
+      privacy_param: clustering_params.DifferentialPrivacyParam, radius: float,
+      max_depth: int, multipliers: clustering_params.PrivacyCalculatorMultiplier
+  ) -> 'PrivacyCalculator':
+    """Uses mechanism calibration to calculate noise parameters."""
+    if privacy_param.privacy_model != clustering_params.PrivacyModel.CENTRAL:
+      raise NotImplementedError(
+          f'Currently unsupported privacy model: {privacy_param.privacy_model}')
+
+    if privacy_param.epsilon == np.inf or privacy_param.delta >= 1:
+      # No noise.
+      return cls(
+          central_privacy_utils.AveragePrivacyParam(0, radius),
+          central_privacy_utils.CountPrivacyParam(np.inf))
+
+    interval = get_alpha_interval(privacy_param, radius, multipliers)
+    alpha = mechanism_calibration.calibrate_dp_mechanism(
+        pld_privacy_accountant.PLDAccountant,
+        make_event_from_param=functools.partial(
+            make_clustering_event_from_param, multipliers, radius, max_depth),
+        target_epsilon=privacy_param.epsilon,
+        target_delta=privacy_param.delta,
+        bracket_interval=interval)
+
+    return cls(
+        central_privacy_utils.AveragePrivacyParam(
+            multipliers.get_gaussian_std_dev(alpha, radius), radius),
+        central_privacy_utils.CountPrivacyParam(
+            multipliers.get_laplace_param(alpha)))
diff --git a/learning/clustering/privacy_calculator_test.py b/learning/clustering/privacy_calculator_test.py
index 402087c..773a174 100644
--- a/learning/clustering/privacy_calculator_test.py
+++ b/learning/clustering/privacy_calculator_test.py
@@ -13,6 +13,8 @@
 # limitations under the License.
 """Tests for privacy_calculator."""
 
+from unittest import mock
+
 from absl.testing import absltest
 from absl.testing import parameterized
 import numpy as np
@@ -21,6 +23,9 @@
 from clustering import clustering_params
 from clustering import privacy_calculator
 from dp_accounting import dp_event
+from dp_accounting import mechanism_calibration
+from dp_accounting.pld import accountant
+from dp_accounting.pld import common
 
 
 class PrivacyCalculatorTest(parameterized.TestCase):
@@ -82,8 +87,74 @@
         dp_event.ComposedDpEvent([gaussian_event,
                                   dp_event.NonPrivateDpEvent()]))
 
-  @parameterized.named_parameters(
-      ("basic", 10, 1e-2), ("inf_eps", np.inf, 1e-2), ("one_delta", 10, 1))
+  @mock.patch.object(
+      privacy_calculator,
+      "make_clustering_event",
+      return_value=dp_event.ComposedDpEvent([
+          dp_event.GaussianDpEvent(2.4),
+          dp_event.SelfComposedDpEvent(dp_event.LaplaceDpEvent(0.8), 26)
+      ]),
+      autospec=True)
+  def test_make_clustering_event_from_param(self, mock_make_clustering_event):
+    multipliers = clustering_params.PrivacyCalculatorMultiplier(1.2, 0.2)
+    sensitivity = 2
+    max_depth = 25
+    alpha = 4.0
+    clustering_event = privacy_calculator.make_clustering_event_from_param(
+        multipliers, sensitivity, max_depth, alpha)
+    mock_args = mock_make_clustering_event.call_args[1]
+    self.assertAlmostEqual(mock_args["sum_std_dev"], 9.6)
+    self.assertAlmostEqual(mock_args["count_laplace_param"], 1.25)
+    self.assertAlmostEqual(mock_args["sensitivity"], 2)
+    self.assertAlmostEqual(mock_args["max_depth"], 25)
+    self.assertEqual(
+        clustering_event,
+        dp_event.ComposedDpEvent([
+            dp_event.GaussianDpEvent(2.4),
+            dp_event.SelfComposedDpEvent(dp_event.LaplaceDpEvent(0.8), 26)
+        ]))
+
+  @mock.patch.object(
+      accountant,
+      "get_smallest_gaussian_noise",
+      return_value=8.4,
+      autospec=True)
+  def test_get_alpha_interval(self, mock_smallest_gaussian_noise):
+    privacy_param = clustering_params.DifferentialPrivacyParam(
+        epsilon=2.0, delta=1e-6)
+    radius = 3.2
+    multipliers = clustering_params.PrivacyCalculatorMultiplier(3.5, 2.1)
+    interval = privacy_calculator.get_alpha_interval(privacy_param, radius,
+                                                     multipliers)
+
+    # Check arguments.
+    mock_args = mock_smallest_gaussian_noise.call_args[1]
+    self.assertAlmostEqual(mock_args["privacy_parameters"],
+                           common.DifferentialPrivacyParameters(2.0, 1e-6))
+    self.assertAlmostEqual(mock_args["num_queries"], 1)
+    self.assertAlmostEqual(mock_args["sensitivity"], radius)
+
+    self.assertEqual(interval,
+                     mechanism_calibration.LowerEndpointAndGuess(0.75, 1.5))
+
+  def test_get_alpha_interval_error(self):
+    radius = 3.2
+    multipliers = clustering_params.PrivacyCalculatorMultiplier(3.5, 2.1)
+    # Infinite epsilon.
+    with self.assertRaises(ValueError):
+      _ = privacy_calculator.get_alpha_interval(
+          clustering_params.DifferentialPrivacyParam(
+              epsilon=np.inf, delta=1e-6), radius, multipliers)
+
+    # Delta = 1.
+    with self.assertRaises(ValueError):
+      _ = privacy_calculator.get_alpha_interval(
+          clustering_params.DifferentialPrivacyParam(epsilon=1.0, delta=1),
+          radius, multipliers)
+
+  @parameterized.named_parameters(("basic", 10, 1e-2),
+                                  ("inf_eps", np.inf, 1e-2),
+                                  ("one_delta", 10, 1))
   def test_validate_accounting(self, epsilon, delta):
     privacy_param = clustering_params.DifferentialPrivacyParam(
         epsilon=epsilon, delta=delta)
@@ -111,6 +182,47 @@
         r".* > \(10, 0\.01\)"):
       pcalc.validate_accounting(privacy_param, depth)
 
+  @parameterized.named_parameters(("basic", 10, 1e-2),
+                                  ("inf_eps", np.inf, 1e-2),
+                                  ("one_delta", 10, 1))
+  def test_from_mechanism_calibration(self, epsilon, delta):
+    privacy_param = clustering_params.DifferentialPrivacyParam(
+        epsilon=epsilon, delta=delta)
+    radius = 3.2
+    max_depth = 12
+    multipliers = clustering_params.PrivacyCalculatorMultiplier(3.5, 2.1)
+    pcalc = privacy_calculator.PrivacyCalculator.from_mechanism_calibration(
+        privacy_param, radius, max_depth, multipliers)
+    # Result should be within the privacy budget.
+    pcalc.validate_accounting(privacy_param, max_depth)
+    self.assertEqual(pcalc.average_privacy_param.sensitivity, radius)
+
+  @parameterized.named_parameters(("basic", 10, 1e-2),
+                                  ("inf_eps", np.inf, 1e-2),
+                                  ("one_delta", 10, 1))
+  def test_from_mechanism_calibration_scaled_multipliers(self, epsilon, delta):
+    privacy_param = clustering_params.DifferentialPrivacyParam(
+        epsilon=epsilon, delta=delta)
+    radius = 3.2
+    max_depth = 12
+    multipliers = clustering_params.PrivacyCalculatorMultiplier(3.5, 2.1)
+    pcalc1 = privacy_calculator.PrivacyCalculator.from_mechanism_calibration(
+        privacy_param, radius, max_depth, multipliers)
+
+    # When we scale the multipliers by a constant, the result should be the
+    # same.
+    two_x_multipliers = clustering_params.PrivacyCalculatorMultiplier(7.0, 4.2)
+    pcalc2 = privacy_calculator.PrivacyCalculator.from_mechanism_calibration(
+        privacy_param, radius, max_depth, two_x_multipliers)
+    self.assertAlmostEqual(
+        pcalc1.average_privacy_param.gaussian_standard_deviation,
+        pcalc2.average_privacy_param.gaussian_standard_deviation,
+        delta=1e-4)
+    self.assertAlmostEqual(
+        pcalc1.count_privacy_param.laplace_param,
+        pcalc2.count_privacy_param.laplace_param,
+        delta=1e-4)
+
 
 if __name__ == "__main__":
   absltest.main()
diff --git a/privacy-on-beam/go.mod b/privacy-on-beam/go.mod
index a60da75..5a98f2a 100644
--- a/privacy-on-beam/go.mod
+++ b/privacy-on-beam/go.mod
@@ -3,30 +3,29 @@
 go 1.18
 
 require (
-	github.com/apache/beam/sdks/v2 v2.36.0
+	github.com/apache/beam/sdks/v2 v2.41.0
 	github.com/golang/glog v1.0.0
 	github.com/google/differential-privacy/go/v2 v2.0.0-pre
-	github.com/google/go-cmp v0.5.6
-	gonum.org/v1/plot v0.10.0
-	google.golang.org/protobuf v1.27.1
+	github.com/google/go-cmp v0.5.9
+	gonum.org/v1/plot v0.12.0
+	google.golang.org/protobuf v1.28.1
 )
 
 require (
-	github.com/ajstarks/svgo v0.0.0-20210923152817-c3b6e2f0c527 // indirect
-	github.com/fogleman/gg v1.3.0 // indirect
+	git.sr.ht/~sbinet/gg v0.3.1 // indirect
+	github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b // indirect
 	github.com/go-fonts/liberation v0.2.0 // indirect
 	github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81 // indirect
-	github.com/go-pdf/fpdf v0.5.0 // indirect
+	github.com/go-pdf/fpdf v0.6.0 // indirect
 	github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
 	github.com/golang/protobuf v1.5.2 // indirect
 	github.com/google/uuid v1.3.0 // indirect
-	golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6 // indirect
-	golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d // indirect
-	golang.org/x/net v0.0.0-20210423184538-5f58ad60dda6 // indirect
-	golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect
-	golang.org/x/text v0.3.6 // indirect
-	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
-	gonum.org/v1/gonum v0.9.3 // indirect
-	google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f // indirect
-	google.golang.org/grpc v1.40.0 // indirect
+	golang.org/x/exp v0.0.0-20220827204233-334a2380cb91 // indirect
+	golang.org/x/image v0.0.0-20220902085622-e7cb96979f69 // indirect
+	golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e // indirect
+	golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect
+	golang.org/x/text v0.3.7 // indirect
+	gonum.org/v1/gonum v0.12.0 // indirect
+	google.golang.org/genproto v0.0.0-20220714211235-042d03aeabc9 // indirect
+	google.golang.org/grpc v1.48.0 // indirect
 )
diff --git a/privacy-on-beam/privacy_on_beam_deps.bzl b/privacy-on-beam/privacy_on_beam_deps.bzl
index 11fa22d..db23815 100644
--- a/privacy-on-beam/privacy_on_beam_deps.bzl
+++ b/privacy-on-beam/privacy_on_beam_deps.bzl
@@ -27,15 +27,27 @@
     go_repository(
         name = "co_honnef_go_tools",
         importpath = "honnef.co/go/tools",
-        sum = "h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=",
-        version = "v0.0.1-2020.1.4",
+        sum = "h1:qTakTkI6ni6LFD5sBwwsdSO+AQqbSIxOauHTTQKZ/7o=",
+        version = "v0.1.3",
+    )
+    go_repository(
+        name = "com_github_ajstarks_deck",
+        importpath = "github.com/ajstarks/deck",
+        sum = "h1:7kQgkwGRoLzC9K0oyXdJo7nve/bynv/KwUsxbiTlzAM=",
+        version = "v0.0.0-20200831202436-30c9fc6549a9",
+    )
+    go_repository(
+        name = "com_github_ajstarks_deck_generate",
+        importpath = "github.com/ajstarks/deck/generate",
+        sum = "h1:iXUgAaqDcIUGbRoy2TdeofRG/j1zpGRSEmNK05T+bi8=",
+        version = "v0.0.0-20210309230005-c3f852c02e19",
     )
 
     go_repository(
         name = "com_github_ajstarks_svgo",
         importpath = "github.com/ajstarks/svgo",
-        sum = "h1:NImof/JkF93OVWZY+PINgl6fPtQyF6f+hNUtZ0QZA1c=",
-        version = "v0.0.0-20210923152817-c3b6e2f0c527",
+        sum = "h1:slYM766cy2nI3BwyRiyQj/Ud48djTMtMebDqepE95rw=",
+        version = "v0.0.0-20211024235047-1546f124cd8b",
     )
     go_repository(
         name = "com_github_antihax_optional",
@@ -43,15 +55,28 @@
         sum = "h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg=",
         version = "v1.0.0",
     )
+    go_repository(
+        name = "com_github_apache_arrow_go_arrow",
+        importpath = "github.com/apache/arrow/go/arrow",
+        sum = "h1:byKBBF2CKWBjjA4J1ZL2JXttJULvWSl50LegTyRZ728=",
+        version = "v0.0.0-20200730104253-651201b0f516",
+    )
 
     go_repository(
         name = "com_github_apache_beam_sdks_v2",
         build_file_proto_mode = "disable_global",  # See https://github.com/bazelbuild/rules_go/issues/2186#issuecomment-523028281
         importpath = "github.com/apache/beam/sdks/v2",
-        sum = "h1:KyOndZIUUE0wKuXZoXf+WXXM2FiT77crvm+R6RBpf+M=",
-        version = "v2.36.0",
+        sum = "h1:h4Odc+2NJpNPs+Bunl6etq+T5lH/th2LG1rohaITZAM=",
+        version = "v2.41.0",
     )
     go_repository(
+        name = "com_github_apache_thrift",
+        importpath = "github.com/apache/thrift",
+        sum = "h1:hY4rAyg7Eqbb27GB6gkhUKrRAuc8xRjlNtJq+LseKeY=",
+        version = "v0.14.2",
+    )
+
+    go_repository(
         name = "com_github_armon_circbuf",
         importpath = "github.com/armon/circbuf",
         sum = "h1:QEF07wC0T1rKkctt1RINW/+RMTVmiwxETico2l3gxJA=",
@@ -70,6 +95,13 @@
         version = "v0.0.0-20180808171621-7fddfc383310",
     )
     go_repository(
+        name = "com_github_azure_go_ansiterm",
+        importpath = "github.com/Azure/go-ansiterm",
+        sum = "h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=",
+        version = "v0.0.0-20210617225240-d185dfc1b5a1",
+    )
+
+    go_repository(
         name = "com_github_bgentry_speakeasy",
         importpath = "github.com/bgentry/speakeasy",
         sum = "h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=",
@@ -101,6 +133,12 @@
         sum = "h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=",
         version = "v0.0.0-20160522181843-27f122750802",
     )
+    go_repository(
+        name = "com_github_cenkalti_backoff_v4",
+        importpath = "github.com/cenkalti/backoff/v4",
+        sum = "h1:6Yo7N8UP2K6LWZnW94DLVSSrbobcWdVzAYOisuDPIFo=",
+        version = "v4.1.2",
+    )
 
     go_repository(
         name = "com_github_census_instrumentation_opencensus_proto",
@@ -115,6 +153,13 @@
         version = "v1.1.0",
     )
     go_repository(
+        name = "com_github_cespare_xxhash_v2",
+        importpath = "github.com/cespare/xxhash/v2",
+        sum = "h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=",
+        version = "v2.1.1",
+    )
+
+    go_repository(
         name = "com_github_chzyer_logex",
         importpath = "github.com/chzyer/logex",
         sum = "h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=",
@@ -142,16 +187,29 @@
     go_repository(
         name = "com_github_cncf_udpa_go",
         importpath = "github.com/cncf/udpa/go",
-        sum = "h1:cqQfy1jclcSy/FwLjemeg3SR1yaINm74aQyupQ0Bl8M=",
-        version = "v0.0.0-20201120205902-5459f2c99403",
+        sum = "h1:hzAQntlaYRkVSFEfj9OTWlVV1H155FMD8BTKktLv0QI=",
+        version = "v0.0.0-20210930031921-04548b0d99d4",
     )
     go_repository(
         name = "com_github_cncf_xds_go",
         importpath = "github.com/cncf/xds/go",
-        sum = "h1:OZmjad4L3H8ncOIR8rnb5MREYqG8ixi5+WbeUsquF0c=",
-        version = "v0.0.0-20210312221358-fbca930ec8ed",
+        sum = "h1:zH8ljVhhq7yC0MIeUL/IviMtY8hx2mK8cN9wEYb8ggw=",
+        version = "v0.0.0-20211011173535-cb28da3451f1",
     )
     go_repository(
+        name = "com_github_containerd_cgroups",
+        importpath = "github.com/containerd/cgroups",
+        sum = "h1:iJnMvco9XGvKUvNQkv88bE4uJXxRQH18efbKo9w5vHQ=",
+        version = "v1.0.1",
+    )
+    go_repository(
+        name = "com_github_containerd_containerd",
+        importpath = "github.com/containerd/containerd",
+        sum = "h1:rs6Xg1gtIxaeyG+Smsb/0xaSDu1VgFhOCKBXxMxbsF4=",
+        version = "v1.5.9",
+    )
+
+    go_repository(
         name = "com_github_coreos_go_semver",
         importpath = "github.com/coreos/go-semver",
         sum = "h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=",
@@ -179,15 +237,39 @@
     go_repository(
         name = "com_github_davecgh_go_spew",
         importpath = "github.com/davecgh/go-spew",
-        sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=",
-        version = "v1.1.1",
+        sum = "h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=",
+        version = "v1.1.0",
+    )
+    go_repository(
+        name = "com_github_docker_distribution",
+        importpath = "github.com/docker/distribution",
+        sum = "h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=",
+        version = "v2.7.1+incompatible",
+    )
+    go_repository(
+        name = "com_github_docker_docker",
+        importpath = "github.com/docker/docker",
+        sum = "h1:OqzI/g/W54LczvhnccGqniFoQghHx3pklbLuhfXpqGo=",
+        version = "v20.10.11+incompatible",
+    )
+    go_repository(
+        name = "com_github_docker_go_connections",
+        importpath = "github.com/docker/go-connections",
+        sum = "h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=",
+        version = "v0.4.0",
+    )
+    go_repository(
+        name = "com_github_docker_go_units",
+        importpath = "github.com/docker/go-units",
+        sum = "h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=",
+        version = "v0.4.0",
     )
 
     go_repository(
         name = "com_github_envoyproxy_go_control_plane",
         importpath = "github.com/envoyproxy/go-control-plane",
-        sum = "h1:dulLQAYQFYtG5MTplgNGHWuV2D+OBD+Z8lmDBmbLg+s=",
-        version = "v0.9.9-0.20210512163311-63b5d3c536b0",
+        sum = "h1:xvqufLtNVwAhN8NMyWklVgxnWohi+wtMGQMhtxexlm0=",
+        version = "v0.10.2-0.20220325020618-49ff273808a1",
     )
 
     go_repository(
@@ -270,10 +352,17 @@
     go_repository(
         name = "com_github_go_pdf_fpdf",
         importpath = "github.com/go-pdf/fpdf",
-        sum = "h1:GHpcYsiDV2hdo77VTOuTF9k1sN8F8IY7NjnCo9x+NPY=",
-        version = "v0.5.0",
+        sum = "h1:MlgtGIfsdMEEQJr2le6b/HNr1ZlQwxyWr77r2aj2U/8=",
+        version = "v0.6.0",
     )
     go_repository(
+        name = "com_github_go_sql_driver_mysql",
+        importpath = "github.com/go-sql-driver/mysql",
+        sum = "h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=",
+        version = "v1.6.0",
+    )
+
+    go_repository(
         name = "com_github_godbus_dbus_v5",
         importpath = "github.com/godbus/dbus/v5",
         sum = "h1:9349emZab16e7zQvpmsbtjc18ykshndd8y2PG3sgJbA=",
@@ -308,8 +397,8 @@
     go_repository(
         name = "com_github_golang_mock",
         importpath = "github.com/golang/mock",
-        sum = "h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g=",
-        version = "v1.5.0",
+        sum = "h1:G5FRp8JnTd7RQH5kemVNlMeyXQAztQ3mOWV95KxsXH8=",
+        version = "v1.1.1",
     )
 
     go_repository(
@@ -334,8 +423,8 @@
     go_repository(
         name = "com_github_google_go_cmp",
         importpath = "github.com/google/go-cmp",
-        sum = "h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=",
-        version = "v0.5.6",
+        sum = "h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=",
+        version = "v0.5.9",
     )
     go_repository(
         name = "com_github_google_gofuzz",
@@ -375,10 +464,17 @@
         version = "v1.3.0",
     )
     go_repository(
+        name = "com_github_googleapis_enterprise_certificate_proxy",
+        importpath = "github.com/googleapis/enterprise-certificate-proxy",
+        sum = "h1:zO8WHNx/MYiAKJ3d5spxZXZE6KHmIQGQcAzwUzV7qQw=",
+        version = "v0.1.0",
+    )
+
+    go_repository(
         name = "com_github_googleapis_gax_go_v2",
         importpath = "github.com/googleapis/gax-go/v2",
-        sum = "h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=",
-        version = "v2.0.5",
+        sum = "h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk=",
+        version = "v2.4.0",
     )
     go_repository(
         name = "com_github_gopherjs_gopherjs",
@@ -557,6 +653,13 @@
         version = "v1.0.0",
     )
     go_repository(
+        name = "com_github_klauspost_compress",
+        importpath = "github.com/klauspost/compress",
+        sum = "h1:wXr2uRxZTJXHLly6qhJabee5JqIhTRoLBhDOA74hDEQ=",
+        version = "v1.13.1",
+    )
+
+    go_repository(
         name = "com_github_kr_fs",
         importpath = "github.com/kr/fs",
         sum = "h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=",
@@ -581,6 +684,13 @@
         version = "v0.2.0",
     )
     go_repository(
+        name = "com_github_lib_pq",
+        importpath = "github.com/lib/pq",
+        sum = "h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs=",
+        version = "v1.10.6",
+    )
+
+    go_repository(
         name = "com_github_linkedin_goavro",
         importpath = "github.com/linkedin/goavro",
         sum = "h1:DV2aUlj2xZiuxQyvag8Dy7zjY69ENjS66bWkSfdpddY=",
@@ -605,6 +715,19 @@
         version = "v0.0.3",
     )
     go_repository(
+        name = "com_github_microsoft_go_winio",
+        importpath = "github.com/Microsoft/go-winio",
+        sum = "h1:iT12IBVClFevaf8PuVyi3UmZOVh4OqnaLxDTW2O6j3w=",
+        version = "v0.4.17",
+    )
+    go_repository(
+        name = "com_github_microsoft_hcsshim",
+        importpath = "github.com/Microsoft/hcsshim",
+        sum = "h1:47MSwtKGXet80aIn+7h4YI6fwPmwIghAnsx2aOUrG2M=",
+        version = "v0.8.23",
+    )
+
+    go_repository(
         name = "com_github_miekg_dns",
         importpath = "github.com/miekg/dns",
         sum = "h1:9jZdLNd/P4+SfEJ0TNyxYpsK8N4GtfylBLqtbYN1sbA=",
@@ -647,6 +770,25 @@
         version = "v1.4.1",
     )
     go_repository(
+        name = "com_github_moby_sys_mount",
+        importpath = "github.com/moby/sys/mount",
+        sum = "h1:WhCW5B355jtxndN5ovugJlMFJawbUODuW8fSnEH6SSM=",
+        version = "v0.2.0",
+    )
+    go_repository(
+        name = "com_github_moby_sys_mountinfo",
+        importpath = "github.com/moby/sys/mountinfo",
+        sum = "h1:2Ks8/r6lopsxWi9m58nlwjaeSzUX9iiL1vj5qB/9ObI=",
+        version = "v0.5.0",
+    )
+    go_repository(
+        name = "com_github_moby_term",
+        importpath = "github.com/moby/term",
+        sum = "h1:dcztxKSvZ4Id8iPpHERQBbIJfabdt4wUm5qy3wOL2Zc=",
+        version = "v0.0.0-20210619224110-3f7ff695adc6",
+    )
+
+    go_repository(
         name = "com_github_modern_go_concurrent",
         importpath = "github.com/modern-go/concurrent",
         sum = "h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=",
@@ -659,6 +801,13 @@
         version = "v1.0.1",
     )
     go_repository(
+        name = "com_github_morikuni_aec",
+        importpath = "github.com/morikuni/aec",
+        sum = "h1:nXxl5PrvVm2L/wCy8dQu6DMTwH4oIuGN8GJDAlqDdVE=",
+        version = "v0.0.0-20170113033406-39771216ff4c",
+    )
+
+    go_repository(
         name = "com_github_niemeyer_pretty",
         importpath = "github.com/niemeyer/pretty",
         sum = "h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=",
@@ -678,6 +827,25 @@
         version = "v1.2.2",
     )
     go_repository(
+        name = "com_github_opencontainers_go_digest",
+        importpath = "github.com/opencontainers/go-digest",
+        sum = "h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=",
+        version = "v1.0.0",
+    )
+    go_repository(
+        name = "com_github_opencontainers_image_spec",
+        importpath = "github.com/opencontainers/image-spec",
+        sum = "h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM=",
+        version = "v1.0.2",
+    )
+    go_repository(
+        name = "com_github_opencontainers_runc",
+        importpath = "github.com/opencontainers/runc",
+        sum = "h1:opHZMaswlyxz1OuGpBE53Dwe4/xF7EZTY0A2L/FpCOg=",
+        version = "v1.0.2",
+    )
+
+    go_repository(
         name = "com_github_pascaldekloe_goe",
         importpath = "github.com/pascaldekloe/goe",
         sum = "h1:Lgl0gzECD8GnQ5QCWA8o6BtfL6mDH5rQgM4/fX3avOs=",
@@ -704,6 +872,13 @@
         version = "v1.0.13",
     )
     go_repository(
+        name = "com_github_pierrec_lz4_v4",
+        importpath = "github.com/pierrec/lz4/v4",
+        sum = "h1:ieHkV+i2BRzngO4Wd/3HGowuZStgq6QkPsD1eolNAO4=",
+        version = "v4.1.8",
+    )
+
+    go_repository(
         name = "com_github_pkg_errors",
         importpath = "github.com/pkg/errors",
         sum = "h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=",
@@ -736,6 +911,13 @@
         version = "v0.0.0-20190812154241-14fe0d1b01d4",
     )
     go_repository(
+        name = "com_github_proullon_ramsql",
+        importpath = "github.com/proullon/ramsql",
+        sum = "h1:mtMU7aT8cTAyNL3O4RyOfe/OOUxwCN525SIbKQoUvw0=",
+        version = "v0.0.0-20211120092837-c8d0a408b939",
+    )
+
+    go_repository(
         name = "com_github_rogpeppe_fastuuid",
         importpath = "github.com/rogpeppe/fastuuid",
         sum = "h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=",
@@ -779,6 +961,13 @@
         version = "v1.0.0",
     )
     go_repository(
+        name = "com_github_sirupsen_logrus",
+        importpath = "github.com/sirupsen/logrus",
+        sum = "h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=",
+        version = "v1.8.1",
+    )
+
+    go_repository(
         name = "com_github_smartystreets_assertions",
         importpath = "github.com/smartystreets/assertions",
         sum = "h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=",
@@ -812,8 +1001,8 @@
     go_repository(
         name = "com_github_spf13_cobra",
         importpath = "github.com/spf13/cobra",
-        sum = "h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw=",
-        version = "v1.2.1",
+        sum = "h1:X+jTBEBqF0bHN+9cSMgmfuvv2VHJ9ezmFNf9Y/XstYU=",
+        version = "v1.5.0",
     )
     go_repository(
         name = "com_github_spf13_jwalterweatherman",
@@ -854,29 +1043,55 @@
         version = "v1.2.0",
     )
     go_repository(
+        name = "com_github_testcontainers_testcontainers_go",
+        importpath = "github.com/testcontainers/testcontainers-go",
+        sum = "h1:OUujSlEGsXVo/ykPVZk3KanBNGN0TYb/7oKIPVn15JA=",
+        version = "v0.13.0",
+    )
+    go_repository(
+        name = "com_github_xitongsys_parquet_go",
+        importpath = "github.com/xitongsys/parquet-go",
+        sum = "h1:MhCaXii4eqceKPu9BwrjLqyK10oX9WF+xGhwvwbw7xM=",
+        version = "v1.6.2",
+    )
+    go_repository(
+        name = "com_github_xitongsys_parquet_go_source",
+        importpath = "github.com/xitongsys/parquet-go-source",
+        sum = "h1:UDtocVeACpnwauljUbeHD9UOjjcvF5kLUHruww7VT9A=",
+        version = "v0.0.0-20220315005136-aec0fe3e777c",
+    )
+
+    go_repository(
         name = "com_github_yuin_goldmark",
         importpath = "github.com/yuin/goldmark",
-        sum = "h1:dPmz1Snjq0kmkz159iL7S6WzdahUTHnHB5M56WFVifs=",
-        version = "v1.3.5",
+        sum = "h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=",
+        version = "v1.2.1",
     )
 
     go_repository(
         name = "com_google_cloud_go",
         importpath = "cloud.google.com/go",
-        sum = "h1:at8Tk2zUz63cLPR0JPWm5vp77pEZmzxEQBEfRKn1VV8=",
-        version = "v0.81.0",
+        sum = "h1:vpK6iQWv/2uUeFJth4/cBHsQAGjn1iIE6AAlxipRaA0=",
+        version = "v0.102.1",
     )
     go_repository(
         name = "com_google_cloud_go_bigquery",
         importpath = "cloud.google.com/go/bigquery",
-        sum = "h1:oq1PIpl9u1thzdsX0K9w5H8OlqH5gRu3zGc7FCk19IY=",
-        version = "v1.17.0",
+        sum = "h1:sTAW05tQycLEDbxod+zgH8LTKDkPbbb30NROx2I9XVs=",
+        version = "v1.36.0",
     )
     go_repository(
+        name = "com_google_cloud_go_compute",
+        importpath = "cloud.google.com/go/compute",
+        sum = "h1:v/k9Eueb8aAJ0vZuxKMrgm6kPhCLZU9HxFU+AFDs9Uk=",
+        version = "v1.7.0",
+    )
+
+    go_repository(
         name = "com_google_cloud_go_datastore",
         importpath = "cloud.google.com/go/datastore",
-        sum = "h1:3En8Rj64Q5GxtjsTljiqm25LTzvPFbpK+WQrgeKOUvI=",
-        version = "v1.5.0",
+        sum = "h1:2qo2G7hABSeqswa+5Ga3+QB8/ZwKOJmDsCISM9scmsU=",
+        version = "v1.8.0",
     )
     go_repository(
         name = "com_google_cloud_go_firestore",
@@ -885,16 +1100,23 @@
         version = "v1.1.0",
     )
     go_repository(
+        name = "com_google_cloud_go_iam",
+        importpath = "cloud.google.com/go/iam",
+        sum = "h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc=",
+        version = "v0.3.0",
+    )
+
+    go_repository(
         name = "com_google_cloud_go_pubsub",
         importpath = "cloud.google.com/go/pubsub",
-        sum = "h1:w81PfKDbPt8GQZFQePf2V3dhlld+fynrwwLuKQ1xntw=",
-        version = "v1.11.0-beta.schemas",
+        sum = "h1:aCS6wSMzrc602OeXUMA66KGlyXxpdkHdwN+FSBv/sUg=",
+        version = "v1.24.0",
     )
     go_repository(
         name = "com_google_cloud_go_storage",
         importpath = "cloud.google.com/go/storage",
-        sum = "h1:Ljj+ZXVEhCr/1+4ZhvtteN1ND7UUsNTlduGclLh8GO0=",
-        version = "v1.15.0",
+        sum = "h1:a4N0gIkx83uoVFGz8B2eAV3OhN90QoWF5OZWLKl39ig=",
+        version = "v1.24.0",
     )
 
     go_repository(
@@ -904,10 +1126,17 @@
         version = "v0.0.0-20190408044501-666a987793e9",
     )
     go_repository(
+        name = "ht_sr_git_sbinet_gg",
+        importpath = "git.sr.ht/~sbinet/gg",
+        sum = "h1:LNhjNn8DerC8f9DHLz6lS0YYul/b602DUxDgGkd/Aik=",
+        version = "v0.3.1",
+    )
+
+    go_repository(
         name = "in_gopkg_check_v1",
         importpath = "gopkg.in/check.v1",
-        sum = "h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=",
-        version = "v1.0.0-20200227125254-8fa46927fb4f",
+        sum = "h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=",
+        version = "v0.0.0-20161208181325-20d25e280405",
     )
     go_repository(
         name = "in_gopkg_errgo_v2",
@@ -927,6 +1156,12 @@
         sum = "h1:BJa69CDh0awSsLUmZ9+BowBdokpduDZSM9Zk8oKHfN4=",
         version = "v1.0.5",
     )
+    go_repository(
+        name = "in_gopkg_retry_v1",
+        importpath = "gopkg.in/retry.v1",
+        sum = "h1:a9CArYczAVv6Qs6VGoLMio99GEs7kY9UzSF9+LD+iGs=",
+        version = "v1.0.3",
+    )
 
     go_repository(
         name = "in_gopkg_yaml_v2",
@@ -1006,8 +1241,8 @@
     go_repository(
         name = "org_golang_google_api",
         importpath = "google.golang.org/api",
-        sum = "h1:pqMffJFLBVUDIoYsHcqtxgQVTsmxMDpYLOc5MT4Jrww=",
-        version = "v0.45.0",
+        sum = "h1:OUywo5UEEZ8H1eMy55mFpkL9Sy59mQ5TzYGWa+td8zo=",
+        version = "v0.89.0",
     )
 
     go_repository(
@@ -1020,16 +1255,16 @@
     go_repository(
         name = "org_golang_google_genproto",
         importpath = "google.golang.org/genproto",
-        sum = "h1:4m1jFN3fHeKo0UvpraW2ipO2O0rgp5w2ugXeggtecAk=",
-        version = "v0.0.0-20210728212813-7823e685a01f",
+        sum = "h1:zfXhTgBfGlIh3jMXN06W8qbhFGsh6MJNJiYEuhTddOI=",
+        version = "v0.0.0-20220714211235-042d03aeabc9",
     )
 
     go_repository(
         name = "org_golang_google_grpc",
         build_file_proto_mode = "disable_global",  # See https://github.com/bazelbuild/rules_go/issues/2186#issuecomment-523028281
         importpath = "google.golang.org/grpc",
-        sum = "h1:AGJ0Ih4mHjSeibYkFGh1dD9KJ/eOtZ93I6hoHhukQ5Q=",
-        version = "v1.40.0",
+        sum = "h1:rQOsyJ/8+ufEDJd/Gdsz7HG220Mh9HAhFHRGnIjda0w=",
+        version = "v1.48.0",
     )
     go_repository(
         name = "org_golang_google_grpc_cmd_protoc_gen_go_grpc",
@@ -1041,8 +1276,8 @@
     go_repository(
         name = "org_golang_google_protobuf",
         importpath = "google.golang.org/protobuf",
-        sum = "h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ=",
-        version = "v1.27.1",
+        sum = "h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=",
+        version = "v1.28.1",
     )
 
     go_repository(
@@ -1055,21 +1290,27 @@
     go_repository(
         name = "org_golang_x_exp",
         importpath = "golang.org/x/exp",
-        sum = "h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y=",
-        version = "v0.0.0-20200224162631-6cc2880d07d6",
+        sum = "h1:tnebWN09GYg9OLPss1KXj8txwZc6X6uMr6VFdcGNbHw=",
+        version = "v0.0.0-20220827204233-334a2380cb91",
+    )
+    go_repository(
+        name = "org_golang_x_exp_shiny",
+        importpath = "golang.org/x/exp/shiny",
+        sum = "h1:pkl1Ko5DrhA4ezwKwdnmO7H1sKmMy9qLuYKRjS7SlmE=",
+        version = "v0.0.0-20220722155223-a9213eeb770e",
     )
 
     go_repository(
         name = "org_golang_x_image",
         importpath = "golang.org/x/image",
-        sum = "h1:RNPAfi2nHY7C2srAV8A49jpsYr0ADedCk1wq6fTMTvs=",
-        version = "v0.0.0-20210628002857-a66eb6448b8d",
+        sum = "h1:Lj6HJGCSn5AjxRAH2+r35Mir4icalbqku+CLUtjnvXY=",
+        version = "v0.0.0-20220902085622-e7cb96979f69",
     )
     go_repository(
         name = "org_golang_x_lint",
         importpath = "golang.org/x/lint",
-        sum = "h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug=",
-        version = "v0.0.0-20210508222113-6edffad5e616",
+        sum = "h1:XQyxROzUlZH+WIQwySDgnISgOivlhjIEwaQaJEJrrN0=",
+        version = "v0.0.0-20190313153728-d0100b6bd8b3",
     )
     go_repository(
         name = "org_golang_x_mobile",
@@ -1080,49 +1321,49 @@
     go_repository(
         name = "org_golang_x_mod",
         importpath = "golang.org/x/mod",
-        sum = "h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo=",
-        version = "v0.4.2",
+        sum = "h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=",
+        version = "v0.6.0-dev.0.20220419223038-86c51ed26bb4",
     )
 
     go_repository(
         name = "org_golang_x_net",
         importpath = "golang.org/x/net",
-        sum = "h1:0PC75Fz/kyMGhL0e1QnypqK2kQMqKt9csD1GnMJR+Zk=",
-        version = "v0.0.0-20210423184538-5f58ad60dda6",
+        sum = "h1:TsQ7F31D3bUCLeqPT0u+yjp1guoArKaNKmCr22PYgTQ=",
+        version = "v0.0.0-20220624214902-1bab6f366d9e",
     )
 
     go_repository(
         name = "org_golang_x_oauth2",
         importpath = "golang.org/x/oauth2",
-        sum = "h1:3B43BWw0xEBsLZ/NO1VALz6fppU3481pik+2Ksv45z8=",
-        version = "v0.0.0-20210628180205-a41e5a781914",
+        sum = "h1:+jnHzr9VPj32ykQVai5DNahi9+NSp7yYuCsl5eAQtL0=",
+        version = "v0.0.0-20220622183110-fd043fe589d2",
     )
 
     go_repository(
         name = "org_golang_x_sync",
         importpath = "golang.org/x/sync",
-        sum = "h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=",
-        version = "v0.0.0-20210220032951-036812b2e83c",
+        sum = "h1:Ax0t5p6N38Ga0dThY21weqDEyz2oklo4IvDkpigvkD8=",
+        version = "v0.0.0-20220601150217-0de741cfad7f",
     )
 
     go_repository(
         name = "org_golang_x_sys",
         importpath = "golang.org/x/sys",
-        sum = "h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I=",
-        version = "v0.0.0-20210630005230-0f9fa26af87c",
+        sum = "h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s=",
+        version = "v0.0.0-20220722155257-8c9f86f7a55f",
     )
     go_repository(
         name = "org_golang_x_term",
         importpath = "golang.org/x/term",
-        sum = "h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=",
-        version = "v0.0.0-20201126162022-7de9c90e9dd1",
+        sum = "h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY=",
+        version = "v0.0.0-20210927222741-03fcf44c2211",
     )
 
     go_repository(
         name = "org_golang_x_text",
         importpath = "golang.org/x/text",
-        sum = "h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=",
-        version = "v0.3.6",
+        sum = "h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=",
+        version = "v0.3.7",
     )
     go_repository(
         name = "org_golang_x_time",
@@ -1134,22 +1375,22 @@
     go_repository(
         name = "org_golang_x_tools",
         importpath = "golang.org/x/tools",
-        sum = "h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA=",
-        version = "v0.1.5",
+        sum = "h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=",
+        version = "v0.1.12",
     )
 
     go_repository(
         name = "org_golang_x_xerrors",
         importpath = "golang.org/x/xerrors",
-        sum = "h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=",
-        version = "v0.0.0-20200804184101-5ec99f83aff1",
+        sum = "h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=",
+        version = "v0.0.0-20220609144429-65e65417b02f",
     )
 
     go_repository(
         name = "org_gonum_v1_gonum",
         importpath = "gonum.org/v1/gonum",
-        sum = "h1:DnoIG+QAMaF5NvxnGe/oKsgKcAc6PcUyl8q0VetfQ8s=",
-        version = "v0.9.3",
+        sum = "h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o=",
+        version = "v0.12.0",
     )
     go_repository(
         name = "org_gonum_v1_netlib",
@@ -1161,8 +1402,8 @@
     go_repository(
         name = "org_gonum_v1_plot",
         importpath = "gonum.org/v1/plot",
-        sum = "h1:ymLukg4XJlQnYUJCp+coQq5M7BsUJFk6XQE4HPflwdw=",
-        version = "v0.10.0",
+        sum = "h1:y1ZNmfz/xHuHvtgFe8USZVyykQo5ERXPnspQNVK15Og=",
+        version = "v0.12.0",
     )
     go_repository(
         name = "org_uber_go_atomic",
diff --git a/python/dp_accounting/mechanism_calibration_test.py b/python/dp_accounting/mechanism_calibration_test.py
index 0ea8c33..5975d3e 100644
--- a/python/dp_accounting/mechanism_calibration_test.py
+++ b/python/dp_accounting/mechanism_calibration_test.py
@@ -37,10 +37,8 @@
     self._value = 0.0
     self._value_to_epsilon = value_to_epsilon
 
-  def supports(self, event: dp_event.DpEvent) -> bool:
-    return True
-
-  def _compose(self, event: dp_event.DpEvent, count: int = 1):
+  def _maybe_compose(self, event: dp_event.DpEvent, count: int,
+                     do_compose: bool):
     self._value = event.param
 
   def get_epsilon(self, target_delta: float) -> float:
diff --git a/python/dp_accounting/pld/pld_privacy_accountant.py b/python/dp_accounting/pld/pld_privacy_accountant.py
index e10b2ba..91d620c 100644
--- a/python/dp_accounting/pld/pld_privacy_accountant.py
+++ b/python/dp_accounting/pld/pld_privacy_accountant.py
@@ -14,12 +14,14 @@
 """Privacy accountant that uses Privacy Loss Distributions."""
 
 import math
+from typing import Optional
 
 from dp_accounting import dp_event
 from dp_accounting import privacy_accountant
 from dp_accounting.pld import privacy_loss_distribution
 
 NeighborRel = privacy_accountant.NeighboringRelation
+CompositionErrorDetails = privacy_accountant.PrivacyAccountant.CompositionErrorDetails
 PLD = privacy_loss_distribution
 
 
@@ -37,46 +39,29 @@
         value_discretization_interval=value_discretization_interval)
     self._value_discretization_interval = value_discretization_interval
 
-  def supports(self, event: dp_event.DpEvent) -> bool:
-    return self._maybe_compose(event, 0, False)
-
-  def _compose(self, event: dp_event.DpEvent, count: int = 1):
-    self._maybe_compose(event, count, True)
-
   def _maybe_compose(self, event: dp_event.DpEvent, count: int,
-                     do_compose: bool) -> bool:
-    """Traverses `event` and performs composition if `do_compose` is True.
-
-    If `do_compose` is False, can be used to check whether composition is
-    supported.
-
-    Args:
-      event: A `DpEvent` to process.
-      count: The number of times to compose the event.
-      do_compose: Whether to actually perform the composition.
-
-    Returns:
-      True if event is supported, otherwise False.
-    """
-
+                     do_compose: bool) -> Optional[CompositionErrorDetails]:
     if isinstance(event, dp_event.NoOpDpEvent):
-      return True
+      return None
     elif isinstance(event, dp_event.NonPrivateDpEvent):
       if do_compose:
         self._contains_non_dp_event = True
-      return True
+      return None
     elif isinstance(event, dp_event.SelfComposedDpEvent):
       return self._maybe_compose(event.event, event.count * count, do_compose)
     elif isinstance(event, dp_event.ComposedDpEvent):
-      return all(
-          self._maybe_compose(e, count, do_compose) for e in event.events)
+      for e in event.events:
+        result = self._maybe_compose(e, count, do_compose)
+        if result is not None:
+          return result
+      return None
     elif isinstance(event, dp_event.GaussianDpEvent):
       if do_compose:
         gaussian_pld = PLD.from_gaussian_mechanism(
             standard_deviation=event.noise_multiplier / math.sqrt(count),
             value_discretization_interval=self._value_discretization_interval)
         self._pld = self._pld.compose(gaussian_pld)
-      return True
+      return None
     elif isinstance(event, dp_event.LaplaceDpEvent):
       if do_compose:
         laplace_pld = PLD.from_laplace_mechanism(
@@ -84,10 +69,14 @@
             value_discretization_interval=self._value_discretization_interval
         ).self_compose(count)
         self._pld = self._pld.compose(laplace_pld)
-      return True
+      return None
     elif isinstance(event, dp_event.PoissonSampledDpEvent):
       if self.neighboring_relation != NeighborRel.ADD_OR_REMOVE_ONE:
-        return False
+        error_msg = (
+            'neighboring_relation must be `ADD_OR_REMOVE_ONE` for '
+            f'`PoissonSampledDpEvent`. Found {self._neighboring_relation}.')
+        return CompositionErrorDetails(
+            invalid_event=event, error_message=error_msg)
       if isinstance(event.event, dp_event.GaussianDpEvent):
         if do_compose:
           subsampled_gaussian_pld = PLD.from_gaussian_mechanism(
@@ -95,7 +84,7 @@
               value_discretization_interval=self._value_discretization_interval,
               sampling_prob=event.sampling_probability).self_compose(count)
           self._pld = self._pld.compose(subsampled_gaussian_pld)
-        return True
+        return None
       elif isinstance(event.event, dp_event.LaplaceDpEvent):
         if do_compose:
           subsampled_laplace_pld = PLD.from_laplace_mechanism(
@@ -103,12 +92,17 @@
               value_discretization_interval=self._value_discretization_interval,
               sampling_prob=event.sampling_probability).self_compose(count)
           self._pld = self._pld.compose(subsampled_laplace_pld)
-        return True
+        return None
       else:
-        return False
+        return CompositionErrorDetails(
+            invalid_event=event,
+            error_message=(
+                'Subevent of `PoissonSampledDpEvent` must be either '
+                f'`GaussianDpEvent` or `LaplaceDpEvent`. Found {event.event}.'))
     else:
       # Unsupported event (including `UnsupportedDpEvent`).
-      return False
+      return CompositionErrorDetails(
+          invalid_event=event, error_message='Unsupported event.')
 
   def get_epsilon(self, target_delta: float) -> float:
     if self._contains_non_dp_event:
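
A minimal caller-side sketch of the new error reporting, assuming the PLD accountant class is `PLDAccountant` with a default constructor (neither name is shown in this hunk); only the `dp_event` types and the `supports`/`compose` flow come from the diff itself. Composing a `PoissonSampledDpEvent` that wraps an unsupported mechanism now surfaces the offending subevent instead of a bare "unsupported".

# Hedged usage sketch; PLDAccountant and its default constructor are assumptions.
from dp_accounting import dp_event
from dp_accounting import privacy_accountant
from dp_accounting.pld import pld_privacy_accountant

acct = pld_privacy_accountant.PLDAccountant()  # assumed class name/constructor
bad = dp_event.PoissonSampledDpEvent(
    sampling_probability=0.01, event=dp_event.UnsupportedDpEvent())

assert not acct.supports(bad)  # _maybe_compose returns CompositionErrorDetails
try:
  acct.compose(bad)
except privacy_accountant.UnsupportedEventError as e:
  print(e)  # message includes "caused by subevent ..." per the new formatting
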
diff --git a/python/dp_accounting/privacy_accountant.py b/python/dp_accounting/privacy_accountant.py
index 7d74bc2..2ea9d92 100644
--- a/python/dp_accounting/privacy_accountant.py
+++ b/python/dp_accounting/privacy_accountant.py
@@ -15,7 +15,9 @@
 
 import abc
 import enum
-from typing import Any
+from typing import Any, Optional
+
+import attr
 
 from dp_accounting import dp_event
 from dp_accounting import dp_event_builder
@@ -51,7 +53,6 @@
     """
     return self._neighboring_relation
 
-  @abc.abstractmethod
   def supports(self, event: dp_event.DpEvent) -> bool:
     """Checks whether the `DpEvent` can be processed by this accountant.
 
@@ -65,17 +66,36 @@
     Returns:
       True iff this accountant supports processing `event`.
     """
+    return self._maybe_compose(event, 0, False) is None
+
+  @attr.s(frozen=True, slots=True, auto_attribs=True)
+  class CompositionErrorDetails(object):
+    """Describes offending subevent and error in case composition fails."""
+    invalid_event: Optional[dp_event.DpEvent]
+    error_message: Optional[str]
 
   @abc.abstractmethod
-  def _compose(self, event: dp_event.DpEvent, count: int = 1):
-    """Updates internal state to account for application of a `DpEvent`.
+  def _maybe_compose(self, event: dp_event.DpEvent, count: int,
+                     do_compose: bool) -> Optional[CompositionErrorDetails]:
+    """Traverses `event` and performs composition if `do_compose` is True.
 
-    Calls to `get_epsilon` or `get_delta` after calling `_compose` will return
-    values that account for this `DpEvent`.
+    If `do_compose` is True, updates internal state to account for application
+    of a `DpEvent`. Subsequent calls to `get_epsilon` or `get_delta` will return
+    values that account for composition of this `DpEvent`.
+
+    If `do_compose` is False, traverses structure of event to check whether
+    composition is supported *without updating internal state*. Returns None if
+    composition would succeed, otherwise returns a `CompositionErrorDetails`
+    with information about why the composition would fail.
 
     Args:
       event: A `DpEvent` to process.
       count: The number of times to compose the event.
+      do_compose: Whether to actually perform the composition.
+
+    Returns:
+      None if composition is valid. If composition is not supported, returns
+      `CompositionErrorDetails` describing why the composition fails.
     """
 
   def compose(self, event: dp_event.DpEvent, count: int = 1) -> Any:
@@ -99,10 +119,14 @@
     """
     if not isinstance(event, dp_event.DpEvent):
       raise TypeError(f'`event` must be `DpEvent`. Found {type(event)}.')
-    if not self.supports(event):
-      raise UnsupportedEventError(f'Unsupported event: {event}.')
+    composition_error = self._maybe_compose(event, count, False)
+    if composition_error:
+      raise UnsupportedEventError(
+          f'Unsupported event: {event}. Error: '
+          f'[{composition_error.error_message}] caused by subevent '
+          f'{composition_error.invalid_event}.')
     self._ledger.compose(event, count)
-    self._compose(event, count)
+    self._maybe_compose(event, count, True)
     return self
 
   @property
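
A minimal implementer-side sketch of the new abstract contract: a subclass supplies a single `_maybe_compose` that either updates state (`do_compose=True`) or only validates, and signals failure by returning a `CompositionErrorDetails` rather than a bare `False`. Everything except the toy class name is taken from the diff; the remaining abstract methods are omitted, so this class is a sketch, not instantiable as written.

# Toy subclass illustrating the contract described in the docstring above.
from typing import Optional

from dp_accounting import dp_event
from dp_accounting import privacy_accountant


class NoOpOnlyAccountant(privacy_accountant.PrivacyAccountant):
  """Toy accountant that only supports `NoOpDpEvent`s (illustration only)."""

  def _maybe_compose(
      self, event: dp_event.DpEvent, count: int, do_compose: bool
  ) -> Optional[privacy_accountant.PrivacyAccountant.CompositionErrorDetails]:
    if isinstance(event, dp_event.NoOpDpEvent):
      return None  # nothing to accumulate; composition trivially succeeds
    return privacy_accountant.PrivacyAccountant.CompositionErrorDetails(
        invalid_event=event, error_message='Only NoOpDpEvent is supported.')

  # get_epsilon / get_delta are omitted for brevity; they are unrelated to the
  # composition-error change shown above.
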
diff --git a/python/dp_accounting/privacy_accountant_test.py b/python/dp_accounting/privacy_accountant_test.py
index bc47ae7..e930c7a 100644
--- a/python/dp_accounting/privacy_accountant_test.py
+++ b/python/dp_accounting/privacy_accountant_test.py
@@ -49,12 +49,23 @@
       pass
 
     for accountant in self._make_test_accountants():
-      for unsupported in [dp_event.UnsupportedDpEvent(), UnknownDpEvent()]:
-        self.assertFalse(accountant.supports(unsupported))
-        self.assertFalse(
-            accountant.supports(dp_event.SelfComposedDpEvent(unsupported, 10)))
-        self.assertFalse(
-            accountant.supports(dp_event.ComposedDpEvent([unsupported])))
+      for unsupported_event in [
+          dp_event.UnsupportedDpEvent(),
+          UnknownDpEvent()
+      ]:
+        for nested_unsupported_event in [
+            unsupported_event,
+            dp_event.SelfComposedDpEvent(unsupported_event, 10),
+            dp_event.ComposedDpEvent([unsupported_event])
+        ]:
+          composition_error = accountant._maybe_compose(
+              nested_unsupported_event, count=1, do_compose=False)
+          self.assertIsNotNone(composition_error)
+          self.assertEqual(composition_error.invalid_event, unsupported_event)
+          self.assertFalse(accountant.supports(nested_unsupported_event))
+          with self.assertRaisesRegex(privacy_accountant.UnsupportedEventError,
+                                      'caused by subevent'):
+            accountant.compose(nested_unsupported_event)
 
   def test_no_events(self):
     for accountant in self._make_test_accountants():
@@ -73,7 +84,7 @@
     for accountant in self._make_test_accountants():
       event = dp_event.NoOpDpEvent()
       self.assertTrue(accountant.supports(event))
-      accountant._compose(event)
+      accountant.compose(event)
       self.assertEqual(accountant.get_epsilon(1e-12), 0)
       self.assertEqual(accountant.get_epsilon(0), 0)
       self.assertEqual(accountant.get_epsilon(1), 0)
@@ -89,7 +100,7 @@
     for accountant in self._make_test_accountants():
       event = dp_event.NonPrivateDpEvent()
       self.assertTrue(accountant.supports(event))
-      accountant._compose(event)
+      accountant.compose(event)
       self.assertEqual(accountant.get_epsilon(0.99), float('inf'))
       self.assertEqual(accountant.get_epsilon(0), float('inf'))
       self.assertEqual(accountant.get_epsilon(1), float('inf'))
diff --git a/python/dp_accounting/rdp/rdp_privacy_accountant.py b/python/dp_accounting/rdp/rdp_privacy_accountant.py
index 672b2c1..f1825fb 100644
--- a/python/dp_accounting/rdp/rdp_privacy_accountant.py
+++ b/python/dp_accounting/rdp/rdp_privacy_accountant.py
@@ -24,6 +24,7 @@
 from dp_accounting import privacy_accountant
 
 NeighborRel = privacy_accountant.NeighboringRelation
+CompositionErrorDetails = privacy_accountant.PrivacyAccountant.CompositionErrorDetails
 
 
 def _log_add(logx: float, logy: float) -> float:
@@ -499,7 +500,7 @@
 
 
 def _effective_gaussian_noise_multiplier(
-    event: dp_event.DpEvent) -> Optional[float]:
+    event: dp_event.DpEvent) -> Union[float, dp_event.DpEvent]:
   """Determines the effective noise multiplier of nested structure of Gaussians.
 
   A series of Gaussian queries on the same data can be reexpressed as a single
@@ -513,10 +514,10 @@
       bottoming out in `dp_event.GaussianDpEvent`s.
 
   Returns:
-    The noise multiplier of the equivalent `dp_event.GaussianDpEvent`, or None
-    if the input event was not a `dp_event.GaussianDpEvent` or a nested
-    structure of `dp_event.ComposedDpEvent` and/or
-    `dp_event.SelfComposedDpEvent` bottoming out in `dp_event.GaussianDpEvent`s.
+    The noise multiplier of the equivalent `dp_event.GaussianDpEvent`. If the
+    input event was not a `dp_event.GaussianDpEvent` or a nested structure of
+    `dp_event.ComposedDpEvent` and/or `dp_event.SelfComposedDpEvent` bottoming
+    out in `dp_event.GaussianDpEvent`s, returns the offending subevent.
   """
   if isinstance(event, dp_event.GaussianDpEvent):
     return event.noise_multiplier
@@ -524,15 +525,17 @@
     sum_sigma_inv_sq = 0
     for e in event.events:
       sigma = _effective_gaussian_noise_multiplier(e)
-      if sigma is None:
-        return None
+      if not isinstance(sigma, float):
+        return sigma
       sum_sigma_inv_sq += sigma**-2
     return sum_sigma_inv_sq**-0.5
   elif isinstance(event, dp_event.SelfComposedDpEvent):
     sigma = _effective_gaussian_noise_multiplier(event.event)
-    return None if sigma is None else (event.count * sigma**-2)**-0.5
+    if not isinstance(sigma, float):
+      return sigma
+    return (event.count * sigma**-2)**-0.5
   else:
-    return None
+    return event
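
A quick numeric check of the formula this helper implements, with illustrative values: composing Gaussian events with noise multipliers s_i is equivalent to a single Gaussian with sigma_eff = (sum_i s_i**-2)**-0.5, and after this change an unsupported leaf event is returned instead of None. Everything used here (the event types and the module-private helper) appears in this diff or its tests.

# Illustrative check of the effective-noise formula and the new return value.
from dp_accounting import dp_event
from dp_accounting.rdp import rdp_privacy_accountant

sigmas = [1.0, 2.0]
composed = dp_event.ComposedDpEvent(
    [dp_event.GaussianDpEvent(s) for s in sigmas])
sigma_eff = rdp_privacy_accountant._effective_gaussian_noise_multiplier(
    composed)
assert abs(sigma_eff - sum(s**-2 for s in sigmas)**-0.5) < 1e-12

laplace = dp_event.LaplaceDpEvent(1.0)  # not expressible as a Gaussian
offending = rdp_privacy_accountant._effective_gaussian_noise_multiplier(
    dp_event.ComposedDpEvent([laplace]))
assert offending is laplace  # the offending subevent is returned, not None
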
 
 
 def _compute_rdp_single_epoch_tree_aggregation(
@@ -789,77 +792,82 @@
     self._orders = np.array(orders)
     self._rdp = np.zeros_like(orders, dtype=np.float64)
 
-  def supports(self, event: dp_event.DpEvent) -> bool:
-    return self._maybe_compose(event, 0, False)
-
-  def _compose(self, event: dp_event.DpEvent, count: int = 1):
-    self._maybe_compose(event, count, True)
-
   def _maybe_compose(self, event: dp_event.DpEvent, count: int,
-                     do_compose: bool) -> bool:
-    """Traverses `event` and performs composition if `do_compose` is True.
-
-    If `do_compose` is False, can be used to check whether composition is
-    supported.
-
-    Args:
-      event: A `DpEvent` to process.
-      count: The number of times to compose the event.
-      do_compose: Whether to actually perform the composition.
-
-    Returns:
-      True if event is supported, otherwise False.
-    """
+                     do_compose: bool) -> Optional[CompositionErrorDetails]:
+    """Traverses `event` and performs composition if `do_compose` is True."""
 
     if isinstance(event, dp_event.NoOpDpEvent):
-      return True
+      return None
     elif isinstance(event, dp_event.NonPrivateDpEvent):
       if do_compose:
         self._rdp += np.inf
-      return True
+      return None
     elif isinstance(event, dp_event.SelfComposedDpEvent):
       return self._maybe_compose(event.event, event.count * count, do_compose)
     elif isinstance(event, dp_event.ComposedDpEvent):
-      return all(
-          self._maybe_compose(e, count, do_compose) for e in event.events)
+      for e in event.events:
+        result = self._maybe_compose(e, count, do_compose)
+        if result is not None:
+          return result
+      return None
     elif isinstance(event, dp_event.GaussianDpEvent):
       if do_compose:
         self._rdp += count * _compute_rdp_poisson_subsampled_gaussian(
             q=1.0, noise_multiplier=event.noise_multiplier, orders=self._orders)
-      return True
+      return None
     elif isinstance(event, dp_event.PoissonSampledDpEvent):
       if self._neighboring_relation is not NeighborRel.ADD_OR_REMOVE_ONE:
-        return False
-      gaussian_noise_multiplier = _effective_gaussian_noise_multiplier(
-          event.event)
-      if gaussian_noise_multiplier is None:
-        return False
+        error_msg = (
+            'neighboring_relation must be `ADD_OR_REMOVE_ONE` for '
+            f'`PoissonSampledDpEvent`. Found {self._neighboring_relation}.')
+        return CompositionErrorDetails(
+            invalid_event=event, error_message=error_msg)
+      sigma_or_bad_event = _effective_gaussian_noise_multiplier(event.event)
+      if isinstance(sigma_or_bad_event, dp_event.DpEvent):
+        return CompositionErrorDetails(
+            invalid_event=event,
+            error_message='Subevent of `PoissonSampledDpEvent` must be a '
+            '`GaussianDpEvent` or a nested structure of `ComposedDpEvent` '
+            'and/or `SelfComposedDpEvent` bottoming out in `GaussianDpEvent`s.'
+            f' Found subevent {sigma_or_bad_event}.')
       if do_compose:
         self._rdp += count * _compute_rdp_poisson_subsampled_gaussian(
             q=event.sampling_probability,
-            noise_multiplier=gaussian_noise_multiplier,
+            noise_multiplier=sigma_or_bad_event,
             orders=self._orders)
-      return True
+      return None
     elif isinstance(event, dp_event.SampledWithoutReplacementDpEvent):
       if self._neighboring_relation is not NeighborRel.REPLACE_ONE:
-        return False
-      gaussian_noise_multiplier = _effective_gaussian_noise_multiplier(
-          event.event)
-      if gaussian_noise_multiplier is None:
-        return False
+        error_msg = ('neighboring_relation must be `REPLACE_ONE` for '
+                     '`SampledWithoutReplacementDpEvent`. Found '
+                     f'{self._neighboring_relation}.')
+        return CompositionErrorDetails(
+            invalid_event=event, error_message=error_msg)
+      sigma_or_bad_event = _effective_gaussian_noise_multiplier(event.event)
+      if isinstance(sigma_or_bad_event, dp_event.DpEvent):
+        return CompositionErrorDetails(
+            invalid_event=event,
+            error_message='Subevent of `SampledWithoutReplacementDpEvent` must '
+            'be a `GaussianDpEvent` or a nested structure of `ComposedDpEvent` '
+            'and/or `SelfComposedDpEvent` bottoming out in `GaussianDpEvent`s. '
+            f'Found subevent {sigma_or_bad_event}.')
       if do_compose:
         self._rdp += count * _compute_rdp_sample_wor_gaussian(
             q=event.sample_size / event.source_dataset_size,
-            noise_multiplier=gaussian_noise_multiplier,
+            noise_multiplier=sigma_or_bad_event,
             orders=self._orders)
-      return True
+      return None
     elif isinstance(event, dp_event.SingleEpochTreeAggregationDpEvent):
       if self._neighboring_relation is not NeighborRel.REPLACE_SPECIAL:
-        return False
+        error_msg = ('neighboring_relation must be `REPLACE_SPECIAL` for '
+                     '`SingleEpochTreeAggregationDpEvent`. Found '
+                     f'{self._neighboring_relation}.')
+        return CompositionErrorDetails(
+            invalid_event=event, error_message=error_msg)
       if do_compose:
         self._rdp += count * _compute_rdp_single_epoch_tree_aggregation(
             event.noise_multiplier, event.step_counts, self._orders)
-      return True
+      return None
     elif isinstance(event, dp_event.LaplaceDpEvent):
       if do_compose:
         # Laplace satisfies eps-DP with eps = 1 / event.noise_multiplier
@@ -868,7 +876,7 @@
         rho = 0.5 * eps * eps
         self._rdp += count * np.array(
             [min(eps, rho * order) for order in self._orders])
-      return True
+      return None
     elif isinstance(event, dp_event.RepeatAndSelectDpEvent):
       if do_compose:
         # Save the RDP values from already composed DPEvents. These will
@@ -877,14 +885,15 @@
         # DP event.
         save_rdp = self._rdp
         self._rdp = np.zeros_like(self._orders, dtype=np.float64)
-      can_compose = self._maybe_compose(event.event, 1, do_compose)
-      if can_compose and do_compose:
+      composition_error = self._maybe_compose(event.event, 1, do_compose)
+      if composition_error is None and do_compose:
         self._rdp = count * _compute_rdp_repeat_and_select(
             self._orders, self._rdp, event.mean, event.shape) + save_rdp
-      return can_compose
+      return composition_error
     else:
       # Unsupported event (including `UnsupportedDpEvent`).
-      return False
+      return CompositionErrorDetails(
+          invalid_event=event, error_message='Unsupported event.')
 
   def get_epsilon_and_optimal_order(self,
                                     target_delta: float) -> Tuple[float, int]:
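
A hedged sketch of the per-event neighboring-relation checks added above, assuming the RDP accountant class is `RdpAccountant` with a default constructor and a default `ADD_OR_REMOVE_ONE` relation (none of which is shown in this hunk); the event field names and the `_maybe_compose(..., do_compose=False)` probe come from the diff and its tests. Under that assumed default relation, a `SampledWithoutReplacementDpEvent` is rejected with an error naming the required `REPLACE_ONE` relation.

# Hedged sketch; RdpAccountant, its constructor, and its default relation are
# assumptions not shown in this hunk.
from dp_accounting import dp_event
from dp_accounting.rdp import rdp_privacy_accountant

acct = rdp_privacy_accountant.RdpAccountant()  # assumed default constructor
wor_event = dp_event.SampledWithoutReplacementDpEvent(
    source_dataset_size=1000, sample_size=10,
    event=dp_event.GaussianDpEvent(1.0))

assert not acct.supports(wor_event)
error = acct._maybe_compose(wor_event, count=1, do_compose=False)
print(error.error_message)  # names the required REPLACE_ONE relation
print(error.invalid_event)  # the SampledWithoutReplacementDpEvent itself
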
diff --git a/python/dp_accounting/rdp/rdp_privacy_accountant_test.py b/python/dp_accounting/rdp/rdp_privacy_accountant_test.py
index 648b8e1..b6291b1 100644
--- a/python/dp_accounting/rdp/rdp_privacy_accountant_test.py
+++ b/python/dp_accounting/rdp/rdp_privacy_accountant_test.py
@@ -218,7 +218,14 @@
             ])))
     self.assertAlmostEqual(accountant._rdp[0], rdp)
 
-  def test_effective_gaussian_noise_multiplier(self):
+  def test_effective_gaussian_noise_multiplier_basic(self):
+    sigma = 2.71828
+    event = dp_event.GaussianDpEvent(sigma)
+    sigma_out = rdp_privacy_accountant._effective_gaussian_noise_multiplier(
+        event)
+    self.assertEqual(sigma_out, sigma)
+
+  def test_effective_gaussian_noise_multiplier_composed(self):
     np.random.seed(0xBAD5EED)
     sigmas = np.random.uniform(size=(4,))
 
@@ -236,6 +243,17 @@
     expected = sum(s**-2 for s in multi_sigmas)**-0.5
     self.assertAlmostEqual(sigma, expected)
 
+  _LAPLACE_EVENT = dp_event.LaplaceDpEvent(1.0)
+
+  @parameterized.named_parameters(
+      ('simple', _LAPLACE_EVENT),
+      ('composed', dp_event.ComposedDpEvent([_LAPLACE_EVENT])),
+      ('self_composed', dp_event.SelfComposedDpEvent(_LAPLACE_EVENT, 3)),
+  )
+  def test_effective_gaussian_noise_multiplier_invalid_event(self, event):
+    result = rdp_privacy_accountant._effective_gaussian_noise_multiplier(event)
+    self.assertEqual(result, self._LAPLACE_EVENT)
+
   def test_compute_rdp_poisson_sampled_gaussian(self):
     orders = [1.5, 2.5, 5, 50, 100, np.inf]
     noise_multiplier = 2.5
@@ -707,5 +725,6 @@
   for i in range(len(accountant._orders)):
     self.assertAlmostEqual(accountant._rdp[i], accountant._orders[i] * rho)
 
+
 if __name__ == '__main__':
   absltest.main()