Rename ir_pb2 to ir_data

The IR classes are no longer based on a protobuf implementation.
Migrating to the more neutral `ir_data` name makes that clear, and it
also helps avoid tripping up static analysis that looks for
protobuf-like code.

Part of #118.
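
A minimal before/after sketch of a typical call site (mirroring the
update to emboss_codegen_cpp.py in the diff below; the file path is
illustrative, and only the module name changes -- the API is the same):

    # Before: protobuf-flavored module name.
    # from compiler.util import ir_pb2
    # ir = ir_pb2.EmbossIr.from_json(f.read())

    # After: neutral module name, identical usage.
    from compiler.util import ir_data

    with open("input.emb.ir") as f:  # illustrative path
        ir = ir_data.EmbossIr.from_json(f.read())
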
diff --git a/compiler/back_end/cpp/BUILD b/compiler/back_end/cpp/BUILD
index 1c496d6..5bac536 100644
--- a/compiler/back_end/cpp/BUILD
+++ b/compiler/back_end/cpp/BUILD
@@ -33,7 +33,7 @@
     visibility = ["//visibility:public"],
     deps = [
         ":header_generator",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
     ],
 )
 
@@ -53,7 +53,7 @@
         ":attributes",
         "//compiler/back_end/util:code_template",
         "//compiler/util:attribute_util",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:name_conversion",
         "//compiler/util:resources",
diff --git a/compiler/back_end/cpp/emboss_codegen_cpp.py b/compiler/back_end/cpp/emboss_codegen_cpp.py
index 4ac71f0..0a70f41 100644
--- a/compiler/back_end/cpp/emboss_codegen_cpp.py
+++ b/compiler/back_end/cpp/emboss_codegen_cpp.py
@@ -26,7 +26,7 @@
 
 from compiler.back_end.cpp import header_generator
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 
 
 def _parse_command_line(argv):
@@ -82,9 +82,9 @@
 def main(flags):
   if flags.input_file:
     with open(flags.input_file) as f:
-      ir = ir_pb2.EmbossIr.from_json(f.read())
+      ir = ir_data.EmbossIr.from_json(f.read())
   else:
-    ir = ir_pb2.EmbossIr.from_json(sys.stdin.read())
+    ir = ir_data.EmbossIr.from_json(sys.stdin.read())
   config = header_generator.Config(include_enum_traits=flags.cc_enum_traits)
   header, errors = generate_headers_and_log_errors(ir, flags.color_output, config)
   if errors:
diff --git a/compiler/back_end/cpp/header_generator.py b/compiler/back_end/cpp/header_generator.py
index 8b585ca..b8c9675 100644
--- a/compiler/back_end/cpp/header_generator.py
+++ b/compiler/back_end/cpp/header_generator.py
@@ -27,7 +27,7 @@
 from compiler.back_end.util import code_template
 from compiler.util import attribute_util
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import name_conversion
 from compiler.util import resources
@@ -267,8 +267,8 @@
                                            buffer_type, byte_order,
                                            parent_addressable_unit):
   """Returns the adapted C++ type information needed to construct a view."""
-  if (parent_addressable_unit == ir_pb2.AddressableUnit.BYTE and
-      type_definition.addressable_unit == ir_pb2.AddressableUnit.BIT):
+  if (parent_addressable_unit == ir_data.AddressableUnit.BYTE and
+      type_definition.addressable_unit == ir_data.AddressableUnit.BIT):
     assert byte_order
     return _bytes_to_bits_convertor(buffer_type, byte_order, size_in_bits)
   else:
@@ -288,7 +288,7 @@
   C++ types of its parameters, if any.
 
   Arguments:
-      type_definition: The ir_pb2.TypeDefinition whose view should be
+      type_definition: The ir_data.TypeDefinition whose view should be
           constructed.
       size: The size, in type_definition.addressable_units, of the instantiated
           type, or None if it is not known at compile time.
@@ -346,11 +346,11 @@
     validator):
   """Returns the C++ type information needed to construct a field's view.
 
-  Returns the C++ type of an ir_pb2.Type, and the C++ types of its parameters,
+  Returns the C++ type of an ir_data.Type, and the C++ types of its parameters,
   if any.
 
   Arguments:
-      type_ir: The ir_pb2.Type whose view should be constructed.
+      type_ir: The ir_data.Type whose view should be constructed.
       size: The size, in type_definition.addressable_units, of the instantiated
           type, or None if it is not known at compile time.
       byte_order: For BIT types which are direct children of BYTE types,
@@ -429,19 +429,19 @@
 def _builtin_function_name(function):
   """Returns the C++ operator name corresponding to an Emboss operator."""
   functions = {
-      ir_pb2.FunctionMapping.ADDITION: "Sum",
-      ir_pb2.FunctionMapping.SUBTRACTION: "Difference",
-      ir_pb2.FunctionMapping.MULTIPLICATION: "Product",
-      ir_pb2.FunctionMapping.EQUALITY: "Equal",
-      ir_pb2.FunctionMapping.INEQUALITY: "NotEqual",
-      ir_pb2.FunctionMapping.AND: "And",
-      ir_pb2.FunctionMapping.OR: "Or",
-      ir_pb2.FunctionMapping.LESS: "LessThan",
-      ir_pb2.FunctionMapping.LESS_OR_EQUAL: "LessThanOrEqual",
-      ir_pb2.FunctionMapping.GREATER: "GreaterThan",
-      ir_pb2.FunctionMapping.GREATER_OR_EQUAL: "GreaterThanOrEqual",
-      ir_pb2.FunctionMapping.CHOICE: "Choice",
-      ir_pb2.FunctionMapping.MAXIMUM: "Maximum",
+      ir_data.FunctionMapping.ADDITION: "Sum",
+      ir_data.FunctionMapping.SUBTRACTION: "Difference",
+      ir_data.FunctionMapping.MULTIPLICATION: "Product",
+      ir_data.FunctionMapping.EQUALITY: "Equal",
+      ir_data.FunctionMapping.INEQUALITY: "NotEqual",
+      ir_data.FunctionMapping.AND: "And",
+      ir_data.FunctionMapping.OR: "Or",
+      ir_data.FunctionMapping.LESS: "LessThan",
+      ir_data.FunctionMapping.LESS_OR_EQUAL: "LessThanOrEqual",
+      ir_data.FunctionMapping.GREATER: "GreaterThan",
+      ir_data.FunctionMapping.GREATER_OR_EQUAL: "GreaterThanOrEqual",
+      ir_data.FunctionMapping.CHOICE: "Choice",
+      ir_data.FunctionMapping.MAXIMUM: "Maximum",
   }
   return functions[function]
 
@@ -505,9 +505,9 @@
 def _render_builtin_operation(expression, ir, field_reader, subexpressions):
   """Renders a built-in operation (+, -, &&, etc.) into C++ code."""
   assert expression.function.function not in (
-      ir_pb2.FunctionMapping.UPPER_BOUND, ir_pb2.FunctionMapping.LOWER_BOUND), (
+      ir_data.FunctionMapping.UPPER_BOUND, ir_data.FunctionMapping.LOWER_BOUND), (
           "UPPER_BOUND and LOWER_BOUND should be constant.")
-  if expression.function.function == ir_pb2.FunctionMapping.PRESENCE:
+  if expression.function.function == ir_data.FunctionMapping.PRESENCE:
     return field_reader.render_existence(expression.function.args[0],
                                          subexpressions)
   args = expression.function.args
@@ -1464,7 +1464,7 @@
   Offset should be a tuple of (start, end), which are the offsets relative to
   source_location.start.column to set the new start.column and end.column."""
 
-  new_location = ir_pb2.Location()
+  new_location = ir_data.Location()
   new_location.CopyFrom(source_location)
   new_location.start.column = source_location.start.column + offset[0]
   new_location.end.column = source_location.start.column + offset[1]
@@ -1540,10 +1540,10 @@
   errors = []
 
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Attribute], _verify_namespace_attribute,
+      ir, [ir_data.Attribute], _verify_namespace_attribute,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Attribute], _verify_enum_case_attribute,
+      ir, [ir_data.Attribute], _verify_enum_case_attribute,
       parameters={"errors": errors})
 
   return errors
@@ -1571,8 +1571,8 @@
   # Ensure defaults are set on EnumValues for `enum_case`.
   _propagate_defaults(
       ir,
-      targets=[ir_pb2.EnumValue],
-      ancestors=[ir_pb2.Module, ir_pb2.TypeDefinition],
+      targets=[ir_data.EnumValue],
+      ancestors=[ir_data.Module, ir_data.TypeDefinition],
       add_fn=_add_missing_enum_case_attribute_on_enum_value)
 
   return []
diff --git a/compiler/back_end/cpp/header_generator_test.py b/compiler/back_end/cpp/header_generator_test.py
index e67ed50..daea7ea 100644
--- a/compiler/back_end/cpp/header_generator_test.py
+++ b/compiler/back_end/cpp/header_generator_test.py
@@ -18,7 +18,7 @@
 from compiler.back_end.cpp import header_generator
 from compiler.front_end import glue
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import test_util
 
 def _make_ir_from_emb(emb_text, name="m.emb"):
@@ -94,7 +94,7 @@
                            '  BAZ = 2\n')
     attr = ir.module[0].type[0].attribute[0]
 
-    bad_case_source_location = ir_pb2.Location()
+    bad_case_source_location = ir_data.Location()
     bad_case_source_location.CopyFrom(attr.value.source_location)
     # Location of SHORTY_CASE in the attribute line.
     bad_case_source_location.start.column = 30
@@ -113,7 +113,7 @@
                            '  BAZ = 2\n')
     attr = ir.module[0].type[0].attribute[0]
 
-    bad_case_source_location = ir_pb2.Location()
+    bad_case_source_location = ir_data.Location()
     bad_case_source_location.CopyFrom(attr.value.source_location)
     # Location of bad_CASE in the attribute line.
     bad_case_source_location.start.column = 43
@@ -132,7 +132,7 @@
                            '  BAZ = 2\n')
     attr = ir.module[0].type[0].attribute[0]
 
-    bad_case_source_location = ir_pb2.Location()
+    bad_case_source_location = ir_data.Location()
     bad_case_source_location.CopyFrom(attr.value.source_location)
     # Location of BAD_case in the attribute line.
     bad_case_source_location.start.column = 55
@@ -151,7 +151,7 @@
                            '  BAZ = 2\n')
     attr = ir.module[0].type[0].attribute[0]
 
-    bad_case_source_location = ir_pb2.Location()
+    bad_case_source_location = ir_data.Location()
     bad_case_source_location.CopyFrom(attr.value.source_location)
     # Location of the second SHOUTY_CASE in the attribute line.
     bad_case_source_location.start.column = 43
@@ -171,7 +171,7 @@
                            '  BAZ = 2\n')
     attr = ir.module[0].type[0].attribute[0]
 
-    bad_case_source_location = ir_pb2.Location()
+    bad_case_source_location = ir_data.Location()
     bad_case_source_location.CopyFrom(attr.value.source_location)
     # Location of excess comma.
     bad_case_source_location.start.column = 42
diff --git a/compiler/front_end/BUILD b/compiler/front_end/BUILD
index 136095f..6f77145 100644
--- a/compiler/front_end/BUILD
+++ b/compiler/front_end/BUILD
@@ -70,7 +70,7 @@
     name = "module_ir",
     srcs = ["module_ir.py"],
     deps = [
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:name_conversion",
         "//compiler/util:parser_types",
     ],
@@ -87,7 +87,7 @@
         ":module_ir",
         ":parser",
         ":tokenizer",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:test_util",
     ],
 )
@@ -140,7 +140,7 @@
         ":type_check",
         ":write_inference",
         "//compiler/util:error",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:parser_types",
         "//compiler/util:resources",
     ],
@@ -156,7 +156,7 @@
     deps = [
         ":glue",
         "//compiler/util:error",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:parser_types",
         "//compiler/util:test_util",
     ],
@@ -168,7 +168,7 @@
     visibility = ["//visibility:private"],
     deps = [
         "//compiler/util:expression_parser",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:traverse_ir",
     ],
 )
@@ -190,7 +190,7 @@
     visibility = ["//visibility:private"],
     deps = [
         "//compiler/util:error",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:traverse_ir",
     ],
@@ -215,7 +215,7 @@
     deps = [
         ":attributes",
         ":expression_bounds",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:traverse_ir",
     ],
@@ -228,7 +228,7 @@
     deps = [
         ":glue",
         ":write_inference",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:test_util",
     ],
 )
@@ -241,7 +241,7 @@
         ":type_check",
         "//compiler/util:attribute_util",
         "//compiler/util:error",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:traverse_ir",
     ],
@@ -262,7 +262,7 @@
         ":attribute_checker",
         ":glue",
         "//compiler/util:error",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:test_util",
     ],
@@ -274,7 +274,7 @@
     deps = [
         ":attributes",
         "//compiler/util:error",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:traverse_ir",
     ],
@@ -300,7 +300,7 @@
     ],
     deps = [
         ":attributes",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:traverse_ir",
     ],
@@ -326,7 +326,7 @@
     deps = [
         ":attributes",
         "//compiler/util:error",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:resources",
         "//compiler/util:traverse_ir",
@@ -350,7 +350,7 @@
     srcs = ["dependency_checker.py"],
     deps = [
         "//compiler/util:error",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:ir_util",
         "//compiler/util:traverse_ir",
     ],
diff --git a/compiler/front_end/attribute_checker.py b/compiler/front_end/attribute_checker.py
index b621081..6735075 100644
--- a/compiler/front_end/attribute_checker.py
+++ b/compiler/front_end/attribute_checker.py
@@ -24,7 +24,7 @@
 from compiler.front_end import type_check
 from compiler.util import attribute_util
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import traverse_ir
 
@@ -105,18 +105,18 @@
 
 def _construct_integer_attribute(name, value, source_location):
   """Constructs an integer Attribute with the given name and value."""
-  attr_value = ir_pb2.AttributeValue(
-      expression=ir_pb2.Expression(
-          constant=ir_pb2.NumericConstant(value=str(value),
+  attr_value = ir_data.AttributeValue(
+      expression=ir_data.Expression(
+          constant=ir_data.NumericConstant(value=str(value),
                                           source_location=source_location),
-          type=ir_pb2.ExpressionType(
-              integer=ir_pb2.IntegerType(modular_value=str(value),
+          type=ir_data.ExpressionType(
+              integer=ir_data.IntegerType(modular_value=str(value),
                                          modulus="infinity",
                                          minimum_value=str(value),
                                          maximum_value=str(value))),
           source_location=source_location),
       source_location=source_location)
-  return ir_pb2.Attribute(name=ir_pb2.Word(text=name,
+  return ir_data.Attribute(name=ir_data.Word(text=name,
                                            source_location=source_location),
                           value=attr_value,
                           source_location=source_location)
@@ -124,14 +124,14 @@
 
 def _construct_boolean_attribute(name, value, source_location):
   """Constructs a boolean Attribute with the given name and value."""
-  attr_value = ir_pb2.AttributeValue(
-      expression=ir_pb2.Expression(
-          boolean_constant=ir_pb2.BooleanConstant(
+  attr_value = ir_data.AttributeValue(
+      expression=ir_data.Expression(
+          boolean_constant=ir_data.BooleanConstant(
               value=value, source_location=source_location),
-          type=ir_pb2.ExpressionType(boolean=ir_pb2.BooleanType(value=value)),
+          type=ir_data.ExpressionType(boolean=ir_data.BooleanType(value=value)),
           source_location=source_location),
       source_location=source_location)
-  return ir_pb2.Attribute(name=ir_pb2.Word(text=name,
+  return ir_data.Attribute(name=ir_data.Word(text=name,
                                            source_location=source_location),
                           value=attr_value,
                           source_location=source_location)
@@ -139,11 +139,11 @@
 
 def _construct_string_attribute(name, value, source_location):
   """Constructs a string Attribute with the given name and value."""
-  attr_value = ir_pb2.AttributeValue(
-      string_constant=ir_pb2.String(text=value,
+  attr_value = ir_data.AttributeValue(
+      string_constant=ir_data.String(text=value,
                                     source_location=source_location),
       source_location=source_location)
-  return ir_pb2.Attribute(name=ir_pb2.Word(text=name,
+  return ir_data.Attribute(name=ir_data.Word(text=name,
                                            source_location=source_location),
                           value=attr_value,
                           source_location=source_location)
@@ -218,7 +218,7 @@
       ir_util.get_base_type(field.type).atomic_type.reference.canonical_name,
       ir)
   assert field_type is not None
-  assert field_type.addressable_unit != ir_pb2.AddressableUnit.NONE
+  assert field_type.addressable_unit != ir_data.AddressableUnit.NONE
   return field_type.addressable_unit != type_definition.addressable_unit
 
 
@@ -279,9 +279,9 @@
   size = ir_util.get_integer_attribute(type_definition.attribute,
                                        attributes.ADDRESSABLE_UNIT_SIZE)
   if size == 1:
-    type_definition.addressable_unit = ir_pb2.AddressableUnit.BIT
+    type_definition.addressable_unit = ir_data.AddressableUnit.BIT
   elif size == 8:
-    type_definition.addressable_unit = ir_pb2.AddressableUnit.BYTE
+    type_definition.addressable_unit = ir_data.AddressableUnit.BYTE
   # If the addressable_unit_size is not in (1, 8), it will be caught by
   # _verify_addressable_unit_attribute_on_external, below.
 
@@ -400,25 +400,25 @@
 def _add_missing_attributes_on_ir(ir):
   """Adds missing attributes in a complete IR."""
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Module], _add_missing_back_ends_to_module)
+      ir, [ir_data.Module], _add_missing_back_ends_to_module)
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.External], _add_addressable_unit_to_external)
+      ir, [ir_data.External], _add_addressable_unit_to_external)
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Enum], _add_missing_width_and_sign_attributes_on_enum)
+      ir, [ir_data.Enum], _add_missing_width_and_sign_attributes_on_enum)
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure], _add_missing_size_attributes_on_structure,
+      ir, [ir_data.Structure], _add_missing_size_attributes_on_structure,
       incidental_actions={
-          ir_pb2.Module: attribute_util.gather_default_attributes,
-          ir_pb2.TypeDefinition: attribute_util.gather_default_attributes,
-          ir_pb2.Field: attribute_util.gather_default_attributes,
+          ir_data.Module: attribute_util.gather_default_attributes,
+          ir_data.TypeDefinition: attribute_util.gather_default_attributes,
+          ir_data.Field: attribute_util.gather_default_attributes,
       },
       parameters={"defaults": {}})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Field], _add_missing_byte_order_attribute_on_field,
+      ir, [ir_data.Field], _add_missing_byte_order_attribute_on_field,
       incidental_actions={
-          ir_pb2.Module: attribute_util.gather_default_attributes,
-          ir_pb2.TypeDefinition: attribute_util.gather_default_attributes,
-          ir_pb2.Field: attribute_util.gather_default_attributes,
+          ir_data.Module: attribute_util.gather_default_attributes,
+          ir_data.TypeDefinition: attribute_util.gather_default_attributes,
+          ir_data.Field: attribute_util.gather_default_attributes,
       },
       parameters={"defaults": {}})
   return []
@@ -454,22 +454,22 @@
   """Verifies attributes in a complete IR."""
   errors = []
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Attribute], _verify_back_end_attributes,
+      ir, [ir_data.Attribute], _verify_back_end_attributes,
       incidental_actions={
-          ir_pb2.Module: _gather_expected_back_ends,
+          ir_data.Module: _gather_expected_back_ends,
       },
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure], _verify_size_attributes_on_structure,
+      ir, [ir_data.Structure], _verify_size_attributes_on_structure,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Enum], _verify_width_attribute_on_enum,
+      ir, [ir_data.Enum], _verify_width_attribute_on_enum,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.External], _verify_addressable_unit_attribute_on_external,
+      ir, [ir_data.External], _verify_addressable_unit_attribute_on_external,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Field], _verify_field_attributes,
+      ir, [ir_data.Field], _verify_field_attributes,
       parameters={"errors": errors})
   return errors
 
diff --git a/compiler/front_end/attribute_checker_test.py b/compiler/front_end/attribute_checker_test.py
index e54d277..4e7d8c7 100644
--- a/compiler/front_end/attribute_checker_test.py
+++ b/compiler/front_end/attribute_checker_test.py
@@ -18,7 +18,7 @@
 from compiler.front_end import attribute_checker
 from compiler.front_end import glue
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import test_util
 
@@ -550,14 +550,14 @@
     external_ir = _make_ir_from_emb("external Foo:\n"
                                     "  [addressable_unit_size: 1]\n")
     self.assertEqual([], attribute_checker.normalize_and_verify(external_ir))
-    self.assertEqual(ir_pb2.AddressableUnit.BIT,
+    self.assertEqual(ir_data.AddressableUnit.BIT,
                      external_ir.module[0].type[0].addressable_unit)
 
   def test_adds_byte_addressable_unit_to_external(self):
     external_ir = _make_ir_from_emb("external Foo:\n"
                                     "  [addressable_unit_size: 8]\n")
     self.assertEqual([], attribute_checker.normalize_and_verify(external_ir))
-    self.assertEqual(ir_pb2.AddressableUnit.BYTE,
+    self.assertEqual(ir_data.AddressableUnit.BYTE,
                      external_ir.module[0].type[0].addressable_unit)
 
   def test_rejects_requires_using_array(self):
diff --git a/compiler/front_end/constraints.py b/compiler/front_end/constraints.py
index 107b55e..aa62add 100644
--- a/compiler/front_end/constraints.py
+++ b/compiler/front_end/constraints.py
@@ -16,7 +16,7 @@
 
 from compiler.front_end import attributes
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import resources
 from compiler.util import traverse_ir
@@ -105,7 +105,7 @@
       return
     base_type_size = fixed_size
   if base_type_size % type_definition.addressable_unit != 0:
-    assert type_definition.addressable_unit == ir_pb2.AddressableUnit.BYTE
+    assert type_definition.addressable_unit == ir_data.AddressableUnit.BYTE
     errors.append([error.error(source_file_name,
                                type_ir.base_type.source_location,
                                "Array elements in structs must have sizes "
@@ -352,9 +352,9 @@
       type_ir.atomic_type.reference, ir)
   if (type_definition.addressable_unit %
       referenced_type_definition.addressable_unit != 0):
-    assert type_definition.addressable_unit == ir_pb2.AddressableUnit.BIT
+    assert type_definition.addressable_unit == ir_data.AddressableUnit.BIT
     assert (referenced_type_definition.addressable_unit ==
-            ir_pb2.AddressableUnit.BYTE)
+            ir_data.AddressableUnit.BYTE)
     errors.append([
         error.error(source_file_name, type_ir.source_location,
                     "Byte-oriented {} cannot be used in a bits field.".format(
@@ -365,7 +365,7 @@
 def _check_size_of_bits(type_ir, type_definition, source_file_name, errors):
   """Checks that `bits` types are fixed size, less than 64 bits."""
   del type_ir  # Unused
-  if type_definition.addressable_unit != ir_pb2.AddressableUnit.BIT:
+  if type_definition.addressable_unit != ir_data.AddressableUnit.BIT:
     return
   fixed_size = ir_util.get_integer_attribute(
       type_definition.attribute, attributes.FIXED_SIZE)
@@ -554,56 +554,56 @@
   Checks that only constant-size types are used in arrays.
 
   Arguments:
-    ir: An ir_pb2.EmbossIr object to check.
+    ir: An ir_data.EmbossIr object to check.
 
   Returns:
     A list of ConstraintViolations, or an empty list if there are none.
   """
   errors = []
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure, ir_pb2.Type], _check_allowed_in_bits,
+      ir, [ir_data.Structure, ir_data.Type], _check_allowed_in_bits,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      # TODO(bolms): look for [ir_pb2.ArrayType], [ir_pb2.AtomicType], and
+      # TODO(bolms): look for [ir_data.ArrayType], [ir_data.AtomicType], and
       # simplify _check_that_array_base_types_are_fixed_size.
-      ir, [ir_pb2.ArrayType], _check_that_array_base_types_are_fixed_size,
+      ir, [ir_data.ArrayType], _check_that_array_base_types_are_fixed_size,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure, ir_pb2.ArrayType],
+      ir, [ir_data.Structure, ir_data.ArrayType],
       _check_that_array_base_types_in_structs_are_multiples_of_bytes,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.ArrayType, ir_pb2.ArrayType],
+      ir, [ir_data.ArrayType, ir_data.ArrayType],
       _check_that_inner_array_dimensions_are_constant,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure], _check_size_of_bits,
+      ir, [ir_data.Structure], _check_size_of_bits,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure, ir_pb2.Type], _check_type_requirements_for_field,
+      ir, [ir_data.Structure, ir_data.Type], _check_type_requirements_for_field,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Field], _check_field_name_for_reserved_words,
+      ir, [ir_data.Field], _check_field_name_for_reserved_words,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.EnumValue], _check_enum_name_for_reserved_words,
+      ir, [ir_data.EnumValue], _check_enum_name_for_reserved_words,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.TypeDefinition], _check_type_name_for_reserved_words,
+      ir, [ir_data.TypeDefinition], _check_type_name_for_reserved_words,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Expression], _check_constancy_of_constant_references,
+      ir, [ir_data.Expression], _check_constancy_of_constant_references,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Enum], _check_that_enum_values_are_representable,
+      ir, [ir_data.Enum], _check_that_enum_values_are_representable,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Expression], _check_bounds_on_runtime_integer_expressions,
-      incidental_actions={ir_pb2.Attribute: _attribute_in_attribute_action},
-      skip_descendants_of={ir_pb2.EnumValue, ir_pb2.Expression},
+      ir, [ir_data.Expression], _check_bounds_on_runtime_integer_expressions,
+      incidental_actions={ir_data.Attribute: _attribute_in_attribute_action},
+      skip_descendants_of={ir_data.EnumValue, ir_data.Expression},
       parameters={"errors": errors, "in_attribute": None})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.RuntimeParameter],
+      ir, [ir_data.RuntimeParameter],
       _check_type_requirements_for_parameter_type,
       parameters={"errors": errors})
   return errors
diff --git a/compiler/front_end/dependency_checker.py b/compiler/front_end/dependency_checker.py
index 538a679..963d8bf 100644
--- a/compiler/front_end/dependency_checker.py
+++ b/compiler/front_end/dependency_checker.py
@@ -15,7 +15,7 @@
 """Checks for dependency cycles in Emboss IR."""
 
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import traverse_ir
 
@@ -52,28 +52,28 @@
   dependencies = {}
   errors = []
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Reference], _add_reference_to_dependencies,
+      ir, [ir_data.Reference], _add_reference_to_dependencies,
       # TODO(bolms): Add handling for references inside of attributes, once
       # there are attributes with non-constant values.
       skip_descendants_of={
-          ir_pb2.AtomicType, ir_pb2.Attribute, ir_pb2.FieldReference
+          ir_data.AtomicType, ir_data.Attribute, ir_data.FieldReference
       },
       incidental_actions={
-          ir_pb2.Field: _add_name_to_dependencies,
-          ir_pb2.EnumValue: _add_name_to_dependencies,
-          ir_pb2.RuntimeParameter: _add_name_to_dependencies,
+          ir_data.Field: _add_name_to_dependencies,
+          ir_data.EnumValue: _add_name_to_dependencies,
+          ir_data.RuntimeParameter: _add_name_to_dependencies,
       },
       parameters={
           "dependencies": dependencies,
           "errors": errors,
       })
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.FieldReference], _add_field_reference_to_dependencies,
-      skip_descendants_of={ir_pb2.Attribute},
+      ir, [ir_data.FieldReference], _add_field_reference_to_dependencies,
+      skip_descendants_of={ir_data.Attribute},
       incidental_actions={
-          ir_pb2.Field: _add_name_to_dependencies,
-          ir_pb2.EnumValue: _add_name_to_dependencies,
-          ir_pb2.RuntimeParameter: _add_name_to_dependencies,
+          ir_data.Field: _add_name_to_dependencies,
+          ir_data.EnumValue: _add_name_to_dependencies,
+          ir_data.RuntimeParameter: _add_name_to_dependencies,
       },
       parameters={"dependencies": dependencies})
   return dependencies, errors
@@ -120,16 +120,16 @@
   # TODO(bolms): This duplicates work in _find_dependencies that could be
   # shared.
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.FieldReference], _add_field_reference_to_dependencies,
-      skip_descendants_of={ir_pb2.Attribute},
+      ir, [ir_data.FieldReference], _add_field_reference_to_dependencies,
+      skip_descendants_of={ir_data.Attribute},
       incidental_actions={
-          ir_pb2.Field: _add_name_to_dependencies,
-          ir_pb2.EnumValue: _add_name_to_dependencies,
-          ir_pb2.RuntimeParameter: _add_name_to_dependencies,
+          ir_data.Field: _add_name_to_dependencies,
+          ir_data.EnumValue: _add_name_to_dependencies,
+          ir_data.RuntimeParameter: _add_name_to_dependencies,
       },
       parameters={"dependencies": dependencies})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure],
+      ir, [ir_data.Structure],
       _find_dependency_ordering_for_fields_in_structure,
       parameters={"dependencies": dependencies})
 
diff --git a/compiler/front_end/expression_bounds.py b/compiler/front_end/expression_bounds.py
index 2f8d969..e9b423b 100644
--- a/compiler/front_end/expression_bounds.py
+++ b/compiler/front_end/expression_bounds.py
@@ -18,7 +18,7 @@
 import fractions
 import operator
 
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import traverse_ir
 
@@ -65,12 +65,12 @@
 def _compute_constant_value_of_constant_reference(expression, ir):
   referred_object = ir_util.find_object(
       expression.constant_reference.canonical_name, ir)
-  if isinstance(referred_object, ir_pb2.EnumValue):
+  if isinstance(referred_object, ir_data.EnumValue):
     compute_constraints_of_expression(referred_object.value, ir)
     assert ir_util.is_constant(referred_object.value)
     new_value = str(ir_util.constant_value(referred_object.value))
     expression.type.enumeration.value = new_value
-  elif isinstance(referred_object, ir_pb2.Field):
+  elif isinstance(referred_object, ir_data.Field):
     assert ir_util.field_is_virtual(referred_object), (
         "Non-virtual non-enum-value constant reference should have been caught "
         "in type_check.py")
@@ -85,22 +85,22 @@
   for arg in expression.function.args:
     compute_constraints_of_expression(arg, ir)
   op = expression.function.function
-  if op in (ir_pb2.FunctionMapping.ADDITION, ir_pb2.FunctionMapping.SUBTRACTION):
+  if op in (ir_data.FunctionMapping.ADDITION, ir_data.FunctionMapping.SUBTRACTION):
     _compute_constraints_of_additive_operator(expression)
-  elif op == ir_pb2.FunctionMapping.MULTIPLICATION:
+  elif op == ir_data.FunctionMapping.MULTIPLICATION:
     _compute_constraints_of_multiplicative_operator(expression)
-  elif op in (ir_pb2.FunctionMapping.EQUALITY, ir_pb2.FunctionMapping.INEQUALITY,
-              ir_pb2.FunctionMapping.LESS, ir_pb2.FunctionMapping.LESS_OR_EQUAL,
-              ir_pb2.FunctionMapping.GREATER, ir_pb2.FunctionMapping.GREATER_OR_EQUAL,
-              ir_pb2.FunctionMapping.AND, ir_pb2.FunctionMapping.OR):
+  elif op in (ir_data.FunctionMapping.EQUALITY, ir_data.FunctionMapping.INEQUALITY,
+              ir_data.FunctionMapping.LESS, ir_data.FunctionMapping.LESS_OR_EQUAL,
+              ir_data.FunctionMapping.GREATER, ir_data.FunctionMapping.GREATER_OR_EQUAL,
+              ir_data.FunctionMapping.AND, ir_data.FunctionMapping.OR):
     _compute_constant_value_of_comparison_operator(expression)
-  elif op == ir_pb2.FunctionMapping.CHOICE:
+  elif op == ir_data.FunctionMapping.CHOICE:
     _compute_constraints_of_choice_operator(expression)
-  elif op == ir_pb2.FunctionMapping.MAXIMUM:
+  elif op == ir_data.FunctionMapping.MAXIMUM:
     _compute_constraints_of_maximum_function(expression)
-  elif op == ir_pb2.FunctionMapping.PRESENCE:
+  elif op == ir_data.FunctionMapping.PRESENCE:
     _compute_constraints_of_existence_function(expression, ir)
-  elif op in (ir_pb2.FunctionMapping.UPPER_BOUND, ir_pb2.FunctionMapping.LOWER_BOUND):
+  elif op in (ir_data.FunctionMapping.UPPER_BOUND, ir_data.FunctionMapping.LOWER_BOUND):
     _compute_constraints_of_bound_function(expression)
   else:
     assert False, "Unknown operator {!r}".format(op)
@@ -118,7 +118,7 @@
   """Computes the constraints of a reference to a structure's field."""
   field_path = expression.field_reference.path[-1]
   field = ir_util.find_object(field_path, ir)
-  if isinstance(field, ir_pb2.Field) and ir_util.field_is_virtual(field):
+  if isinstance(field, ir_data.Field) and ir_util.field_is_virtual(field):
     # References to virtual fields should have the virtual field's constraints
     # copied over.
     compute_constraints_of_expression(field.read_transform, ir)
@@ -131,7 +131,7 @@
     expression.type.integer.modulus = "1"
     expression.type.integer.modular_value = "0"
     type_definition = ir_util.find_parent_object(field_path, ir)
-    if isinstance(field, ir_pb2.Field):
+    if isinstance(field, ir_data.Field):
       referrent_type = field.type
     else:
       referrent_type = field.physical_type_alias
@@ -317,8 +317,8 @@
 def _compute_constraints_of_additive_operator(expression):
   """Computes the modular value of an additive expression."""
   funcs = {
-      ir_pb2.FunctionMapping.ADDITION: _add,
-      ir_pb2.FunctionMapping.SUBTRACTION: _sub,
+      ir_data.FunctionMapping.ADDITION: _add,
+      ir_data.FunctionMapping.SUBTRACTION: _sub,
   }
   func = funcs[expression.function.function]
   args = expression.function.args
@@ -337,7 +337,7 @@
                                                 new_modulus)
   lmax = left.type.integer.maximum_value
   lmin = left.type.integer.minimum_value
-  if expression.function.function == ir_pb2.FunctionMapping.SUBTRACTION:
+  if expression.function.function == ir_data.FunctionMapping.SUBTRACTION:
     rmax = right.type.integer.minimum_value
     rmin = right.type.integer.maximum_value
   else:
@@ -502,14 +502,14 @@
   args = expression.function.args
   if all(ir_util.is_constant(arg) for arg in args):
     functions = {
-        ir_pb2.FunctionMapping.EQUALITY: operator.eq,
-        ir_pb2.FunctionMapping.INEQUALITY: operator.ne,
-        ir_pb2.FunctionMapping.LESS: operator.lt,
-        ir_pb2.FunctionMapping.LESS_OR_EQUAL: operator.le,
-        ir_pb2.FunctionMapping.GREATER: operator.gt,
-        ir_pb2.FunctionMapping.GREATER_OR_EQUAL: operator.ge,
-        ir_pb2.FunctionMapping.AND: operator.and_,
-        ir_pb2.FunctionMapping.OR: operator.or_,
+        ir_data.FunctionMapping.EQUALITY: operator.eq,
+        ir_data.FunctionMapping.INEQUALITY: operator.ne,
+        ir_data.FunctionMapping.LESS: operator.lt,
+        ir_data.FunctionMapping.LESS_OR_EQUAL: operator.le,
+        ir_data.FunctionMapping.GREATER: operator.gt,
+        ir_data.FunctionMapping.GREATER_OR_EQUAL: operator.ge,
+        ir_data.FunctionMapping.AND: operator.and_,
+        ir_data.FunctionMapping.OR: operator.or_,
     }
     func = functions[expression.function.function]
     expression.type.boolean.value = func(
@@ -518,9 +518,9 @@
 
 def _compute_constraints_of_bound_function(expression):
   """Computes the constraints of $upper_bound or $lower_bound."""
-  if expression.function.function == ir_pb2.FunctionMapping.UPPER_BOUND:
+  if expression.function.function == ir_data.FunctionMapping.UPPER_BOUND:
     value = expression.function.args[0].type.integer.maximum_value
-  elif expression.function.function == ir_pb2.FunctionMapping.LOWER_BOUND:
+  elif expression.function.function == ir_data.FunctionMapping.LOWER_BOUND:
     value = expression.function.args[0].type.integer.minimum_value
   else:
     assert False, "Non-bound function"
@@ -716,9 +716,9 @@
       A (possibly empty) list of errors.
   """
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Expression], compute_constraints_of_expression,
-      skip_descendants_of={ir_pb2.Expression})
+      ir, [ir_data.Expression], compute_constraints_of_expression,
+      skip_descendants_of={ir_data.Expression})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.RuntimeParameter], _compute_constraints_of_parameter,
-      skip_descendants_of={ir_pb2.Expression})
+      ir, [ir_data.RuntimeParameter], _compute_constraints_of_parameter,
+      skip_descendants_of={ir_data.Expression})
   return []
diff --git a/compiler/front_end/glue.py b/compiler/front_end/glue.py
index 0dceaf9..a1b0706 100644
--- a/compiler/front_end/glue.py
+++ b/compiler/front_end/glue.py
@@ -33,7 +33,7 @@
 from compiler.front_end import type_check
 from compiler.front_end import write_inference
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import parser_types
 from compiler.util import resources
 
@@ -142,7 +142,7 @@
   # need to re-parse the prelude for every test .emb.
   if (source_code, file_name) in _cached_modules:
     debug_info = _cached_modules[source_code, file_name]
-    ir = ir_pb2.Module()
+    ir = ir_data.Module()
     ir.CopyFrom(debug_info.ir)
   else:
     debug_info = ModuleDebugInfo(file_name)
@@ -162,7 +162,7 @@
     ir = module_ir.build_ir(parse_result.parse_tree, used_productions)
     ir.source_text = source_code
     debug_info.used_productions = used_productions
-    debug_info.ir = ir_pb2.Module()
+    debug_info.ir = ir_data.Module()
     debug_info.ir.CopyFrom(ir)
     _cached_modules[source_code, file_name] = debug_info
   ir.source_file_name = file_name
@@ -256,7 +256,7 @@
   file_queue = [file_name]
   files = {file_name}
   debug_info = DebugInfo()
-  ir = ir_pb2.EmbossIr(module=[])
+  ir = ir_data.EmbossIr(module=[])
   while file_queue:
     file_to_parse = file_queue[0]
     del file_queue[0]
diff --git a/compiler/front_end/glue_test.py b/compiler/front_end/glue_test.py
index 5c7308b..a2b61ad 100644
--- a/compiler/front_end/glue_test.py
+++ b/compiler/front_end/glue_test.py
@@ -19,7 +19,7 @@
 
 from compiler.front_end import glue
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import parser_types
 from compiler.util import test_util
 
@@ -33,7 +33,7 @@
     _ROOT_PACKAGE, _SPAN_SE_LOG_FILE_PATH).decode(encoding="UTF-8")
 _SPAN_SE_LOG_FILE_READER = test_util.dict_file_reader(
     {_SPAN_SE_LOG_FILE_PATH: _SPAN_SE_LOG_FILE_EMB})
-_SPAN_SE_LOG_FILE_IR = ir_pb2.Module.from_json(
+_SPAN_SE_LOG_FILE_IR = ir_data.Module.from_json(
     pkgutil.get_data(
         _ROOT_PACKAGE,
         _GOLDEN_PATH + "span_se_log_file_status.ir.txt"
@@ -140,7 +140,7 @@
     self.assertFalse(ir)
 
   def test_ir_from_parse_module(self):
-    log_file_path_ir = ir_pb2.Module()
+    log_file_path_ir = ir_data.Module()
     log_file_path_ir.CopyFrom(_SPAN_SE_LOG_FILE_IR)
     log_file_path_ir.source_file_name = _SPAN_SE_LOG_FILE_PATH
     self.assertEqual(log_file_path_ir, glue.parse_module(
diff --git a/compiler/front_end/module_ir.py b/compiler/front_end/module_ir.py
index 52db843..ed27a1e 100644
--- a/compiler/front_end/module_ir.py
+++ b/compiler/front_end/module_ir.py
@@ -25,7 +25,7 @@
 import re
 import sys
 
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import name_conversion
 from compiler.util import parser_types
 
@@ -38,7 +38,7 @@
   def __init__(self, l):
     assert isinstance(l, list), "_List object must wrap list, not '%r'" % l
     self.list = l
-    self.source_location = ir_pb2.Location()
+    self.source_location = ir_data.Location()
 
 
 class _ExpressionTail(object):
@@ -53,7 +53,7 @@
   Expressions.
 
   Attributes:
-    operator: An ir_pb2.Word of the operator's name.
+    operator: An ir_data.Word of the operator's name.
     expression: The expression on the right side of the operator.
     source_location: The source location of the operation fragment.
   """
@@ -62,7 +62,7 @@
   def __init__(self, operator, expression):
     self.operator = operator
     self.expression = expression
-    self.source_location = ir_pb2.Location()
+    self.source_location = ir_data.Location()
 
 
 class _FieldWithType(object):
@@ -72,7 +72,7 @@
   def __init__(self, field, subtypes=None):
     self.field = field
     self.subtypes = subtypes or []
-    self.source_location = ir_pb2.Location()
+    self.source_location = ir_data.Location()
 
 
 def build_ir(parse_tree, used_productions=None):
@@ -181,28 +181,28 @@
 
 
 def _make_prelude_import(position):
-  """Helper function to construct a synthetic ir_pb2.Import for the prelude."""
+  """Helper function to construct a synthetic ir_data.Import for the prelude."""
   location = parser_types.make_location(position, position)
-  return ir_pb2.Import(
-      file_name=ir_pb2.String(text='', source_location=location),
-      local_name=ir_pb2.Word(text='', source_location=location),
+  return ir_data.Import(
+      file_name=ir_data.String(text='', source_location=location),
+      local_name=ir_data.Word(text='', source_location=location),
       source_location=location)
 
 
 def _text_to_operator(text):
   """Converts an operator's textual name to its corresponding enum."""
   operations = {
-      '+': ir_pb2.FunctionMapping.ADDITION,
-      '-': ir_pb2.FunctionMapping.SUBTRACTION,
-      '*': ir_pb2.FunctionMapping.MULTIPLICATION,
-      '==': ir_pb2.FunctionMapping.EQUALITY,
-      '!=': ir_pb2.FunctionMapping.INEQUALITY,
-      '&&': ir_pb2.FunctionMapping.AND,
-      '||': ir_pb2.FunctionMapping.OR,
-      '>': ir_pb2.FunctionMapping.GREATER,
-      '>=': ir_pb2.FunctionMapping.GREATER_OR_EQUAL,
-      '<': ir_pb2.FunctionMapping.LESS,
-      '<=': ir_pb2.FunctionMapping.LESS_OR_EQUAL,
+      '+': ir_data.FunctionMapping.ADDITION,
+      '-': ir_data.FunctionMapping.SUBTRACTION,
+      '*': ir_data.FunctionMapping.MULTIPLICATION,
+      '==': ir_data.FunctionMapping.EQUALITY,
+      '!=': ir_data.FunctionMapping.INEQUALITY,
+      '&&': ir_data.FunctionMapping.AND,
+      '||': ir_data.FunctionMapping.OR,
+      '>': ir_data.FunctionMapping.GREATER,
+      '>=': ir_data.FunctionMapping.GREATER_OR_EQUAL,
+      '<': ir_data.FunctionMapping.LESS,
+      '<=': ir_data.FunctionMapping.LESS_OR_EQUAL,
   }
   return operations[text]
 
@@ -210,10 +210,10 @@
 def _text_to_function(text):
   """Converts a function's textual name to its corresponding enum."""
   functions = {
-      '$max': ir_pb2.FunctionMapping.MAXIMUM,
-      '$present': ir_pb2.FunctionMapping.PRESENCE,
-      '$upper_bound': ir_pb2.FunctionMapping.UPPER_BOUND,
-      '$lower_bound': ir_pb2.FunctionMapping.LOWER_BOUND,
+      '$max': ir_data.FunctionMapping.MAXIMUM,
+      '$present': ir_data.FunctionMapping.PRESENCE,
+      '$upper_bound': ir_data.FunctionMapping.UPPER_BOUND,
+      '$lower_bound': ir_data.FunctionMapping.LOWER_BOUND,
   }
   return functions[text]
 
@@ -267,7 +267,7 @@
   else:
     module_source_location = None
 
-  return ir_pb2.Module(
+  return ir_data.Module(
       documentation=docs.list,
       foreign_import=[_make_prelude_import(position)] + imports.list,
       attribute=attributes.list,
@@ -279,7 +279,7 @@
           '    "import" string-constant "as" snake-word Comment? eol')
 def _import(import_, file_name, as_, local_name, comment, eol):
   del import_, as_, comment, eol  # Unused
-  return ir_pb2.Import(file_name=file_name, local_name=local_name)
+  return ir_data.Import(file_name=file_name, local_name=local_name)
 
 
 @_handles('doc-line -> doc Comment? eol')
@@ -299,7 +299,7 @@
   assert doc_text[0:3] == '-- ', (
       "Documentation token '{}' in unknown format.".format(
           documentation.text))
-  return ir_pb2.Documentation(text=doc_text[3:])
+  return ir_data.Documentation(text=doc_text[3:])
 
 
 # A attribute-line is just a attribute on its own line.
@@ -316,12 +316,12 @@
                attribute_value, close_bracket):
   del open_bracket, colon, close_bracket  # Unused.
   if context_specifier.list:
-    return ir_pb2.Attribute(name=name,
+    return ir_data.Attribute(name=name,
                             value=attribute_value,
                             is_default=bool(default_specifier.list),
                             back_end=context_specifier.list[0])
   else:
-    return ir_pb2.Attribute(name=name,
+    return ir_data.Attribute(name=name,
                             value=attribute_value,
                             is_default=bool(default_specifier.list))
 
@@ -334,28 +334,28 @@
 
 @_handles('attribute-value -> expression')
 def _attribute_value_expression(expression):
-  return ir_pb2.AttributeValue(expression=expression)
+  return ir_data.AttributeValue(expression=expression)
 
 
 @_handles('attribute-value -> string-constant')
 def _attribute_value_string(string):
-  return ir_pb2.AttributeValue(string_constant=string)
+  return ir_data.AttributeValue(string_constant=string)
 
 
 @_handles('boolean-constant -> BooleanConstant')
 def _boolean_constant(boolean):
-  return ir_pb2.BooleanConstant(value=(boolean.text == 'true'))
+  return ir_data.BooleanConstant(value=(boolean.text == 'true'))
 
 
 @_handles('string-constant -> String')
 def _string_constant(string):
-  """Turns a String token into an ir_pb2.String, with proper unescaping.
+  """Turns a String token into an ir_data.String, with proper unescaping.
 
   Arguments:
     string: A String token.
 
   Returns:
-    An ir_pb2.String with the "text" field set to the unescaped value of
+    An ir_data.String with the "text" field set to the unescaped value of
     string.text.
   """
   # TODO(bolms): If/when this logic becomes more complex (e.g., to handle \NNN
@@ -370,7 +370,7 @@
       result.append({'\\': '\\', '"': '"', 'n': '\n'}[substring[1]])
     else:
       result.append(substring)
-  return ir_pb2.String(text=''.join(result))
+  return ir_data.String(text=''.join(result))
 
 
 # In Emboss, '&&' and '||' may not be mixed without parentheses.  These are all
@@ -437,10 +437,10 @@
       question.source_location.start, colon.source_location.end)
   # The function_name is a bit weird, but should suffice for any error messages
   # that might need it.
-  return ir_pb2.Expression(
-      function=ir_pb2.Function(function=ir_pb2.FunctionMapping.CHOICE,
+  return ir_data.Expression(
+      function=ir_data.Function(function=ir_data.FunctionMapping.CHOICE,
                                args=[condition, if_true, if_false],
-                               function_name=ir_pb2.Word(
+                               function_name=ir_data.Word(
                                    text='?:',
                                    source_location=operator_location),
                                source_location=location))
@@ -456,8 +456,8 @@
 def _comparative_expression(left, operator, right):
   location = parser_types.make_location(
       left.source_location.start, right.source_location.end)
-  return ir_pb2.Expression(
-      function=ir_pb2.Function(function=_text_to_operator(operator.text),
+  return ir_data.Expression(
+      function=ir_data.Function(function=_text_to_operator(operator.text),
                                args=[left, right],
                                function_name=operator,
                                source_location=location))
@@ -487,21 +487,21 @@
   productions handles a different precedence level, but are identical in form.
 
   Arguments:
-    expression: An ir_pb2.Expression which is the head of the (expr, operator,
+    expression: An ir_data.Expression which is the head of the (expr, operator,
         expr, operator, expr, ...) list.
     expression_right: A list of _ExpressionTails corresponding to the (operator,
         expr, operator, expr, ...) list that comes after expression.
 
   Returns:
-    An ir_pb2.Expression with the correct recursive structure to represent a
+    An ir_data.Expression with the correct recursive structure to represent a
     list of left-associative operations.
   """
   e = expression
   for right in expression_right.list:
     location = parser_types.make_location(
         e.source_location.start, right.source_location.end)
-    e = ir_pb2.Expression(
-        function=ir_pb2.Function(
+    e = ir_data.Expression(
+        function=ir_data.Function(
             function=_text_to_operator(right.operator.text),
             args=[e, right.expression],
             function_name=right.operator,
@@ -549,13 +549,13 @@
   not allowed.
 
   Arguments:
-    expression: An ir_pb2.Expression which is the head of the (expr, operator,
+    expression: An ir_data.Expression which is the head of the (expr, operator,
         expr, operator, expr, ...) list.
     expression_right: A list of _ExpressionTails corresponding to the (operator,
         expr, operator, expr, ...) list that comes after expression.
 
   Returns:
-    An ir_pb2.Expression with the correct recursive structure to represent a
+    An ir_data.Expression with the correct recursive structure to represent a
     chain of left-associative comparison operations.
   """
   sequence = [expression]
@@ -567,8 +567,8 @@
     left, operator, right = sequence[i:i+3]
     location = parser_types.make_location(
         left.source_location.start, right.source_location.end)
-    comparisons.append(ir_pb2.Expression(
-        function=ir_pb2.Function(
+    comparisons.append(ir_data.Expression(
+        function=ir_data.Function(
             function=_text_to_operator(operator.text),
             args=[left, right],
             function_name=operator,
@@ -578,11 +578,11 @@
   for comparison in comparisons[1:]:
     location = parser_types.make_location(
         e.source_location.start, comparison.source_location.end)
-    e = ir_pb2.Expression(
-        function=ir_pb2.Function(
-            function=ir_pb2.FunctionMapping.AND,
+    e = ir_data.Expression(
+        function=ir_data.Function(
+            function=ir_data.FunctionMapping.AND,
             args=[e, comparison],
-            function_name=ir_pb2.Word(
+            function_name=ir_data.Word(
                 text='&&',
                 source_location=comparison.function.args[0].source_location),
             source_location=location),
@@ -659,18 +659,18 @@
 # allowed, but "+-5" or "-+-something" are not.
 @_handles('negation-expression -> additive-operator bottom-expression')
 def _negation_expression_with_operator(operator, expression):
-  phantom_zero_location = ir_pb2.Location(start=operator.source_location.start,
+  phantom_zero_location = ir_data.Location(start=operator.source_location.start,
                                           end=operator.source_location.start)
-  return ir_pb2.Expression(
-      function=ir_pb2.Function(
+  return ir_data.Expression(
+      function=ir_data.Function(
           function=_text_to_operator(operator.text),
-          args=[ir_pb2.Expression(
-              constant=ir_pb2.NumericConstant(
+          args=[ir_data.Expression(
+              constant=ir_data.NumericConstant(
                   value='0',
                   source_location=phantom_zero_location),
               source_location=phantom_zero_location), expression],
           function_name=operator,
-          source_location=ir_pb2.Location(
+          source_location=ir_data.Location(
               start=operator.source_location.start,
               end=expression.source_location.end)))
 
@@ -689,12 +689,12 @@
 @_handles('bottom-expression -> function-name "(" argument-list ")"')
 def _bottom_expression_function(function, open_paren, arguments, close_paren):
   del open_paren  # Unused.
-  return ir_pb2.Expression(
-      function=ir_pb2.Function(
+  return ir_data.Expression(
+      function=ir_data.Function(
           function=_text_to_function(function.text),
           args=arguments.list,
           function_name=function,
-          source_location=ir_pb2.Location(
+          source_location=ir_data.Location(
               start=function.source_location.start,
               end=close_paren.source_location.end)))
 
@@ -718,22 +718,22 @@
 
 @_handles('bottom-expression -> numeric-constant')
 def _bottom_expression_from_numeric_constant(constant):
-  return ir_pb2.Expression(constant=constant)
+  return ir_data.Expression(constant=constant)
 
 
 @_handles('bottom-expression -> constant-reference')
 def _bottom_expression_from_constant_reference(reference):
-  return ir_pb2.Expression(constant_reference=reference)
+  return ir_data.Expression(constant_reference=reference)
 
 
 @_handles('bottom-expression -> builtin-reference')
 def _bottom_expression_from_builtin(reference):
-  return ir_pb2.Expression(builtin_reference=reference)
+  return ir_data.Expression(builtin_reference=reference)
 
 
 @_handles('bottom-expression -> boolean-constant')
 def _bottom_expression_from_boolean_constant(boolean):
-  return ir_pb2.Expression(boolean_constant=boolean)
+  return ir_data.Expression(boolean_constant=boolean)
 
 
 @_handles('bottom-expression -> field-reference')
@@ -747,7 +747,7 @@
     end_location = field_references.source_location.end
   else:
     end_location = field_reference.source_location.end
-  return ir_pb2.Expression(field_reference=ir_pb2.FieldReference(
+  return ir_data.Expression(field_reference=ir_data.FieldReference(
       path=[field_reference] + field_references.list,
       source_location=parser_types.make_location(
           field_reference.source_location.start, end_location)))
@@ -771,7 +771,7 @@
     n = int(number.text.replace('_', '')[2:], 16)
   else:
     n = int(number.text.replace('_', ''), 10)
-  return ir_pb2.NumericConstant(value=str(n))
+  return ir_data.NumericConstant(value=str(n))
 
 
 @_handles('type-definition -> struct')
@@ -813,7 +813,7 @@
 @_handles('parameter-definition -> snake-name ":" type')
 def _parameter_definition(name, double_colon, parameter_type):
   del double_colon  # Unused
-  return ir_pb2.RuntimeParameter(name=name, physical_type_alias=parameter_type)
+  return ir_data.RuntimeParameter(name=name, physical_type_alias=parameter_type)
 
 
 @_handles('parameter-definition-list-tail -> "," parameter-definition')
@@ -840,13 +840,13 @@
 def _struct_body(indent, docs, attributes, types, fields, dedent):
   del indent, dedent  # Unused.
   return _structure_body(docs, attributes, types, fields,
-                         ir_pb2.AddressableUnit.BYTE)
+                         ir_data.AddressableUnit.BYTE)
 
 
 def _structure_body(docs, attributes, types, fields, addressable_unit):
   """Constructs the body of a structure (bits or struct) definition."""
-  return ir_pb2.TypeDefinition(
-      structure=ir_pb2.Structure(field=[field.field for field in fields.list]),
+  return ir_data.TypeDefinition(
+      structure=ir_data.Structure(field=[field.field for field in fields.list]),
       documentation=docs.list,
       attribute=attributes.list,
       subtype=types.list + [subtype for field in fields.list for subtype in
@@ -941,7 +941,7 @@
 def _bits_body(indent, docs, attributes, types, fields, dedent):
   del indent, dedent  # Unused.
   return _structure_body(docs, attributes, types, fields,
-                         ir_pb2.AddressableUnit.BIT)
+                         ir_data.AddressableUnit.BIT)
 
 
 # Inline bits (defined as part of a field) are more restricted than standalone
@@ -951,7 +951,7 @@
 def _anonymous_bits_body(indent, attributes, fields, dedent):
   del indent, dedent  # Unused.
   return _structure_body(_List([]), attributes, _List([]), fields,
-                         ir_pb2.AddressableUnit.BIT)
+                         ir_data.AddressableUnit.BIT)
 
 
 # A field is:
@@ -965,9 +965,9 @@
           '    Comment? eol field-body?')
 def _field(location, field_type, name, abbreviation, attributes, doc, comment,
            newline, field_body):
-  """Constructs an ir_pb2.Field from the given components."""
+  """Constructs an ir_data.Field from the given components."""
   del comment  # Unused
-  field = ir_pb2.Field(location=location,
+  field = ir_data.Field(location=location,
                        type=field_type,
                        name=name,
                        attribute=attributes.list,
@@ -994,9 +994,9 @@
 @_handles('virtual-field ->'
           '    "let" snake-name "=" expression Comment? eol field-body?')
 def _virtual_field(let, name, equals, value, comment, newline, field_body):
-  """Constructs an ir_pb2.Field from the given components."""
+  """Constructs an ir_data.Field from the given components."""
   del equals, comment  # Unused
-  field = ir_pb2.Field(read_transform=value, name=name)
+  field = ir_data.Field(read_transform=value, name=name)
   if field_body.list:
     field.attribute.extend(field_body.list[0].attribute)
     field.documentation.extend(field_body.list[0].documentation)
@@ -1021,7 +1021,7 @@
           '    enum-body')
 def _inline_enum_field(location, enum, name, abbreviation, colon, comment,
                        newline, enum_body):
-  """Constructs an ir_pb2.Field for an inline enum field."""
+  """Constructs an ir_data.Field for an inline enum field."""
   del enum, colon, comment, newline  # Unused.
   return _inline_type_field(location, name, abbreviation, enum_body)
 
@@ -1047,7 +1047,7 @@
 
 def _inline_type_field(location, name, abbreviation, body):
   """Shared implementation of _inline_enum_field and _anonymous_bit_field."""
-  field = ir_pb2.Field(location=location,
+  field = ir_data.Field(location=location,
                        name=name,
                        attribute=body.attribute,
                        documentation=body.documentation)
@@ -1055,7 +1055,7 @@
   # the user wants to use type attributes, they should create a separate type
   # definition and reference it.
   del body.attribute[:]
-  type_name = ir_pb2.NameDefinition()
+  type_name = ir_data.NameDefinition()
   type_name.CopyFrom(name)
   type_name.name.text = name_conversion.snake_to_camel(type_name.name.text)
   field.type.atomic_type.reference.source_name.extend([type_name.name])
@@ -1084,10 +1084,10 @@
           '    field-location "bits" ":" Comment? eol anonymous-bits-body')
 def _anonymous_bit_field(location, bits_keyword, colon, comment, newline,
                          bits_body):
-  """Constructs an ir_pb2.Field for an anonymous bit field."""
+  """Constructs an ir_data.Field for an anonymous bit field."""
   del colon, comment, newline  # Unused.
-  name = ir_pb2.NameDefinition(
-      name=ir_pb2.Word(
+  name = ir_data.NameDefinition(
+      name=ir_data.Word(
           text=_get_anonymous_field_name(),
           source_location=bits_keyword.source_location),
       source_location=bits_keyword.source_location,
@@ -1098,7 +1098,7 @@
 @_handles('field-body -> Indent doc-line* attribute-line* Dedent')
 def _field_body(indent, docs, attributes, dedent):
   del indent, dedent  # Unused.
-  return ir_pb2.Field(documentation=docs.list, attribute=attributes.list)
+  return ir_data.Field(documentation=docs.list, attribute=attributes.list)
 
 
 # A parenthetically-denoted abbreviation.
@@ -1127,11 +1127,11 @@
 @_handles('enum-body -> Indent doc-line* attribute-line* enum-value+ Dedent')
 def _enum_body(indent, docs, attributes, values, dedent):
   del indent, dedent  # Unused.
-  return ir_pb2.TypeDefinition(
-      enumeration=ir_pb2.Enum(value=values.list),
+  return ir_data.TypeDefinition(
+      enumeration=ir_data.Enum(value=values.list),
       documentation=docs.list,
       attribute=attributes.list,
-      addressable_unit=ir_pb2.AddressableUnit.BIT)
+      addressable_unit=ir_data.AddressableUnit.BIT)
 
 
 # name = value
@@ -1140,7 +1140,7 @@
 def _enum_value(name, equals, expression, attribute, documentation, comment, newline,
                 body):
   del equals, comment, newline  # Unused.
-  result = ir_pb2.EnumValue(name=name,
+  result = ir_data.EnumValue(name=name,
                             value=expression,
                             documentation=documentation.list,
                             attribute=attribute.list)
@@ -1153,7 +1153,7 @@
 @_handles('enum-value-body -> Indent doc-line* attribute-line* Dedent')
 def _enum_value_body(indent, docs, attributes, dedent):
   del indent, dedent  # Unused.
-  return ir_pb2.EnumValue(documentation=docs.list, attribute=attributes.list)
+  return ir_data.EnumValue(documentation=docs.list, attribute=attributes.list)
 
 
 # An external is just a declaration that a type exists and has certain
@@ -1170,10 +1170,10 @@
 # line, or it won't parse (because no Indent/Dedent tokens will be emitted).
 @_handles('external-body -> Indent doc-line* attribute-line* Dedent')
 def _external_body(indent, docs, attributes, dedent):
-  return ir_pb2.TypeDefinition(
-      external=ir_pb2.External(
+  return ir_data.TypeDefinition(
+      external=ir_data.External(
           # Set source_location here, since it won't be set automatically.
-          source_location=ir_pb2.Location(start=indent.source_location.start,
+          source_location=ir_data.Location(start=indent.source_location.start,
                                           end=dedent.source_location.end)),
       documentation=docs.list,
       attribute=attributes.list)
@@ -1182,7 +1182,7 @@
 @_handles('field-location -> expression "[" "+" expression "]"')
 def _field_location(start, open_bracket, plus, size, close_bracket):
   del open_bracket, plus, close_bracket  # Unused.
-  return ir_pb2.FieldLocation(start=start, size=size)
+  return ir_data.FieldLocation(start=start, size=size)
 
 
 @_handles('delimited-argument-list -> "(" argument-list ")"')
@@ -1212,8 +1212,8 @@
   atomic_type_location = parser_types.make_location(
       reference.source_location.start,
       atomic_type_source_location_end)
-  t = ir_pb2.Type(
-      atomic_type=ir_pb2.AtomicType(
+  t = ir_data.Type(
+      atomic_type=ir_data.AtomicType(
           reference=reference,
           source_location=atomic_type_location,
           runtime_parameter=parameters.list[0].list if parameters.list else []),
@@ -1222,15 +1222,15 @@
   for length in array_spec.list:
     location = parser_types.make_location(
         t.source_location.start, length.source_location.end)
-    if isinstance(length, ir_pb2.Expression):
-      t = ir_pb2.Type(
-          array_type=ir_pb2.ArrayType(base_type=t,
+    if isinstance(length, ir_data.Expression):
+      t = ir_data.Type(
+          array_type=ir_data.ArrayType(base_type=t,
                                       element_count=length,
                                       source_location=location),
           source_location=location)
-    elif isinstance(length, ir_pb2.Empty):
-      t = ir_pb2.Type(
-          array_type=ir_pb2.ArrayType(base_type=t,
+    elif isinstance(length, ir_data.Empty):
+      t = ir_data.Type(
+          array_type=ir_data.ArrayType(base_type=t,
                                       automatic=length,
                                       source_location=location),
           source_location=location)
@@ -1245,7 +1245,7 @@
 def _type_size_specifier(colon, numeric_constant):
   """handles the ":32" part of a type specifier like "UInt:32"."""
   del colon
-  return ir_pb2.Expression(constant=numeric_constant)
+  return ir_data.Expression(constant=numeric_constant)
 
 
 # The distinctions between different formats of NameDefinitions, Words, and
@@ -1254,7 +1254,7 @@
 @_handles('snake-name -> snake-word')
 @_handles('constant-name -> constant-word')
 def _name(word):
-  return ir_pb2.NameDefinition(name=word)
+  return ir_data.NameDefinition(name=word)
 
 
 @_handles('type-word -> CamelWord')
@@ -1285,7 +1285,7 @@
 @_handles('function-name -> "$upper_bound"')
 @_handles('function-name -> "$lower_bound"')
 def _word(word):
-  return ir_pb2.Word(text=word.text)
+  return ir_data.Word(text=word.text)
 
 
 @_handles('type-reference -> type-reference-tail')
@@ -1299,13 +1299,13 @@
 @_handles('snake-reference -> snake-word')
 @_handles('snake-reference -> builtin-field-word')
 def _reference(word):
-  return ir_pb2.Reference(source_name=[word])
+  return ir_data.Reference(source_name=[word])
 
 
 @_handles('builtin-reference -> builtin-word')
 def _builtin_reference(word):
-  return ir_pb2.Reference(source_name=[word],
-                          canonical_name=ir_pb2.CanonicalName(
+  return ir_data.Reference(source_name=[word],
+                          canonical_name=ir_data.CanonicalName(
                               object_path=[word.text]))
 
 
@@ -1357,8 +1357,8 @@
 def _auto_array_length_specifier(open_bracket, close_bracket):
   # Note that the Void's source_location is the space between the brackets (if
   # any).
-  return ir_pb2.Empty(
-      source_location=ir_pb2.Location(start=open_bracket.source_location.end,
+  return ir_data.Empty(
+      source_location=ir_data.Location(start=open_bracket.source_location.end,
                                       end=close_bracket.source_location.start))
 
 
diff --git a/compiler/front_end/module_ir_test.py b/compiler/front_end/module_ir_test.py
index b2ad69b..1f4233d 100644
--- a/compiler/front_end/module_ir_test.py
+++ b/compiler/front_end/module_ir_test.py
@@ -23,7 +23,7 @@
 from compiler.front_end import module_ir
 from compiler.front_end import parser
 from compiler.front_end import tokenizer
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import test_util
 
 _TESTDATA_PATH = "testdata.golden"
@@ -31,7 +31,7 @@
         _TESTDATA_PATH, "span_se_log_file_status.emb").decode(encoding="UTF-8")
 _MINIMAL_SAMPLE = parser.parse_module(
     tokenizer.tokenize(_MINIMAL_SOURCE, "")[0]).parse_tree
-_MINIMAL_SAMPLE_IR = ir_pb2.Module.from_json(
+_MINIMAL_SAMPLE_IR = ir_data.Module.from_json(
     pkgutil.get_data(_TESTDATA_PATH, "span_se_log_file_status.ir.txt").decode(
         encoding="UTF-8")
 )
@@ -3978,7 +3978,7 @@
     name, emb, ir_text = case.split("---")
     name = name.strip()
     try:
-      ir = ir_pb2.Module.from_json(ir_text)
+      ir = ir_data.Module.from_json(ir_text)
     except Exception:
       print(name)
       raise
@@ -4110,7 +4110,7 @@
           errors.extend(
               _check_all_source_locations(i, item_path, child_start, child_end))
     else:
-      if issubclass(spec.type, ir_pb2.Message):
+      if issubclass(spec.type, ir_data.Message):
         errors.extend(_check_all_source_locations(getattr(proto, name),
                                                   field_path, child_start,
                                                   child_end))
diff --git a/compiler/front_end/symbol_resolver.py b/compiler/front_end/symbol_resolver.py
index f4fb581..6f7c030 100644
--- a/compiler/front_end/symbol_resolver.py
+++ b/compiler/front_end/symbol_resolver.py
@@ -21,7 +21,7 @@
 import collections
 
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import traverse_ir
 
@@ -66,7 +66,7 @@
 
 def _nested_name(canonical_name, name):
   """Creates a new CanonicalName with name appended to the object_path."""
-  return ir_pb2.CanonicalName(
+  return ir_data.CanonicalName(
       module_file=canonical_name.module_file,
       object_path=list(canonical_name.object_path) + [name])
 
@@ -84,7 +84,7 @@
   Attributes:
     canonical_name: The absolute name of this symbol; e.g. ("file.emb",
       "TypeName", "SubTypeName", "field_name")
-    source_location: The ir_pb2.SourceLocation where this symbol is defined.
+    source_location: The ir_data.Location where this symbol is defined.
     visibility: LOCAL, PRIVATE, or SEARCHABLE; see below.
     alias: If set, this name is merely a pointer to another name.
   """
@@ -146,9 +146,9 @@
     _add_name_to_scope(field.abbreviation, scope, new_scope.canonical_name,
                        _Scope.PRIVATE, errors)
 
-  value_builtin_name = ir_pb2.Word(
+  value_builtin_name = ir_data.Word(
       text="this",
-      source_location=ir_pb2.Location(is_synthetic=True),
+      source_location=ir_data.Location(is_synthetic=True),
   )
   # In "inside field" scope, the name `this` maps back to the field itself.
   # This is important for attributes like `[requires]`.
@@ -174,14 +174,14 @@
 
 
 def _set_scope_for_type_definition(type_definition, scope):
-  """Sets the current scope for an ir_pb2.TypeDefinition."""
+  """Sets the current scope for an ir_data.AddressableUnit."""
   return {"scope": scope[type_definition.name.name.text]}
 
 
 def _add_module_to_scope(module, scope):
   """Adds the name of the module to the given scope."""
   module_symbol_table = _Scope(
-      ir_pb2.CanonicalName(module_file=module.source_file_name,
+      ir_data.CanonicalName(module_file=module.source_file_name,
                            object_path=[]),
       None,
       _Scope.SEARCHABLE)
@@ -208,11 +208,11 @@
   symbol_tables = {}
   errors = []
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Module], _add_module_to_scope,
+      ir, [ir_data.Module], _add_module_to_scope,
       parameters={"errors": errors, "scope": symbol_tables})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.TypeDefinition], _add_type_name_to_scope,
-      incidental_actions={ir_pb2.Module: _set_scope_for_module},
+      ir, [ir_data.TypeDefinition], _add_type_name_to_scope,
+      incidental_actions={ir_data.Module: _set_scope_for_module},
       parameters={"errors": errors, "scope": symbol_tables})
   if errors:
     # Ideally, we would find duplicate field names elsewhere in the module, even
@@ -223,24 +223,24 @@
     return symbol_tables, errors
 
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.EnumValue], _add_enum_value_to_scope,
+      ir, [ir_data.EnumValue], _add_enum_value_to_scope,
       incidental_actions={
-          ir_pb2.Module: _set_scope_for_module,
-          ir_pb2.TypeDefinition: _set_scope_for_type_definition,
+          ir_data.Module: _set_scope_for_module,
+          ir_data.TypeDefinition: _set_scope_for_type_definition,
       },
       parameters={"errors": errors, "scope": symbol_tables})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Field], _add_struct_field_to_scope,
+      ir, [ir_data.Field], _add_struct_field_to_scope,
       incidental_actions={
-          ir_pb2.Module: _set_scope_for_module,
-          ir_pb2.TypeDefinition: _set_scope_for_type_definition,
+          ir_data.Module: _set_scope_for_module,
+          ir_data.TypeDefinition: _set_scope_for_type_definition,
       },
       parameters={"errors": errors, "scope": symbol_tables})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.RuntimeParameter], _add_parameter_name_to_scope,
+      ir, [ir_data.RuntimeParameter], _add_parameter_name_to_scope,
       incidental_actions={
-          ir_pb2.Module: _set_scope_for_module,
-          ir_pb2.TypeDefinition: _set_scope_for_type_definition,
+          ir_data.Module: _set_scope_for_module,
+          ir_data.TypeDefinition: _set_scope_for_type_definition,
       },
       parameters={"errors": errors, "scope": symbol_tables})
   return symbol_tables, errors
@@ -416,7 +416,7 @@
                                previous_reference.source_name[0].text))
       return
     assert previous_field.type.WhichOneof("type") == "atomic_type"
-    member_name = ir_pb2.CanonicalName()
+    member_name = ir_data.CanonicalName()
     member_name.CopyFrom(
         previous_field.type.atomic_type.reference.canonical_name)
     member_name.object_path.extend([ref.source_name[0].text])
@@ -446,7 +446,7 @@
 
 def _set_visible_scopes_for_module(module):
   """Sets visible_scopes for the given module."""
-  self_scope = ir_pb2.CanonicalName(module_file=module.source_file_name)
+  self_scope = ir_data.CanonicalName(module_file=module.source_file_name)
   extra_visible_scopes = []
   for foreign_import in module.foreign_import:
     # Anonymous imports are searched for top-level names; named imports are not.
@@ -454,7 +454,7 @@
     # modules must be imported with names.
     if not foreign_import.local_name.text:
       extra_visible_scopes.append(
-          ir_pb2.CanonicalName(module_file=foreign_import.file_name.text))
+          ir_data.CanonicalName(module_file=foreign_import.file_name.text))
   return {"visible_scopes": (self_scope,) + tuple(extra_visible_scopes)}
 
 
@@ -477,9 +477,9 @@
   # Symbol resolution is broken into five passes.  First, this code resolves any
   # imports, and adds import aliases to modules.
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Import], _add_import_to_scope,
+      ir, [ir_data.Import], _add_import_to_scope,
       incidental_actions={
-          ir_pb2.Module: _module_source_from_table_action,
+          ir_data.Module: _module_source_from_table_action,
       },
       parameters={"errors": errors, "table": table})
   if errors:
@@ -487,21 +487,21 @@
   # Next, this resolves all absolute references (e.g., it resolves "UInt" in
   # "0:1  UInt  field" to [prelude]::UInt).
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Reference], _resolve_reference,
-      skip_descendants_of=(ir_pb2.FieldReference,),
+      ir, [ir_data.Reference], _resolve_reference,
+      skip_descendants_of=(ir_data.FieldReference,),
       incidental_actions={
-          ir_pb2.TypeDefinition: _set_visible_scopes_for_type_definition,
-          ir_pb2.Module: _set_visible_scopes_for_module,
-          ir_pb2.Attribute: _set_visible_scopes_for_attribute,
+          ir_data.TypeDefinition: _set_visible_scopes_for_type_definition,
+          ir_data.Module: _set_visible_scopes_for_module,
+          ir_data.Attribute: _set_visible_scopes_for_attribute,
       },
       parameters={"table": table, "errors": errors, "field": None})
   # Lastly, head References to fields (e.g., the `a` of `a.b.c`) are resolved.
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.FieldReference], _resolve_head_of_field_reference,
+      ir, [ir_data.FieldReference], _resolve_head_of_field_reference,
       incidental_actions={
-          ir_pb2.TypeDefinition: _set_visible_scopes_for_type_definition,
-          ir_pb2.Module: _set_visible_scopes_for_module,
-          ir_pb2.Attribute: _set_visible_scopes_for_attribute,
+          ir_data.TypeDefinition: _set_visible_scopes_for_type_definition,
+          ir_data.Module: _set_visible_scopes_for_module,
+          ir_data.Attribute: _set_visible_scopes_for_attribute,
       },
       parameters={"table": table, "errors": errors, "field": None})
   return errors
@@ -511,11 +511,11 @@
   """Resolves structure member accesses ("field.subfield") in ir."""
   errors = []
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.FieldReference], _resolve_field_reference,
+      ir, [ir_data.FieldReference], _resolve_field_reference,
       incidental_actions={
-          ir_pb2.TypeDefinition: _set_visible_scopes_for_type_definition,
-          ir_pb2.Module: _set_visible_scopes_for_module,
-          ir_pb2.Attribute: _set_visible_scopes_for_attribute,
+          ir_data.TypeDefinition: _set_visible_scopes_for_type_definition,
+          ir_data.Module: _set_visible_scopes_for_module,
+          ir_data.Attribute: _set_visible_scopes_for_attribute,
       },
       parameters={"errors": errors, "field": None})
   return errors
diff --git a/compiler/front_end/synthetics.py b/compiler/front_end/synthetics.py
index 8a6f856..7f6aabb 100644
--- a/compiler/front_end/synthetics.py
+++ b/compiler/front_end/synthetics.py
@@ -17,20 +17,20 @@
 from compiler.front_end import attributes
 from compiler.util import error
 from compiler.util import expression_parser
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import traverse_ir
 
 
 def _mark_as_synthetic(proto):
   """Marks all source_locations in proto with is_synthetic=True."""
-  if not isinstance(proto, ir_pb2.Message):
+  if not isinstance(proto, ir_data.Message):
     return
   if hasattr(proto, "source_location"):
     proto.source_location.is_synthetic = True
   for name, value in proto.raw_fields.items():
     if name != "source_location":
-      if isinstance(value, ir_pb2.TypedScopedList):
+      if isinstance(value, ir_data.TypedScopedList):
         for i in range(len(value)):
           _mark_as_synthetic(value[i])
       else:
@@ -39,9 +39,9 @@
 
 def _skip_text_output_attribute():
   """Returns the IR for a [text_output: "Skip"] attribute."""
-  result = ir_pb2.Attribute(
-      name=ir_pb2.Word(text=attributes.TEXT_OUTPUT),
-      value=ir_pb2.AttributeValue(string_constant=ir_pb2.String(text="Skip")))
+  result = ir_data.Attribute(
+      name=ir_data.Word(text=attributes.TEXT_OUTPUT),
+      value=ir_data.AttributeValue(string_constant=ir_data.String(text="Skip")))
   _mark_as_synthetic(result)
   return result
 
@@ -79,8 +79,8 @@
   resolved -- so very little in ir_util will work at this point.
 
   Arguments:
-      structure: The ir_pb2.Structure on which to synthesize fields.
-      type_definition: The ir_pb2.TypeDefinition containing structure.
+      structure: The ir_data.Structure on which to synthesize fields.
+      type_definition: The ir_data.TypeDefinition containing structure.
 
   Returns:
       None
@@ -100,31 +100,31 @@
       assert False, ("Unable to find corresponding type {} for anonymous field "
                      "in {}.".format(
                          field.type.atomic_type.reference, type_definition))
-    anonymous_reference = ir_pb2.Reference(source_name=[field.name.name])
-    anonymous_field_reference = ir_pb2.FieldReference(
+    anonymous_reference = ir_data.Reference(source_name=[field.name.name])
+    anonymous_field_reference = ir_data.FieldReference(
         path=[anonymous_reference])
     for subfield in field_type.structure.field:
-      alias_field_reference = ir_pb2.FieldReference(
+      alias_field_reference = ir_data.FieldReference(
           path=[
               anonymous_reference,
-              ir_pb2.Reference(source_name=[subfield.name.name]),
+              ir_data.Reference(source_name=[subfield.name.name]),
           ]
       )
-      new_existence_condition = ir_pb2.Expression()
+      new_existence_condition = ir_data.Expression()
       new_existence_condition.CopyFrom(_ANONYMOUS_BITS_ALIAS_EXISTENCE_SKELETON)
       existence_clauses = new_existence_condition.function.args
       existence_clauses[0].function.args[0].field_reference.CopyFrom(
           anonymous_field_reference)
       existence_clauses[1].function.args[0].field_reference.CopyFrom(
           alias_field_reference)
-      new_read_transform = ir_pb2.Expression(
+      new_read_transform = ir_data.Expression(
           field_reference=alias_field_reference)
       # This treats *most* of the alias field as synthetic, but not its name(s):
       # leaving the name(s) as "real" means that symbol collisions with the
       # surrounding structure will be properly reported to the user.
       _mark_as_synthetic(new_existence_condition)
       _mark_as_synthetic(new_read_transform)
-      new_alias = ir_pb2.Field(
+      new_alias = ir_data.Field(
           read_transform=new_read_transform,
           existence_condition=new_existence_condition,
           name=subfield.name)
@@ -156,13 +156,13 @@
 def _add_size_bound_virtuals(structure, type_definition):
   """Adds ${min,max}_size_in_{bits,bytes} virtual fields to structure."""
   names = {
-      ir_pb2.AddressableUnit.BIT: ("$max_size_in_bits", "$min_size_in_bits"),
-      ir_pb2.AddressableUnit.BYTE: ("$max_size_in_bytes", "$min_size_in_bytes"),
+      ir_data.AddressableUnit.BIT: ("$max_size_in_bits", "$min_size_in_bits"),
+      ir_data.AddressableUnit.BYTE: ("$max_size_in_bytes", "$min_size_in_bytes"),
   }
   for name in names[type_definition.addressable_unit]:
-    bound_field = ir_pb2.Field(
+    bound_field = ir_data.Field(
         read_transform=_SIZE_BOUNDS[name],
-        name=ir_pb2.NameDefinition(name=ir_pb2.Word(text=name)),
+        name=ir_data.NameDefinition(name=ir_data.Word(text=name)),
         existence_condition=expression_parser.parse("true"),
         attribute=[_skip_text_output_attribute()]
     )
@@ -184,8 +184,8 @@
 def _add_size_virtuals(structure, type_definition):
   """Adds a $size_in_bits or $size_in_bytes virtual field to structure."""
   names = {
-      ir_pb2.AddressableUnit.BIT: "$size_in_bits",
-      ir_pb2.AddressableUnit.BYTE: "$size_in_bytes",
+      ir_data.AddressableUnit.BIT: "$size_in_bits",
+      ir_data.AddressableUnit.BYTE: "$size_in_bytes",
   }
   size_field_name = names[type_definition.addressable_unit]
   size_clauses = []
@@ -194,22 +194,22 @@
     # to the size of the structure.
     if ir_util.field_is_virtual(field):
       continue
-    size_clause = ir_pb2.Expression()
+    size_clause = ir_data.Expression()
     size_clause.CopyFrom(_SIZE_CLAUSE_SKELETON)
     # Copy the appropriate clauses into `existence_condition ? start + size : 0`
     size_clause.function.args[0].CopyFrom(field.existence_condition)
     size_clause.function.args[1].function.args[0].CopyFrom(field.location.start)
     size_clause.function.args[1].function.args[1].CopyFrom(field.location.size)
     size_clauses.append(size_clause)
-  size_expression = ir_pb2.Expression()
+  size_expression = ir_data.Expression()
   size_expression.CopyFrom(_SIZE_SKELETON)
   size_expression.function.args.extend(size_clauses)
   _mark_as_synthetic(size_expression)
-  size_field = ir_pb2.Field(
+  size_field = ir_data.Field(
       read_transform=size_expression,
-      name=ir_pb2.NameDefinition(name=ir_pb2.Word(text=size_field_name)),
-      existence_condition=ir_pb2.Expression(
-          boolean_constant=ir_pb2.BooleanConstant(value=True)
+      name=ir_data.NameDefinition(name=ir_data.Word(text=size_field_name)),
+      existence_condition=ir_data.Expression(
+          boolean_constant=ir_data.BooleanConstant(value=True)
       ),
       attribute=[_skip_text_output_attribute()]
   )
@@ -266,7 +266,7 @@
       # instead.
       continue
     traverse_ir.fast_traverse_node_top_down(
-        field.location.size, [ir_pb2.Expression],
+        field.location.size, [ir_data.Expression],
         _check_for_bad_next_keyword_in_size,
         parameters={
             "errors": new_errors,
@@ -284,7 +284,7 @@
       errors.extend(new_errors)
       return
     traverse_ir.fast_traverse_node_top_down(
-        field.location.start, [ir_pb2.Expression],
+        field.location.start, [ir_data.Expression],
         _maybe_replace_next_keyword_in_expression,
         parameters={
             "last_location": last_physical_field_location,
@@ -323,10 +323,10 @@
   """
   errors = []
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure], _replace_next_keyword,
+      ir, [ir_data.Structure], _replace_next_keyword,
       parameters={"errors": errors})
   if errors:
     return errors
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Structure], _add_virtuals_to_structure)
+      ir, [ir_data.Structure], _add_virtuals_to_structure)
   return []
diff --git a/compiler/front_end/synthetics_test.py b/compiler/front_end/synthetics_test.py
index bae2759..85a3dfb 100644
--- a/compiler/front_end/synthetics_test.py
+++ b/compiler/front_end/synthetics_test.py
@@ -18,7 +18,7 @@
 from compiler.front_end import glue
 from compiler.front_end import synthetics
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import test_util
 
 
@@ -88,12 +88,12 @@
                      alias_field.existence_condition.function.args[1].function.
                      args[0].field_reference.path[1].source_name[-1].text)
     self.assertEqual(
-        ir_pb2.FunctionMapping.PRESENCE,
+        ir_data.FunctionMapping.PRESENCE,
         alias_field.existence_condition.function.args[0].function.function)
     self.assertEqual(
-        ir_pb2.FunctionMapping.PRESENCE,
+        ir_data.FunctionMapping.PRESENCE,
         alias_field.existence_condition.function.args[1].function.function)
-    self.assertEqual(ir_pb2.FunctionMapping.AND,
+    self.assertEqual(ir_data.FunctionMapping.AND,
                      alias_field.existence_condition.function.function)
 
   def test_adds_correct_read_transform(self):
@@ -177,15 +177,15 @@
     max_size_in_bytes_field = structure.field[3]
     min_size_in_bytes_field = structure.field[4]
     self.assertEqual("$size_in_bytes", size_in_bytes_field.name.name.text)
-    self.assertEqual(ir_pb2.FunctionMapping.MAXIMUM,
+    self.assertEqual(ir_data.FunctionMapping.MAXIMUM,
                      size_in_bytes_field.read_transform.function.function)
     self.assertEqual("$max_size_in_bytes",
                      max_size_in_bytes_field.name.name.text)
-    self.assertEqual(ir_pb2.FunctionMapping.UPPER_BOUND,
+    self.assertEqual(ir_data.FunctionMapping.UPPER_BOUND,
                      max_size_in_bytes_field.read_transform.function.function)
     self.assertEqual("$min_size_in_bytes",
                      min_size_in_bytes_field.name.name.text)
-    self.assertEqual(ir_pb2.FunctionMapping.LOWER_BOUND,
+    self.assertEqual(ir_data.FunctionMapping.LOWER_BOUND,
                      min_size_in_bytes_field.read_transform.function.function)
     # The correctness of $size_in_bytes et al are tested much further down
     # stream, in tests of the generated C++ code.
@@ -200,15 +200,15 @@
     max_size_in_bits_field = structure.field[3]
     min_size_in_bits_field = structure.field[4]
     self.assertEqual("$size_in_bits", size_in_bits_field.name.name.text)
-    self.assertEqual(ir_pb2.FunctionMapping.MAXIMUM,
+    self.assertEqual(ir_data.FunctionMapping.MAXIMUM,
                      size_in_bits_field.read_transform.function.function)
     self.assertEqual("$max_size_in_bits",
                      max_size_in_bits_field.name.name.text)
-    self.assertEqual(ir_pb2.FunctionMapping.UPPER_BOUND,
+    self.assertEqual(ir_data.FunctionMapping.UPPER_BOUND,
                      max_size_in_bits_field.read_transform.function.function)
     self.assertEqual("$min_size_in_bits",
                      min_size_in_bits_field.name.name.text)
-    self.assertEqual(ir_pb2.FunctionMapping.LOWER_BOUND,
+    self.assertEqual(ir_data.FunctionMapping.LOWER_BOUND,
                      min_size_in_bits_field.read_transform.function.function)
     # The correctness of $size_in_bits et al are tested much further down
     # stream, in tests of the generated C++ code.
@@ -232,7 +232,7 @@
     self.assertEqual([], synthetics.desugar(ir))
     offset_of_b = ir.module[0].type[0].structure.field[1].location.start
     self.assertTrue(offset_of_b.HasField("function"))
-    self.assertEqual(offset_of_b.function.function, ir_pb2.FunctionMapping.ADDITION)
+    self.assertEqual(offset_of_b.function.function, ir_data.FunctionMapping.ADDITION)
     self.assertEqual(offset_of_b.function.args[0].constant.value, "1")
     self.assertEqual(offset_of_b.function.args[1].constant.value, "2")
     offset_of_c = ir.module[0].type[0].structure.field[2].location.start
diff --git a/compiler/front_end/type_check.py b/compiler/front_end/type_check.py
index c15aaad..727989f 100644
--- a/compiler/front_end/type_check.py
+++ b/compiler/front_end/type_check.py
@@ -16,7 +16,7 @@
 
 from compiler.front_end import attributes
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import traverse_ir
 
@@ -44,11 +44,11 @@
 
 
 def _annotate_as_integer(expression):
-  expression.type.integer.CopyFrom(ir_pb2.IntegerType())
+  expression.type.integer.CopyFrom(ir_data.IntegerType())
 
 
 def _annotate_as_boolean(expression):
-  expression.type.boolean.CopyFrom(ir_pb2.BooleanType())
+  expression.type.boolean.CopyFrom(ir_data.BooleanType())
 
 
 def _type_check(expression, source_file_name, errors, type_oneof, type_name,
@@ -87,10 +87,10 @@
   """Annotates the type of a constant reference."""
   referred_name = expression.constant_reference.canonical_name
   referred_object = ir_util.find_object(referred_name, ir)
-  if isinstance(referred_object, ir_pb2.EnumValue):
+  if isinstance(referred_object, ir_data.EnumValue):
     expression.type.enumeration.name.CopyFrom(expression.constant_reference)
     del expression.type.enumeration.name.canonical_name.object_path[-1]
-  elif isinstance(referred_object, ir_pb2.Field):
+  elif isinstance(referred_object, ir_data.Field):
     if not ir_util.field_is_virtual(referred_object):
       errors.append([
           error.error(source_file_name, expression.source_location,
@@ -111,11 +111,11 @@
   for arg in expression.function.args:
     _type_check_expression(arg, source_file_name, ir, errors)
   function = expression.function.function
-  if function in (ir_pb2.FunctionMapping.EQUALITY, ir_pb2.FunctionMapping.INEQUALITY,
-                  ir_pb2.FunctionMapping.LESS, ir_pb2.FunctionMapping.LESS_OR_EQUAL,
-                  ir_pb2.FunctionMapping.GREATER, ir_pb2.FunctionMapping.GREATER_OR_EQUAL):
+  if function in (ir_data.FunctionMapping.EQUALITY, ir_data.FunctionMapping.INEQUALITY,
+                  ir_data.FunctionMapping.LESS, ir_data.FunctionMapping.LESS_OR_EQUAL,
+                  ir_data.FunctionMapping.GREATER, ir_data.FunctionMapping.GREATER_OR_EQUAL):
     _type_check_comparison_operator(expression, source_file_name, errors)
-  elif function == ir_pb2.FunctionMapping.CHOICE:
+  elif function == ir_data.FunctionMapping.CHOICE:
     _type_check_choice_operator(expression, source_file_name, errors)
   else:
     _type_check_monomorphic_operator(expression, source_file_name, errors)
@@ -132,21 +132,21 @@
   binary = ("Left argument", "Right argument")
   n_ary = ("Argument {}".format(n) for n in range(len(args)))
   functions = {
-      ir_pb2.FunctionMapping.ADDITION: (int_result, int_args, binary, 2, 2,
+      ir_data.FunctionMapping.ADDITION: (int_result, int_args, binary, 2, 2,
                                  "operator"),
-      ir_pb2.FunctionMapping.SUBTRACTION: (int_result, int_args, binary, 2, 2,
+      ir_data.FunctionMapping.SUBTRACTION: (int_result, int_args, binary, 2, 2,
                                     "operator"),
-      ir_pb2.FunctionMapping.MULTIPLICATION: (int_result, int_args, binary, 2, 2,
+      ir_data.FunctionMapping.MULTIPLICATION: (int_result, int_args, binary, 2, 2,
                                        "operator"),
-      ir_pb2.FunctionMapping.AND: (bool_result, bool_args, binary, 2, 2, "operator"),
-      ir_pb2.FunctionMapping.OR: (bool_result, bool_args, binary, 2, 2, "operator"),
-      ir_pb2.FunctionMapping.MAXIMUM: (int_result, int_args, n_ary, 1, None,
+      ir_data.FunctionMapping.AND: (bool_result, bool_args, binary, 2, 2, "operator"),
+      ir_data.FunctionMapping.OR: (bool_result, bool_args, binary, 2, 2, "operator"),
+      ir_data.FunctionMapping.MAXIMUM: (int_result, int_args, n_ary, 1, None,
                                 "function"),
-      ir_pb2.FunctionMapping.PRESENCE: (bool_result, field_args, n_ary, 1, 1,
+      ir_data.FunctionMapping.PRESENCE: (bool_result, field_args, n_ary, 1, 1,
                                  "function"),
-      ir_pb2.FunctionMapping.UPPER_BOUND: (int_result, int_args, n_ary, 1, 1,
+      ir_data.FunctionMapping.UPPER_BOUND: (int_result, int_args, n_ary, 1, 1,
                                     "function"),
-      ir_pb2.FunctionMapping.LOWER_BOUND: (int_result, int_args, n_ary, 1, 1,
+      ir_data.FunctionMapping.LOWER_BOUND: (int_result, int_args, n_ary, 1, 1,
                                     "function"),
   }
   function = expression.function.function
@@ -180,7 +180,7 @@
   """Annotates the type of a local reference."""
   referrent = ir_util.find_object(expression.field_reference.path[-1], ir)
   assert referrent, "Local reference should be non-None after name resolution."
-  if isinstance(referrent, ir_pb2.RuntimeParameter):
+  if isinstance(referrent, ir_data.RuntimeParameter):
     parameter = referrent
     _set_expression_type_from_physical_type_reference(
         expression, parameter.physical_type_alias.atomic_type.reference, ir)
@@ -192,7 +192,7 @@
     expression.type.CopyFrom(field.read_transform.type)
     return
   if not field.type.HasField("atomic_type"):
-    expression.type.opaque.CopyFrom(ir_pb2.OpaqueType())
+    expression.type.opaque.CopyFrom(ir_data.OpaqueType())
   else:
     _set_expression_type_from_physical_type_reference(
         expression, field.type.atomic_type.reference, ir)
@@ -202,10 +202,10 @@
   """Gets the ExpressionType for a field of the given TypeDefinition.
 
   Arguments:
-    type_definition: an ir_pb2.TypeDefinition.
+    type_definition: an ir_data.TypeDefinition.
 
   Returns:
-    An ir_pb2.ExpressionType with the corresponding expression type filled in:
+    An ir_data.ExpressionType with the corresponding expression type filled in:
     for example, [prelude].UInt will result in an ExpressionType with the
     `integer` field filled in.
 
@@ -214,17 +214,17 @@
   # TODO(bolms): Add a `[value_type]` attribute for `external`s.
   if ir_util.get_boolean_attribute(type_definition.attribute,
                                    attributes.IS_INTEGER):
-    return ir_pb2.ExpressionType(integer=ir_pb2.IntegerType())
+    return ir_data.ExpressionType(integer=ir_data.IntegerType())
   elif tuple(type_definition.name.canonical_name.object_path) == ("Flag",):
     # This is a hack: the Flag type should say that it is a boolean.
-    return ir_pb2.ExpressionType(boolean=ir_pb2.BooleanType())
+    return ir_data.ExpressionType(boolean=ir_data.BooleanType())
   elif type_definition.HasField("enumeration"):
-    return ir_pb2.ExpressionType(
-        enumeration=ir_pb2.EnumType(
-            name=ir_pb2.Reference(
+    return ir_data.ExpressionType(
+        enumeration=ir_data.EnumType(
+            name=ir_data.Reference(
                 canonical_name=type_definition.name.canonical_name)))
   else:
-    return ir_pb2.ExpressionType(opaque=ir_pb2.OpaqueType())
+    return ir_data.ExpressionType(opaque=ir_data.OpaqueType())
 
 
 def _set_expression_type_from_physical_type_reference(expression,
@@ -267,8 +267,8 @@
   """Checks the type of a comparison operator (==, !=, <, >, >=, <=)."""
   # Applying less than or greater than to a boolean is likely a mistake, so
   # only equality and inequality are allowed for booleans.
-  if expression.function.function in (ir_pb2.FunctionMapping.EQUALITY,
-                                      ir_pb2.FunctionMapping.INEQUALITY):
+  if expression.function.function in (ir_data.FunctionMapping.EQUALITY,
+                                      ir_data.FunctionMapping.INEQUALITY):
     acceptable_types = ("integer", "boolean", "enumeration")
     acceptable_types_for_humans = "an integer, boolean, or enum"
   else:
@@ -440,11 +440,11 @@
   """
   errors = []
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Expression], _type_check_expression,
-      skip_descendants_of={ir_pb2.Expression},
+      ir, [ir_data.Expression], _type_check_expression,
+      skip_descendants_of={ir_data.Expression},
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.RuntimeParameter], _annotate_parameter_type,
+      ir, [ir_data.RuntimeParameter], _annotate_parameter_type,
       parameters={"errors": errors})
   return errors
 
@@ -464,19 +464,19 @@
   """
   errors = []
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.FieldLocation], _type_check_field_location,
+      ir, [ir_data.FieldLocation], _type_check_field_location,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.ArrayType, ir_pb2.Expression], _type_check_array_size,
-      skip_descendants_of={ir_pb2.AtomicType},
+      ir, [ir_data.ArrayType, ir_data.Expression], _type_check_array_size,
+      skip_descendants_of={ir_data.AtomicType},
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Field], _type_check_field_existence_condition,
+      ir, [ir_data.Field], _type_check_field_existence_condition,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.RuntimeParameter], _type_check_parameter,
+      ir, [ir_data.RuntimeParameter], _type_check_parameter,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.AtomicType], _type_check_passed_parameters,
+      ir, [ir_data.AtomicType], _type_check_passed_parameters,
       parameters={"errors": errors})
   return errors
diff --git a/compiler/front_end/write_inference.py b/compiler/front_end/write_inference.py
index bb5b1f4..ac58b34 100644
--- a/compiler/front_end/write_inference.py
+++ b/compiler/front_end/write_inference.py
@@ -16,7 +16,7 @@
 
 from compiler.front_end import attributes
 from compiler.front_end import expression_bounds
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import traverse_ir
 
@@ -36,7 +36,7 @@
   expression.
 
   Arguments:
-    expression: an ir_pb2.Expression to walk
+    expression: an ir_data.Expression to walk
 
   Returns:
     A list of indexes to find a field_reference, or None.
@@ -98,7 +98,7 @@
   it set `raw_value` to the appropriate value.
 
   Arguments:
-    expression: an ir_pb2.Expression to be inverted.
+    expression: an ir_data.Expression to be inverted.
     ir: the full IR, for looking up symbols.
 
   Returns:
@@ -109,20 +109,20 @@
   if reference_path is None:
     return None
   subexpression = expression
-  result = ir_pb2.Expression(
-      builtin_reference=ir_pb2.Reference(
-          canonical_name=ir_pb2.CanonicalName(
+  result = ir_data.Expression(
+      builtin_reference=ir_data.Reference(
+          canonical_name=ir_data.CanonicalName(
               module_file="",
               object_path=["$logical_value"]
           ),
-          source_name=[ir_pb2.Word(
+          source_name=[ir_data.Word(
               text="$logical_value",
-              source_location=ir_pb2.Location(is_synthetic=True)
+              source_location=ir_data.Location(is_synthetic=True)
           )],
-          source_location=ir_pb2.Location(is_synthetic=True)
+          source_location=ir_data.Location(is_synthetic=True)
       ),
       type=expression.type,
-      source_location=ir_pb2.Location(is_synthetic=True)
+      source_location=ir_data.Location(is_synthetic=True)
   )
 
   # This loop essentially starts with:
@@ -151,39 +151,39 @@
   # Note that any equation that can be solved here becomes part of Emboss's
   # contract, forever, so be conservative in expanding its solving capabilities!
   for index in reference_path:
-    if subexpression.function.function == ir_pb2.FunctionMapping.ADDITION:
-      result = ir_pb2.Expression(
-          function=ir_pb2.Function(
-              function=ir_pb2.FunctionMapping.SUBTRACTION,
+    if subexpression.function.function == ir_data.FunctionMapping.ADDITION:
+      result = ir_data.Expression(
+          function=ir_data.Function(
+              function=ir_data.FunctionMapping.SUBTRACTION,
               args=[
                   result,
                   subexpression.function.args[1 - index],
               ]
           ),
-          type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType())
+          type=ir_data.ExpressionType(integer=ir_data.IntegerType())
       )
-    elif subexpression.function.function == ir_pb2.FunctionMapping.SUBTRACTION:
+    elif subexpression.function.function == ir_data.FunctionMapping.SUBTRACTION:
       if index == 0:
-        result = ir_pb2.Expression(
-            function=ir_pb2.Function(
-                function=ir_pb2.FunctionMapping.ADDITION,
+        result = ir_data.Expression(
+            function=ir_data.Function(
+                function=ir_data.FunctionMapping.ADDITION,
                 args=[
                     result,
                     subexpression.function.args[1],
                 ]
             ),
-            type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType())
+            type=ir_data.ExpressionType(integer=ir_data.IntegerType())
         )
       else:
-        result = ir_pb2.Expression(
-            function=ir_pb2.Function(
-                function=ir_pb2.FunctionMapping.SUBTRACTION,
+        result = ir_data.Expression(
+            function=ir_data.Function(
+                function=ir_data.FunctionMapping.SUBTRACTION,
                 args=[
                     subexpression.function.args[0],
                     result,
                 ]
             ),
-            type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType())
+            type=ir_data.ExpressionType(integer=ir_data.IntegerType())
         )
     else:
       return None
@@ -204,7 +204,7 @@
   be writeable.
 
   Arguments:
-    field: an ir_pb2.Field to which to add a write_method.
+    field: an ir_data.Field to which to add a write_method.
     ir: The IR in which to look up field_references.
 
   Returns:
@@ -229,7 +229,7 @@
       field_reference, function_body = inverse
       referenced_field = ir_util.find_object(
           field_reference.field_reference.path[-1], ir)
-      if not isinstance(referenced_field, ir_pb2.Field):
+      if not isinstance(referenced_field, ir_data.Field):
         reference_is_read_only = True
       else:
         _add_write_method(referenced_field, ir)
@@ -250,7 +250,7 @@
 
   referenced_field = ir_util.find_object(
       field.read_transform.field_reference.path[-1], ir)
-  if not isinstance(referenced_field, ir_pb2.Field):
+  if not isinstance(referenced_field, ir_data.Field):
     # If the virtual field aliases a non-field (i.e., a parameter), it is
     # read-only.
     field.write_method.read_only = True
@@ -268,7 +268,7 @@
 
 
 def set_write_methods(ir):
-  """Sets the write_method member of all ir_pb2.Fields in ir.
+  """Sets the write_method member of all ir_data.Fields in ir.
 
   Arguments:
       ir: The IR to which to add write_methods.
@@ -276,5 +276,5 @@
   Returns:
       A list of errors, or an empty list.
   """
-  traverse_ir.fast_traverse_ir_top_down(ir, [ir_pb2.Field], _add_write_method)
+  traverse_ir.fast_traverse_ir_top_down(ir, [ir_data.Field], _add_write_method)
   return []
diff --git a/compiler/front_end/write_inference_test.py b/compiler/front_end/write_inference_test.py
index 9915196..d1de5f2 100644
--- a/compiler/front_end/write_inference_test.py
+++ b/compiler/front_end/write_inference_test.py
@@ -17,7 +17,7 @@
 import unittest
 from compiler.front_end import glue
 from compiler.front_end import write_inference
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import test_util
 
 
@@ -101,7 +101,7 @@
     self.assertEqual(
         "x",
         transform.destination.path[0].canonical_name.object_path[-1])
-    self.assertEqual(ir_pb2.FunctionMapping.SUBTRACTION,
+    self.assertEqual(ir_data.FunctionMapping.SUBTRACTION,
                      transform.function_body.function.function)
     arg0, arg1 = transform.function_body.function.args
     self.assertEqual("$logical_value",
@@ -119,7 +119,7 @@
     self.assertEqual(
         "x",
         transform.destination.path[0].canonical_name.object_path[-1])
-    self.assertEqual(ir_pb2.FunctionMapping.ADDITION,
+    self.assertEqual(ir_data.FunctionMapping.ADDITION,
                      transform.function_body.function.function)
     arg0, arg1 = transform.function_body.function.args
     self.assertEqual("$logical_value",
@@ -137,7 +137,7 @@
     self.assertEqual(
         "x",
         transform.destination.path[0].canonical_name.object_path[-1])
-    self.assertEqual(ir_pb2.FunctionMapping.SUBTRACTION,
+    self.assertEqual(ir_data.FunctionMapping.SUBTRACTION,
                      transform.function_body.function.function)
     arg0, arg1 = transform.function_body.function.args
     self.assertEqual("$logical_value",
@@ -156,7 +156,7 @@
     self.assertEqual(
         "x",
         transform.destination.path[0].canonical_name.object_path[-1])
-    self.assertEqual(ir_pb2.FunctionMapping.SUBTRACTION,
+    self.assertEqual(ir_data.FunctionMapping.SUBTRACTION,
                      transform.function_body.function.function)
     arg0, arg1 = transform.function_body.function.args
     self.assertEqual("50", arg0.constant.value)
@@ -174,11 +174,11 @@
     self.assertEqual(
         "x",
         transform.destination.path[0].canonical_name.object_path[-1])
-    self.assertEqual(ir_pb2.FunctionMapping.SUBTRACTION,
+    self.assertEqual(ir_data.FunctionMapping.SUBTRACTION,
                      transform.function_body.function.function)
     arg0, arg1 = transform.function_body.function.args
     self.assertEqual("50", arg0.constant.value)
-    self.assertEqual(ir_pb2.FunctionMapping.SUBTRACTION, arg1.function.function)
+    self.assertEqual(ir_data.FunctionMapping.SUBTRACTION, arg1.function.function)
     arg10, arg11 = arg1.function.args
     self.assertEqual("$logical_value",
                      arg10.builtin_reference.canonical_name.object_path[0])
@@ -204,7 +204,7 @@
     self.assertEqual(
         "x",
         transform.destination.path[0].canonical_name.object_path[-1])
-    self.assertEqual(ir_pb2.FunctionMapping.ADDITION,
+    self.assertEqual(ir_data.FunctionMapping.ADDITION,
                      transform.function_body.function.function)
     args = transform.function_body.function.args
     self.assertEqual("$logical_value",
diff --git a/compiler/util/BUILD b/compiler/util/BUILD
index ee59dfe..bbc2ec0 100644
--- a/compiler/util/BUILD
+++ b/compiler/util/BUILD
@@ -22,9 +22,9 @@
 )
 
 py_library(
-    name = "ir_pb2",
+    name = "ir_data",
     srcs = [
-        "ir_pb2.py",
+        "ir_data.py",
     ],
 )
 
@@ -41,7 +41,7 @@
 py_library(
     name = "ir_util",
     srcs = ["ir_util.py"],
-    deps = [":ir_pb2"],
+    deps = [":ir_data"],
 )
 
 py_test(
@@ -50,7 +50,7 @@
     python_version = "PY3",
     deps = [
         ":expression_parser",
-        ":ir_pb2",
+        ":ir_data",
         ":ir_util",
     ],
 )
@@ -60,7 +60,7 @@
     srcs = ["attribute_util.py"],
     deps = [
         ":error",
-        ":ir_pb2",
+        ":ir_data",
         ":ir_util",
         ":traverse_ir",
     ],
@@ -94,7 +94,7 @@
     python_version = "PY3",
     deps = [
         ":test_util",
-        "//compiler/util:ir_pb2",
+        "//compiler/util:ir_data",
         "//compiler/util:parser_types",
     ],
 )
@@ -103,8 +103,8 @@
     name = "traverse_ir",
     srcs = ["traverse_ir.py"],
     deps = [
-        ":ir_pb2",
         ":simple_memoizer",
+        ":ir_data",
     ],
 )
 
@@ -113,8 +113,8 @@
     srcs = ["traverse_ir_test.py"],
     python_version = "PY3",
     deps = [
-        ":ir_pb2",
         ":traverse_ir",
+        ":ir_data",
     ],
 )
 
@@ -122,7 +122,7 @@
     name = "parser_types",
     srcs = ["parser_types.py"],
     deps = [
-        ":ir_pb2",
+        ":ir_data",
     ],
 )
 
@@ -131,8 +131,8 @@
     srcs = ["parser_types_test.py"],
     python_version = "PY3",
     deps = [
-        ":ir_pb2",
         ":parser_types",
+        ":ir_data",
     ],
 )
 
diff --git a/compiler/util/attribute_util.py b/compiler/util/attribute_util.py
index 8c4e2b7..0b72084 100644
--- a/compiler/util/attribute_util.py
+++ b/compiler/util/attribute_util.py
@@ -19,7 +19,7 @@
 """
 
 from compiler.util import error
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 from compiler.util import traverse_ir
 
@@ -125,10 +125,10 @@
   This function calls _check_attributes on each attribute list in ir.
 
   Arguments:
-    ir: An ir_pb2.EmbossIr to check.
+    ir: An ir_data.EmbossIr to check.
     back_end: A string specifying the attribute qualifier to check (such as
         `cpp` for `[(cpp) namespace = "foo"]`), or None to check unqualified
-        attributes.  
+        attributes.
 
         Attributes with a different qualifier will not be checked.
     types: A map from attribute names to validators, such as:
@@ -167,12 +167,12 @@
 
   def check_type_definition(type_definition, source_file_name, errors):
     if type_definition.HasField("structure"):
-      if type_definition.addressable_unit == ir_pb2.AddressableUnit.BYTE:
+      if type_definition.addressable_unit == ir_data.AddressableUnit.BYTE:
         errors.extend(_check_attributes(
             type_definition.attribute, types, back_end, struct_attributes,
             "struct '{}'".format(
                 type_definition.name.name.text), source_file_name))
-      elif type_definition.addressable_unit == ir_pb2.AddressableUnit.BIT:
+      elif type_definition.addressable_unit == ir_data.AddressableUnit.BIT:
         errors.extend(_check_attributes(
             type_definition.attribute, types, back_end, bits_attributes,
             "bits '{}'".format(
@@ -212,16 +212,16 @@
   # TODO(bolms): Add a check that only known $default'ed attributes are
   # used.
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Module], check_module,
+      ir, [ir_data.Module], check_module,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.TypeDefinition], check_type_definition,
+      ir, [ir_data.TypeDefinition], check_type_definition,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.Field], check_struct_field,
+      ir, [ir_data.Field], check_struct_field,
       parameters={"errors": errors})
   traverse_ir.fast_traverse_ir_top_down(
-      ir, [ir_pb2.EnumValue], check_enum_value,
+      ir, [ir_data.EnumValue], check_enum_value,
       parameters={"errors": errors})
   return errors
 
@@ -234,7 +234,7 @@
   with incorrect type, and attributes whose values are not constant.
 
   Arguments:
-    attribute_list: An iterable of ir_pb2.Attribute.
+    attribute_list: An iterable of ir_data.Attribute.
     back_end: The qualifier for attributes to check, or None.
     attribute_specs: A dict of attribute names to _Attribute structures
       specifying the allowed attributes.
@@ -302,7 +302,7 @@
   defaults = defaults.copy()
   for attr in obj.attribute:
     if attr.is_default:
-      defaulted_attr = ir_pb2.Attribute()
+      defaulted_attr = ir_data.Attribute()
       defaulted_attr.CopyFrom(attr)
       defaulted_attr.is_default = False
       defaults[attr.name.text] = defaulted_attr
diff --git a/compiler/util/expression_parser.py b/compiler/util/expression_parser.py
index 41dd488..708f23b 100644
--- a/compiler/util/expression_parser.py
+++ b/compiler/util/expression_parser.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Utility function to parse text into an ir_pb2.Expression."""
+"""Utility function to parse text into an ir_data.Expression."""
 
 from compiler.front_end import module_ir
 from compiler.front_end import parser
@@ -23,7 +23,7 @@
   """Parses text as an Expression.
 
   This parses text using the expression subset of the Emboss grammar, and
-  returns an ir_pb2.Expression.  The expression only needs to be syntactically
+  returns an ir_data.Expression.  The expression only needs to be syntactically
   valid; it will not go through symbol resolution or type checking.  This
   function is not intended to be called on arbitrary input; it asserts that the
   text successfully parses, but does not return errors.
@@ -32,7 +32,7 @@
     text: The text of an Emboss expression, like "4 + 5" or "$max(1, a, b)".
 
   Returns:
-    An ir_pb2.Expression corresponding to the textual form.
+    An ir_data.Expression corresponding to the textual form.
 
   Raises:
     AssertionError if text is not a well-formed Emboss expression, and
diff --git a/compiler/util/ir_pb2.py b/compiler/util/ir_data.py
similarity index 100%
rename from compiler/util/ir_pb2.py
rename to compiler/util/ir_data.py
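The rename above is purely mechanical: the module keeps its classes and API, and only the import path changes. A minimal sketch of what callers see (illustrative only, not part of this patch):

```python
# Before this change:
#   from compiler.util import ir_pb2
# After this change:
from compiler.util import ir_data

# Class names and constructors are unchanged by the rename.
position = ir_data.Position(line=1, column=2)
```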
diff --git a/compiler/util/ir_util.py b/compiler/util/ir_util.py
index 15a2def..f86ff74 100644
--- a/compiler/util/ir_util.py
+++ b/compiler/util/ir_util.py
@@ -16,7 +16,7 @@
 
 import operator
 
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 
 
 _FIXED_SIZE_ATTRIBUTE = "fixed_size_in_bits"
@@ -150,23 +150,23 @@
   # constant expression patterns built from non-constant subexpressions, such as
   # `0 * X` or `X == X` or `3 * X == X + X + X`.  I (bolms@) am not implementing
   # any further special cases because I do not see any practical use for them.
-  if function.function == ir_pb2.FunctionMapping.UNKNOWN:
+  if function.function == ir_data.FunctionMapping.UNKNOWN:
     return None
-  if function.function == ir_pb2.FunctionMapping.AND:
+  if function.function == ir_data.FunctionMapping.AND:
     if any(value is False for value in values):
       return False
     elif any(value is None for value in values):
       return None
     else:
       return True
-  elif function.function == ir_pb2.FunctionMapping.OR:
+  elif function.function == ir_data.FunctionMapping.OR:
     if any(value is True for value in values):
       return True
     elif any(value is None for value in values):
       return None
     else:
       return False
-  elif function.function == ir_pb2.FunctionMapping.CHOICE:
+  elif function.function == ir_data.FunctionMapping.CHOICE:
     if values[0] is None:
       return None
     else:
@@ -176,18 +176,18 @@
   if any(value is None for value in values):
     return None
   functions = {
-      ir_pb2.FunctionMapping.ADDITION: operator.add,
-      ir_pb2.FunctionMapping.SUBTRACTION: operator.sub,
-      ir_pb2.FunctionMapping.MULTIPLICATION: operator.mul,
-      ir_pb2.FunctionMapping.EQUALITY: operator.eq,
-      ir_pb2.FunctionMapping.INEQUALITY: operator.ne,
-      ir_pb2.FunctionMapping.LESS: operator.lt,
-      ir_pb2.FunctionMapping.LESS_OR_EQUAL: operator.le,
-      ir_pb2.FunctionMapping.GREATER: operator.gt,
-      ir_pb2.FunctionMapping.GREATER_OR_EQUAL: operator.ge,
+      ir_data.FunctionMapping.ADDITION: operator.add,
+      ir_data.FunctionMapping.SUBTRACTION: operator.sub,
+      ir_data.FunctionMapping.MULTIPLICATION: operator.mul,
+      ir_data.FunctionMapping.EQUALITY: operator.eq,
+      ir_data.FunctionMapping.INEQUALITY: operator.ne,
+      ir_data.FunctionMapping.LESS: operator.lt,
+      ir_data.FunctionMapping.LESS_OR_EQUAL: operator.le,
+      ir_data.FunctionMapping.GREATER: operator.gt,
+      ir_data.FunctionMapping.GREATER_OR_EQUAL: operator.ge,
       # Python's max([1, 2]) == 2; max(1, 2) == 2; max([1]) == 1; but max(1)
       # throws a TypeError ("'int' object is not iterable").
-      ir_pb2.FunctionMapping.MAXIMUM: lambda *x: max(x),
+      ir_data.FunctionMapping.MAXIMUM: lambda *x: max(x),
   }
   return functions[function.function](*values)
 
@@ -200,7 +200,7 @@
   """Returns a representation of reference that can be used as a dict key.
 
   Arguments:
-    reference: An ir_pb2.Reference or ir_pb2.NameDefinition.
+    reference: An ir_data.Reference or ir_data.NameDefinition.
 
   Returns:
     A tuple of the module_file and object_path.
@@ -212,7 +212,7 @@
   """Returns a representation of field_reference that can be used as a dict key.
 
   Arguments:
-    field_reference: An ir_pb2.FieldReference
+    field_reference: An ir_data.FieldReference
 
   Returns:
     A tuple of tuples of the module_files and object_paths.
@@ -289,10 +289,10 @@
 
 def find_object_or_none(name, ir):
   """Finds the object with the given canonical name, if it exists.."""
-  if (isinstance(name, ir_pb2.Reference) or
-      isinstance(name, ir_pb2.NameDefinition)):
+  if (isinstance(name, ir_data.Reference) or
+      isinstance(name, ir_data.NameDefinition)):
     path = _hashable_form_of_name(name.canonical_name)
-  elif isinstance(name, ir_pb2.CanonicalName):
+  elif isinstance(name, ir_data.CanonicalName):
     path = _hashable_form_of_name(name)
   else:
     path = name
@@ -313,10 +313,10 @@
 
 def find_parent_object(name, ir):
   """Finds the parent object of the object with the given canonical name."""
-  if (isinstance(name, ir_pb2.Reference) or
-      isinstance(name, ir_pb2.NameDefinition)):
+  if (isinstance(name, ir_data.Reference) or
+      isinstance(name, ir_data.NameDefinition)):
     path = _hashable_form_of_name(name.canonical_name)
-  elif isinstance(name, ir_pb2.CanonicalName):
+  elif isinstance(name, ir_data.CanonicalName):
     path = _hashable_form_of_name(name)
   else:
     path = name
diff --git a/compiler/util/ir_util_test.py b/compiler/util/ir_util_test.py
index 7743c10..1afed9c 100644
--- a/compiler/util/ir_util_test.py
+++ b/compiler/util/ir_util_test.py
@@ -16,7 +16,7 @@
 
 import unittest
 from compiler.util import expression_parser
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import ir_util
 
 
@@ -31,58 +31,58 @@
     self.assertTrue(ir_util.is_constant(_parse_expression("6")))
     expression = _parse_expression("12")
     # The type information should be ignored for constants like this one.
-    expression.type.integer.CopyFrom(ir_pb2.IntegerType())
+    expression.type.integer.CopyFrom(ir_data.IntegerType())
     self.assertTrue(ir_util.is_constant(expression))
 
   def test_is_constant_boolean(self):
     self.assertTrue(ir_util.is_constant(_parse_expression("true")))
     expression = _parse_expression("true")
     # The type information should be ignored for constants like this one.
-    expression.type.boolean.CopyFrom(ir_pb2.BooleanType())
+    expression.type.boolean.CopyFrom(ir_data.BooleanType())
     self.assertTrue(ir_util.is_constant(expression))
 
   def test_is_constant_enum(self):
-    self.assertTrue(ir_util.is_constant(ir_pb2.Expression(
-        constant_reference=ir_pb2.Reference(),
-        type=ir_pb2.ExpressionType(enumeration=ir_pb2.EnumType(value="12")))))
+    self.assertTrue(ir_util.is_constant(ir_data.Expression(
+        constant_reference=ir_data.Reference(),
+        type=ir_data.ExpressionType(enumeration=ir_data.EnumType(value="12")))))
 
   def test_is_constant_integer_type(self):
-    self.assertFalse(ir_util.is_constant_type(ir_pb2.ExpressionType(
-        integer=ir_pb2.IntegerType(
+    self.assertFalse(ir_util.is_constant_type(ir_data.ExpressionType(
+        integer=ir_data.IntegerType(
             modulus="10",
             modular_value="5",
             minimum_value="-5",
             maximum_value="15"))))
-    self.assertTrue(ir_util.is_constant_type(ir_pb2.ExpressionType(
-        integer=ir_pb2.IntegerType(
+    self.assertTrue(ir_util.is_constant_type(ir_data.ExpressionType(
+        integer=ir_data.IntegerType(
             modulus="infinity",
             modular_value="5",
             minimum_value="5",
             maximum_value="5"))))
 
   def test_is_constant_boolean_type(self):
-    self.assertFalse(ir_util.is_constant_type(ir_pb2.ExpressionType(
-        boolean=ir_pb2.BooleanType())))
-    self.assertTrue(ir_util.is_constant_type(ir_pb2.ExpressionType(
-        boolean=ir_pb2.BooleanType(value=True))))
-    self.assertTrue(ir_util.is_constant_type(ir_pb2.ExpressionType(
-        boolean=ir_pb2.BooleanType(value=False))))
+    self.assertFalse(ir_util.is_constant_type(ir_data.ExpressionType(
+        boolean=ir_data.BooleanType())))
+    self.assertTrue(ir_util.is_constant_type(ir_data.ExpressionType(
+        boolean=ir_data.BooleanType(value=True))))
+    self.assertTrue(ir_util.is_constant_type(ir_data.ExpressionType(
+        boolean=ir_data.BooleanType(value=False))))
 
   def test_is_constant_enumeration_type(self):
-    self.assertFalse(ir_util.is_constant_type(ir_pb2.ExpressionType(
-        enumeration=ir_pb2.EnumType())))
-    self.assertTrue(ir_util.is_constant_type(ir_pb2.ExpressionType(
-        enumeration=ir_pb2.EnumType(value="0"))))
+    self.assertFalse(ir_util.is_constant_type(ir_data.ExpressionType(
+        enumeration=ir_data.EnumType())))
+    self.assertTrue(ir_util.is_constant_type(ir_data.ExpressionType(
+        enumeration=ir_data.EnumType(value="0"))))
 
   def test_is_constant_opaque_type(self):
-    self.assertFalse(ir_util.is_constant_type(ir_pb2.ExpressionType(
-        opaque=ir_pb2.OpaqueType())))
+    self.assertFalse(ir_util.is_constant_type(ir_data.ExpressionType(
+        opaque=ir_data.OpaqueType())))
 
   def test_constant_value_of_integer(self):
     self.assertEqual(6, ir_util.constant_value(_parse_expression("6")))
 
   def test_constant_value_of_none(self):
-    self.assertIsNone(ir_util.constant_value(ir_pb2.Expression()))
+    self.assertIsNone(ir_util.constant_value(ir_data.Expression()))
 
   def test_constant_value_of_addition(self):
     self.assertEqual(6, ir_util.constant_value(_parse_expression("2+4")))
@@ -146,27 +146,27 @@
     self.assertFalse(ir_util.constant_value(_parse_expression("false")))
 
   def test_constant_value_of_enum(self):
-    self.assertEqual(12, ir_util.constant_value(ir_pb2.Expression(
-        constant_reference=ir_pb2.Reference(),
-        type=ir_pb2.ExpressionType(enumeration=ir_pb2.EnumType(value="12")))))
+    self.assertEqual(12, ir_util.constant_value(ir_data.Expression(
+        constant_reference=ir_data.Reference(),
+        type=ir_data.ExpressionType(enumeration=ir_data.EnumType(value="12")))))
 
   def test_constant_value_of_integer_reference(self):
-    self.assertEqual(12, ir_util.constant_value(ir_pb2.Expression(
-        constant_reference=ir_pb2.Reference(),
-        type=ir_pb2.ExpressionType(
-            integer=ir_pb2.IntegerType(modulus="infinity",
+    self.assertEqual(12, ir_util.constant_value(ir_data.Expression(
+        constant_reference=ir_data.Reference(),
+        type=ir_data.ExpressionType(
+            integer=ir_data.IntegerType(modulus="infinity",
                                        modular_value="12")))))
 
   def test_constant_value_of_boolean_reference(self):
-    self.assertTrue(ir_util.constant_value(ir_pb2.Expression(
-        constant_reference=ir_pb2.Reference(),
-        type=ir_pb2.ExpressionType(boolean=ir_pb2.BooleanType(value=True)))))
+    self.assertTrue(ir_util.constant_value(ir_data.Expression(
+        constant_reference=ir_data.Reference(),
+        type=ir_data.ExpressionType(boolean=ir_data.BooleanType(value=True)))))
 
   def test_constant_value_of_builtin_reference(self):
     self.assertEqual(12, ir_util.constant_value(
-        ir_pb2.Expression(
-            builtin_reference=ir_pb2.Reference(
-                canonical_name=ir_pb2.CanonicalName(object_path=["$foo"]))),
+        ir_data.Expression(
+            builtin_reference=ir_data.Reference(
+                canonical_name=ir_data.CanonicalName(object_path=["$foo"]))),
         {"$foo": 12}))
 
   def test_constant_value_of_field_reference(self):
@@ -216,72 +216,72 @@
 
   def test_is_array(self):
     self.assertTrue(
-        ir_util.is_array(ir_pb2.Type(array_type=ir_pb2.ArrayType())))
+        ir_util.is_array(ir_data.Type(array_type=ir_data.ArrayType())))
     self.assertFalse(
-        ir_util.is_array(ir_pb2.Type(atomic_type=ir_pb2.AtomicType())))
+        ir_util.is_array(ir_data.Type(atomic_type=ir_data.AtomicType())))
 
   def test_get_attribute(self):
-    type_def = ir_pb2.TypeDefinition(attribute=[
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=ir_pb2.Expression()),
-            name=ir_pb2.Word(text="phil")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word(text="bob"),
+    type_def = ir_data.TypeDefinition(attribute=[
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=ir_data.Expression()),
+            name=ir_data.Word(text="phil")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word(text="bob"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("true")),
-            name=ir_pb2.Word(text="bob")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word(text="bob2")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("true")),
-            name=ir_pb2.Word(text="bob2"),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("true")),
+            name=ir_data.Word(text="bob")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word(text="bob2")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("true")),
+            name=ir_data.Word(text="bob2"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word(text="bob3"),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word(text="bob3"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word()),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word()),
     ])
     self.assertEqual(
-        ir_pb2.AttributeValue(expression=_parse_expression("true")),
+        ir_data.AttributeValue(expression=_parse_expression("true")),
         ir_util.get_attribute(type_def.attribute, "bob"))
     self.assertEqual(
-        ir_pb2.AttributeValue(expression=_parse_expression("false")),
+        ir_data.AttributeValue(expression=_parse_expression("false")),
         ir_util.get_attribute(type_def.attribute, "bob2"))
     self.assertEqual(None, ir_util.get_attribute(type_def.attribute, "Bob"))
     self.assertEqual(None, ir_util.get_attribute(type_def.attribute, "bob3"))
 
   def test_get_boolean_attribute(self):
-    type_def = ir_pb2.TypeDefinition(attribute=[
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=ir_pb2.Expression()),
-            name=ir_pb2.Word(text="phil")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word(text="bob"),
+    type_def = ir_data.TypeDefinition(attribute=[
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=ir_data.Expression()),
+            name=ir_data.Word(text="phil")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word(text="bob"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("true")),
-            name=ir_pb2.Word(text="bob")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word(text="bob2")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("true")),
-            name=ir_pb2.Word(text="bob2"),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("true")),
+            name=ir_data.Word(text="bob")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word(text="bob2")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("true")),
+            name=ir_data.Word(text="bob2"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word(text="bob3"),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word(text="bob3"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word()),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word()),
     ])
     self.assertTrue(ir_util.get_boolean_attribute(type_def.attribute, "bob"))
     self.assertTrue(ir_util.get_boolean_attribute(type_def.attribute,
@@ -298,86 +298,86 @@
     self.assertIsNone(ir_util.get_boolean_attribute(type_def.attribute, "bob3"))
 
   def test_get_integer_attribute(self):
-    type_def = ir_pb2.TypeDefinition(attribute=[
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(
-                expression=ir_pb2.Expression(
-                    type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType()))),
-            name=ir_pb2.Word(text="phil")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(
-                expression=ir_pb2.Expression(
-                    constant=ir_pb2.NumericConstant(value="20"),
-                    type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType(
+    type_def = ir_data.TypeDefinition(attribute=[
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(
+                expression=ir_data.Expression(
+                    type=ir_data.ExpressionType(integer=ir_data.IntegerType()))),
+            name=ir_data.Word(text="phil")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(
+                expression=ir_data.Expression(
+                    constant=ir_data.NumericConstant(value="20"),
+                    type=ir_data.ExpressionType(integer=ir_data.IntegerType(
                         modular_value="20",
                         modulus="infinity")))),
-            name=ir_pb2.Word(text="bob"),
+            name=ir_data.Word(text="bob"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(
-                expression=ir_pb2.Expression(
-                    constant=ir_pb2.NumericConstant(value="10"),
-                    type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType(
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(
+                expression=ir_data.Expression(
+                    constant=ir_data.NumericConstant(value="10"),
+                    type=ir_data.ExpressionType(integer=ir_data.IntegerType(
                         modular_value="10",
                         modulus="infinity")))),
-            name=ir_pb2.Word(text="bob")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(
-                expression=ir_pb2.Expression(
-                    constant=ir_pb2.NumericConstant(value="5"),
-                    type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType(
+            name=ir_data.Word(text="bob")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(
+                expression=ir_data.Expression(
+                    constant=ir_data.NumericConstant(value="5"),
+                    type=ir_data.ExpressionType(integer=ir_data.IntegerType(
                         modular_value="5",
                         modulus="infinity")))),
-            name=ir_pb2.Word(text="bob2")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(
-                expression=ir_pb2.Expression(
-                    constant=ir_pb2.NumericConstant(value="0"),
-                    type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType(
+            name=ir_data.Word(text="bob2")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(
+                expression=ir_data.Expression(
+                    constant=ir_data.NumericConstant(value="0"),
+                    type=ir_data.ExpressionType(integer=ir_data.IntegerType(
                         modular_value="0",
                         modulus="infinity")))),
-            name=ir_pb2.Word(text="bob2"),
+            name=ir_data.Word(text="bob2"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(
-                expression=ir_pb2.Expression(
-                    constant=ir_pb2.NumericConstant(value="30"),
-                    type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType(
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(
+                expression=ir_data.Expression(
+                    constant=ir_data.NumericConstant(value="30"),
+                    type=ir_data.ExpressionType(integer=ir_data.IntegerType(
                         modular_value="30",
                         modulus="infinity")))),
-            name=ir_pb2.Word(text="bob3"),
+            name=ir_data.Word(text="bob3"),
             is_default=True),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(
-                expression=ir_pb2.Expression(
-                    function=ir_pb2.Function(
-                        function=ir_pb2.FunctionMapping.ADDITION,
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(
+                expression=ir_data.Expression(
+                    function=ir_data.Function(
+                        function=ir_data.FunctionMapping.ADDITION,
                         args=[
-                            ir_pb2.Expression(
-                                constant=ir_pb2.NumericConstant(value="100"),
-                                type=ir_pb2.ExpressionType(
-                                    integer=ir_pb2.IntegerType(
+                            ir_data.Expression(
+                                constant=ir_data.NumericConstant(value="100"),
+                                type=ir_data.ExpressionType(
+                                    integer=ir_data.IntegerType(
                                         modular_value="100",
                                         modulus="infinity"))),
-                            ir_pb2.Expression(
-                                constant=ir_pb2.NumericConstant(value="100"),
-                                type=ir_pb2.ExpressionType(
-                                    integer=ir_pb2.IntegerType(
+                            ir_data.Expression(
+                                constant=ir_data.NumericConstant(value="100"),
+                                type=ir_data.ExpressionType(
+                                    integer=ir_data.IntegerType(
                                         modular_value="100",
                                         modulus="infinity")))
                         ]),
-                    type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType(
+                    type=ir_data.ExpressionType(integer=ir_data.IntegerType(
                         modular_value="200",
                         modulus="infinity")))),
-            name=ir_pb2.Word(text="bob4")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(
-                expression=ir_pb2.Expression(
-                    constant=ir_pb2.NumericConstant(value="40"),
-                    type=ir_pb2.ExpressionType(integer=ir_pb2.IntegerType(
+            name=ir_data.Word(text="bob4")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(
+                expression=ir_data.Expression(
+                    constant=ir_data.NumericConstant(value="40"),
+                    type=ir_data.ExpressionType(integer=ir_data.IntegerType(
                         modular_value="40",
                         modulus="infinity")))),
-            name=ir_pb2.Word()),
+            name=ir_data.Word()),
     ])
     self.assertEqual(10,
                      ir_util.get_integer_attribute(type_def.attribute, "bob"))
@@ -392,25 +392,25 @@
                                                         "bob4"))
 
   def test_get_duplicate_attribute(self):
-    type_def = ir_pb2.TypeDefinition(attribute=[
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=ir_pb2.Expression()),
-            name=ir_pb2.Word(text="phil")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("true")),
-            name=ir_pb2.Word(text="bob")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word(text="bob")),
-        ir_pb2.Attribute(
-            value=ir_pb2.AttributeValue(expression=_parse_expression("false")),
-            name=ir_pb2.Word()),
+    type_def = ir_data.TypeDefinition(attribute=[
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=ir_data.Expression()),
+            name=ir_data.Word(text="phil")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("true")),
+            name=ir_data.Word(text="bob")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word(text="bob")),
+        ir_data.Attribute(
+            value=ir_data.AttributeValue(expression=_parse_expression("false")),
+            name=ir_data.Word()),
     ])
     self.assertRaises(AssertionError, ir_util.get_attribute, type_def.attribute,
                       "bob")
 
   def test_find_object(self):
-    ir = ir_pb2.EmbossIr.from_json(
+    ir = ir_data.EmbossIr.from_json(
         """{
           "module": [
             {
@@ -492,12 +492,12 @@
         }""")
 
     # Test that find_object works with any of its four "name" types.
-    canonical_name_of_foo = ir_pb2.CanonicalName(module_file="test.emb",
+    canonical_name_of_foo = ir_data.CanonicalName(module_file="test.emb",
                                                  object_path=["Foo"])
     self.assertEqual(ir.module[0].type[0], ir_util.find_object(
-        ir_pb2.Reference(canonical_name=canonical_name_of_foo), ir))
+        ir_data.Reference(canonical_name=canonical_name_of_foo), ir))
     self.assertEqual(ir.module[0].type[0], ir_util.find_object(
-        ir_pb2.NameDefinition(canonical_name=canonical_name_of_foo), ir))
+        ir_data.NameDefinition(canonical_name=canonical_name_of_foo), ir))
     self.assertEqual(ir.module[0].type[0],
                      ir_util.find_object(canonical_name_of_foo, ir))
     self.assertEqual(ir.module[0].type[0],
@@ -533,9 +533,9 @@
 
     # Test that find_parent_object works with any of its four "name" types.
     self.assertEqual(ir.module[0], ir_util.find_parent_object(
-        ir_pb2.Reference(canonical_name=canonical_name_of_foo), ir))
+        ir_data.Reference(canonical_name=canonical_name_of_foo), ir))
     self.assertEqual(ir.module[0], ir_util.find_parent_object(
-        ir_pb2.NameDefinition(canonical_name=canonical_name_of_foo), ir))
+        ir_data.NameDefinition(canonical_name=canonical_name_of_foo), ir))
     self.assertEqual(ir.module[0],
                      ir_util.find_parent_object(canonical_name_of_foo, ir))
     self.assertEqual(ir.module[0],
@@ -554,17 +554,17 @@
   def test_hashable_form_of_reference(self):
     self.assertEqual(
         ("t.emb", "Foo", "Bar"),
-        ir_util.hashable_form_of_reference(ir_pb2.Reference(
-            canonical_name=ir_pb2.CanonicalName(module_file="t.emb",
+        ir_util.hashable_form_of_reference(ir_data.Reference(
+            canonical_name=ir_data.CanonicalName(module_file="t.emb",
                                                 object_path=["Foo", "Bar"]))))
     self.assertEqual(
         ("t.emb", "Foo", "Bar"),
-        ir_util.hashable_form_of_reference(ir_pb2.NameDefinition(
-            canonical_name=ir_pb2.CanonicalName(module_file="t.emb",
+        ir_util.hashable_form_of_reference(ir_data.NameDefinition(
+            canonical_name=ir_data.CanonicalName(module_file="t.emb",
                                                 object_path=["Foo", "Bar"]))))
 
   def test_get_base_type(self):
-    array_type_ir = ir_pb2.Type.from_json(
+    array_type_ir = ir_data.Type.from_json(
         """{
           "array_type": {
             "element_count": { "constant": { "value": "20" } },
@@ -590,7 +590,7 @@
     self.assertEqual(base_type_ir, ir_util.get_base_type(base_type_ir))
 
   def test_size_of_type_in_bits(self):
-    ir = ir_pb2.EmbossIr.from_json(
+    ir = ir_data.EmbossIr.from_json(
         """{
           "module": [{
             "type": [{
@@ -638,7 +638,7 @@
           }]
         }""")
 
-    fixed_size_type = ir_pb2.Type.from_json(
+    fixed_size_type = ir_data.Type.from_json(
         """{
           "atomic_type": {
             "reference": {
@@ -648,7 +648,7 @@
         }""")
     self.assertEqual(8, ir_util.fixed_size_of_type_in_bits(fixed_size_type, ir))
 
-    explicit_size_type = ir_pb2.Type.from_json(
+    explicit_size_type = ir_data.Type.from_json(
         """{
           "atomic_type": {
             "reference": {
@@ -665,7 +665,7 @@
     self.assertEqual(32,
                      ir_util.fixed_size_of_type_in_bits(explicit_size_type, ir))
 
-    fixed_size_array = ir_pb2.Type.from_json(
+    fixed_size_array = ir_data.Type.from_json(
         """{
           "array_type": {
             "base_type": {
@@ -686,7 +686,7 @@
     self.assertEqual(40,
                      ir_util.fixed_size_of_type_in_bits(fixed_size_array, ir))
 
-    fixed_size_2d_array = ir_pb2.Type.from_json(
+    fixed_size_2d_array = ir_data.Type.from_json(
         """{
           "array_type": {
             "base_type": {
@@ -720,7 +720,7 @@
     self.assertEqual(
         80, ir_util.fixed_size_of_type_in_bits(fixed_size_2d_array, ir))
 
-    automatic_size_array = ir_pb2.Type.from_json(
+    automatic_size_array = ir_data.Type.from_json(
         """{
           "array_type": {
             "base_type": {
@@ -749,7 +749,7 @@
     self.assertIsNone(
         ir_util.fixed_size_of_type_in_bits(automatic_size_array, ir))
 
-    variable_size_type = ir_pb2.Type.from_json(
+    variable_size_type = ir_data.Type.from_json(
         """{
           "atomic_type": {
             "reference": {
@@ -760,7 +760,7 @@
     self.assertIsNone(
         ir_util.fixed_size_of_type_in_bits(variable_size_type, ir))
 
-    no_size_type = ir_pb2.Type.from_json(
+    no_size_type = ir_data.Type.from_json(
         """{
           "atomic_type": {
             "reference": {
@@ -774,21 +774,21 @@
     self.assertIsNone(ir_util.fixed_size_of_type_in_bits(no_size_type, ir))
 
   def test_field_is_virtual(self):
-    self.assertTrue(ir_util.field_is_virtual(ir_pb2.Field()))
+    self.assertTrue(ir_util.field_is_virtual(ir_data.Field()))
 
   def test_field_is_not_virtual(self):
     self.assertFalse(ir_util.field_is_virtual(
-        ir_pb2.Field(location=ir_pb2.FieldLocation())))
+        ir_data.Field(location=ir_data.FieldLocation())))
 
   def test_field_is_read_only(self):
-    self.assertTrue(ir_util.field_is_read_only(ir_pb2.Field(
-        write_method=ir_pb2.WriteMethod(read_only=True))))
+    self.assertTrue(ir_util.field_is_read_only(ir_data.Field(
+        write_method=ir_data.WriteMethod(read_only=True))))
 
   def test_field_is_not_read_only(self):
     self.assertFalse(ir_util.field_is_read_only(
-        ir_pb2.Field(location=ir_pb2.FieldLocation())))
-    self.assertFalse(ir_util.field_is_read_only(ir_pb2.Field(
-        write_method=ir_pb2.WriteMethod())))
+        ir_data.Field(location=ir_data.FieldLocation())))
+    self.assertFalse(ir_util.field_is_read_only(ir_data.Field(
+        write_method=ir_data.WriteMethod())))
 
 
 if __name__ == "__main__":
diff --git a/compiler/util/parser_types.py b/compiler/util/parser_types.py
index 98c8ee2..5b63ffa 100644
--- a/compiler/util/parser_types.py
+++ b/compiler/util/parser_types.py
@@ -21,54 +21,54 @@
 """
 
 import collections
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 
 
 def _make_position(line, column):
-  """Makes an ir_pb2.Position from line, column ints."""
+  """Makes an ir_data.Position from line, column ints."""
   if not isinstance(line, int):
     raise ValueError("Bad line {!r}".format(line))
   elif not isinstance(column, int):
     raise ValueError("Bad column {!r}".format(column))
-  return ir_pb2.Position(line=line, column=column)
+  return ir_data.Position(line=line, column=column)
 
 
 def _parse_position(text):
-  """Parses an ir_pb2.Position from "line:column" (e.g., "1:2")."""
+  """Parses an ir_data.Position from "line:column" (e.g., "1:2")."""
   line, column = text.split(":")
   return _make_position(int(line), int(column))
 
 
 def format_position(position):
-  """formats an ir_pb2.Position to "line:column" form."""
+  """formats an ir_data.Position to "line:column" form."""
   return "{}:{}".format(position.line, position.column)
 
 
 def make_location(start, end, is_synthetic=False):
-  """Makes an ir_pb2.Location from (line, column) tuples or ir_pb2.Positions."""
+  """Makes an ir_data.Location from (line, column) tuples or ir_data.Positions."""
   if isinstance(start, tuple):
     start = _make_position(*start)
   if isinstance(end, tuple):
     end = _make_position(*end)
-  if not isinstance(start, ir_pb2.Position):
+  if not isinstance(start, ir_data.Position):
     raise ValueError("Bad start {!r}".format(start))
-  elif not isinstance(end, ir_pb2.Position):
+  elif not isinstance(end, ir_data.Position):
     raise ValueError("Bad end {!r}".format(end))
   elif start.line > end.line or (
       start.line == end.line and start.column > end.column):
     raise ValueError("Start {} is after end {}".format(format_position(start),
                                                        format_position(end)))
-  return ir_pb2.Location(start=start, end=end, is_synthetic=is_synthetic)
+  return ir_data.Location(start=start, end=end, is_synthetic=is_synthetic)
 
 
 def format_location(location):
-  """Formats an ir_pb2.Location in format "1:2-3:4" ("start-end")."""
+  """Formats an ir_data.Location in format "1:2-3:4" ("start-end")."""
   return "{}-{}".format(format_position(location.start),
                         format_position(location.end))
 
 
 def parse_location(text):
-  """Parses an ir_pb2.Location from format "1:2-3:4" ("start-end")."""
+  """Parses an ir_data.Location from format "1:2-3:4" ("start-end")."""
   start, end = text.split("-")
   return make_location(_parse_position(start), _parse_position(end))
 
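As the hunks above show, `parser_types` builds and formats `ir_data` positions and locations. A small usage sketch under this patch (illustrative only; it mirrors the docstrings and the tests below):

```python
from compiler.util import ir_data
from compiler.util import parser_types

# make_location accepts (line, column) tuples or ir_data.Position objects.
loc = parser_types.make_location((1, 2), (3, 4))
assert loc == parser_types.make_location(ir_data.Position(line=1, column=2),
                                         ir_data.Position(line=3, column=4))

# Locations round-trip through their "start-end" text form.
assert parser_types.format_location(loc) == "1:2-3:4"
assert parser_types.parse_location("1:2-3:4") == loc
```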
diff --git a/compiler/util/parser_types_test.py b/compiler/util/parser_types_test.py
index 6dbfffb..5e6fddf 100644
--- a/compiler/util/parser_types_test.py
+++ b/compiler/util/parser_types_test.py
@@ -15,7 +15,7 @@
 """Tests for parser_types."""
 
 import unittest
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import parser_types
 
 
@@ -24,42 +24,42 @@
 
   def test_format_position(self):
     self.assertEqual(
-        "1:2", parser_types.format_position(ir_pb2.Position(line=1, column=2)))
+        "1:2", parser_types.format_position(ir_data.Position(line=1, column=2)))
 
 
 class LocationTest(unittest.TestCase):
   """Tests for Location-related functions in parser_types."""
 
   def test_make_location(self):
-    self.assertEqual(ir_pb2.Location(start=ir_pb2.Position(line=1,
+    self.assertEqual(ir_data.Location(start=ir_data.Position(line=1,
                                                            column=2),
-                                     end=ir_pb2.Position(line=3,
+                                     end=ir_data.Position(line=3,
                                                          column=4),
                                      is_synthetic=False),
                      parser_types.make_location((1, 2), (3, 4)))
     self.assertEqual(
-        ir_pb2.Location(start=ir_pb2.Position(line=1,
+        ir_data.Location(start=ir_data.Position(line=1,
                                               column=2),
-                        end=ir_pb2.Position(line=3,
+                        end=ir_data.Position(line=3,
                                             column=4),
                         is_synthetic=False),
-        parser_types.make_location(ir_pb2.Position(line=1,
+        parser_types.make_location(ir_data.Position(line=1,
                                                    column=2),
-                                   ir_pb2.Position(line=3,
+                                   ir_data.Position(line=3,
                                                    column=4)))
 
   def test_make_synthetic_location(self):
     self.assertEqual(
-        ir_pb2.Location(start=ir_pb2.Position(line=1, column=2),
-                        end=ir_pb2.Position(line=3, column=4),
+        ir_data.Location(start=ir_data.Position(line=1, column=2),
+                        end=ir_data.Position(line=3, column=4),
                         is_synthetic=True),
         parser_types.make_location((1, 2), (3, 4), True))
     self.assertEqual(
-        ir_pb2.Location(start=ir_pb2.Position(line=1, column=2),
-                        end=ir_pb2.Position(line=3, column=4),
+        ir_data.Location(start=ir_data.Position(line=1, column=2),
+                        end=ir_data.Position(line=3, column=4),
                         is_synthetic=True),
-        parser_types.make_location(ir_pb2.Position(line=1, column=2),
-                                   ir_pb2.Position(line=3, column=4),
+        parser_types.make_location(ir_data.Position(line=1, column=2),
+                                   ir_data.Position(line=3, column=4),
                                    True))
 
   def test_make_location_type_checks(self):
diff --git a/compiler/util/test_util.py b/compiler/util/test_util.py
index dd80343..0d33600 100644
--- a/compiler/util/test_util.py
+++ b/compiler/util/test_util.py
@@ -14,7 +14,7 @@
 
 """Utilities for test code."""
 
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 
 
 def proto_is_superset(proto, expected_values, path=""):
@@ -50,8 +50,8 @@
   for name, expected_value in expected_values.raw_fields.items():
     field_path = "{}{}".format(path, name)
     value = getattr(proto, name)
-    if issubclass(proto.field_specs[name].type, ir_pb2.Message):
-      if isinstance(proto.field_specs[name], ir_pb2.Repeated):
+    if issubclass(proto.field_specs[name].type, ir_data.Message):
+      if isinstance(proto.field_specs[name], ir_data.Repeated):
         if len(expected_value) > len(value):
           return False, "{}[{}] missing".format(field_path,
                                                 len(getattr(proto, name)))
@@ -71,9 +71,9 @@
       # Zero-length repeated fields and not-there repeated fields are "the
       # same."
       if (expected_value != value and
-          (isinstance(proto.field_specs[name], ir_pb2.Optional) or
+          (isinstance(proto.field_specs[name], ir_data.Optional) or
            len(expected_value))):
-        if isinstance(proto.field_specs[name], ir_pb2.Repeated):
+        if isinstance(proto.field_specs[name], ir_data.Repeated):
           return False, "{} differs: found {}, expected {}".format(
               field_path, list(value), list(expected_value))
         else:
diff --git a/compiler/util/test_util_test.py b/compiler/util/test_util_test.py
index a0512aa..e82f3c7 100644
--- a/compiler/util/test_util_test.py
+++ b/compiler/util/test_util_test.py
@@ -16,7 +16,7 @@
 
 import unittest
 
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import parser_types
 from compiler.util import test_util
 
@@ -28,45 +28,45 @@
     self.assertEqual(
         (True, ""),
         test_util.proto_is_superset(
-            ir_pb2.Structure(
-                field=[ir_pb2.Field()],
+            ir_data.Structure(
+                field=[ir_data.Field()],
                 source_location=parser_types.parse_location("1:2-3:4")),
-            ir_pb2.Structure(field=[ir_pb2.Field()])))
+            ir_data.Structure(field=[ir_data.Field()])))
 
   def test_superset_extra_repeated_field(self):
     self.assertEqual(
         (True, ""),
         test_util.proto_is_superset(
-            ir_pb2.Structure(
-                field=[ir_pb2.Field(), ir_pb2.Field()],
+            ir_data.Structure(
+                field=[ir_data.Field(), ir_data.Field()],
                 source_location=parser_types.parse_location("1:2-3:4")),
-            ir_pb2.Structure(field=[ir_pb2.Field()])))
+            ir_data.Structure(field=[ir_data.Field()])))
 
   def test_superset_missing_empty_repeated_field(self):
     self.assertEqual(
         (False, "field[0] missing"),
         test_util.proto_is_superset(
-            ir_pb2.Structure(
+            ir_data.Structure(
                 field=[],
                 source_location=parser_types.parse_location("1:2-3:4")),
-            ir_pb2.Structure(field=[ir_pb2.Field(), ir_pb2.Field()])))
+            ir_data.Structure(field=[ir_data.Field(), ir_data.Field()])))
 
   def test_superset_missing_empty_optional_field(self):
     self.assertEqual((False, "source_location missing"),
                      test_util.proto_is_superset(
-                         ir_pb2.Structure(field=[]),
-                         ir_pb2.Structure(source_location=ir_pb2.Location())))
+                         ir_data.Structure(field=[]),
+                         ir_data.Structure(source_location=ir_data.Location())))
 
   def test_array_element_differs(self):
     self.assertEqual(
         (False,
          "field[0].source_location.start.line differs: found 1, expected 2"),
         test_util.proto_is_superset(
-            ir_pb2.Structure(
-                field=[ir_pb2.Field(source_location=parser_types.parse_location(
+            ir_data.Structure(
+                field=[ir_data.Field(source_location=parser_types.parse_location(
                     "1:2-3:4"))]),
-            ir_pb2.Structure(
-                field=[ir_pb2.Field(source_location=parser_types.parse_location(
+            ir_data.Structure(
+                field=[ir_data.Field(source_location=parser_types.parse_location(
                     "2:2-3:4"))])))
 
   def test_equal(self):
@@ -79,9 +79,9 @@
     self.assertEqual(
         (False, "source_location missing"),
         test_util.proto_is_superset(
-            ir_pb2.Structure(field=[ir_pb2.Field()]),
-            ir_pb2.Structure(
-                field=[ir_pb2.Field()],
+            ir_data.Structure(field=[ir_data.Field()]),
+            ir_data.Structure(
+                field=[ir_data.Field()],
                 source_location=parser_types.parse_location("1:2-3:4"))))
 
   def test_optional_field_differs(self):
@@ -93,8 +93,8 @@
   def test_non_message_repeated_field_equal(self):
     self.assertEqual((True, ""),
                      test_util.proto_is_superset(
-                         ir_pb2.CanonicalName(object_path=[]),
-                         ir_pb2.CanonicalName(object_path=[])))
+                         ir_data.CanonicalName(object_path=[]),
+                         ir_data.CanonicalName(object_path=[])))
 
   def test_non_message_repeated_field_missing_element(self):
     self.assertEqual(
@@ -102,8 +102,8 @@
             none=[],
             a=[u"a"])),
         test_util.proto_is_superset(
-            ir_pb2.CanonicalName(object_path=[]),
-            ir_pb2.CanonicalName(object_path=[u"a"])))
+            ir_data.CanonicalName(object_path=[]),
+            ir_data.CanonicalName(object_path=[u"a"])))
 
   def test_non_message_repeated_field_element_differs(self):
     self.assertEqual(
@@ -111,8 +111,8 @@
             aa=[u"a", u"a"],
             ab=[u"a", u"b"])),
         test_util.proto_is_superset(
-            ir_pb2.CanonicalName(object_path=[u"a", u"a"]),
-            ir_pb2.CanonicalName(object_path=[u"a", u"b"])))
+            ir_data.CanonicalName(object_path=[u"a", u"a"]),
+            ir_data.CanonicalName(object_path=[u"a", u"b"])))
 
   def test_non_message_repeated_field_extra_element(self):
     # For repeated fields of int/bool/str values, the entire list is treated as
@@ -122,16 +122,16 @@
          "object_path differs: found {!r}, expected {!r}".format(
              [u"a", u"a"], [u"a"])),
         test_util.proto_is_superset(
-            ir_pb2.CanonicalName(object_path=["a", "a"]),
-            ir_pb2.CanonicalName(object_path=["a"])))
+            ir_data.CanonicalName(object_path=["a", "a"]),
+            ir_data.CanonicalName(object_path=["a"])))
 
   def test_non_message_repeated_field_no_expected_value(self):
     # When a repeated field is empty, it is the same as if it were entirely
     # missing -- there is no way to differentiate those two conditions.
     self.assertEqual((True, ""),
                      test_util.proto_is_superset(
-                         ir_pb2.CanonicalName(object_path=["a", "a"]),
-                         ir_pb2.CanonicalName(object_path=[])))
+                         ir_data.CanonicalName(object_path=["a", "a"]),
+                         ir_data.CanonicalName(object_path=[])))
 
 
 class DictFileReaderTest(unittest.TestCase):
diff --git a/compiler/util/traverse_ir.py b/compiler/util/traverse_ir.py
index 3bd95c3..78efe46 100644
--- a/compiler/util/traverse_ir.py
+++ b/compiler/util/traverse_ir.py
@@ -16,7 +16,7 @@
 
 import inspect
 
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import simple_memoizer
 
 
@@ -175,25 +175,25 @@
   # IR.  With branch culling, that goes down to 6% (0.7s out of 12.2s).
 
   # type_to_fields is a map of types to maps of field names to field types.
-  # That is, type_to_fields[ir_pb2.Module]["type"] == ir_pb2.TypeDefinition.
+  # That is, type_to_fields[ir_data.Module]["type"] == ir_data.TypeDefinition.
   type_to_fields = {}
 
   # Later, we need to know which fields are singular and which are repeated,
   # because the access methods are not uniform.  This maps (type, field_name)
-  # tuples to descriptor labels: type_fields_to_cardinality[ir_pb2.Module,
-  # "type"] == ir_pb2.Repeated.
+  # tuples to descriptor labels: type_fields_to_cardinality[ir_data.Module,
+  # "type"] == ir_data.Repeated.
   type_fields_to_cardinality = {}
 
   # Fill out the above maps by recursively walking the IR type tree, starting
   # from the root.
-  types_to_check = [ir_pb2.EmbossIr]
+  types_to_check = [ir_data.EmbossIr]
   while types_to_check:
     type_to_check = types_to_check.pop()
     if type_to_check in type_to_fields:
       continue
     fields = {}
     for field_name, field_type in type_to_check.field_specs.items():
-      if issubclass(field_type.type, ir_pb2.Message):
+      if issubclass(field_type.type, ir_data.Message):
         fields[field_name] = field_type.type
         types_to_check.append(field_type.type)
         type_fields_to_cardinality[type_to_check, field_name] = (
@@ -201,10 +201,10 @@
     type_to_fields[type_to_check] = fields
 
   # type_to_descendant_types is a map of all types that can be reached from a
-  # particular type.  After the setup, type_to_descendant_types[ir_pb2.EmbossIr]
-  # == set(<all types>) and type_to_descendant_types[ir_pb2.Reference] ==
-  # {ir_pb2.CanonicalName, ir_pb2.Word, ir_pb2.Location} and
-  # type_to_descendant_types[ir_pb2.Word] == set().
+  # particular type.  After the setup, type_to_descendant_types[ir_data.EmbossIr]
+  # == set(<all types>) and type_to_descendant_types[ir_data.Reference] ==
+  # {ir_data.CanonicalName, ir_data.Word, ir_data.Location} and
+  # type_to_descendant_types[ir_data.Word] == set().
   #
   # The while loop basically ors in the known descendants of each known
   # descendant of each type until the dict stops changing, which is a bit
@@ -240,7 +240,7 @@
           # Singular and repeated fields go to different lists, so that they can
           # be handled separately.
           if (type_fields_to_cardinality[current_node_type, field_name] ==
-              ir_pb2.Optional):
+              ir_data.Optional):
             singular_fields_to_scan.append(field_name)
           else:
             repeated_fields_to_scan.append(field_name)
@@ -273,14 +273,14 @@
   `pattern`.  For every node which matches `pattern`, `action` will be called.
 
   `pattern` is just a list of node types.  For example, to execute `print` on
-  every `ir_pb2.Word` in the IR:
+  every `ir_data.Word` in the IR:
 
-      fast_traverse_ir_top_down(ir, [ir_pb2.Word], print)
+      fast_traverse_ir_top_down(ir, [ir_data.Word], print)
 
   If more than one type is specified, then each one must be found inside the
   previous.  For example, to print only the Words inside of import statements:
 
-      fast_traverse_ir_top_down(ir, [ir_pb2.Import, ir_pb2.Word], print)
+      fast_traverse_ir_top_down(ir, [ir_data.Import, ir_data.Word], print)
 
   The optional arguments provide additional control.
 
@@ -289,8 +289,8 @@
   nodes with any ancestor node whose type is in `skip_descendants_of`.  For
   example, to `do_something` only on outermost `Expression`s:
 
-      fast_traverse_ir_top_down(ir, [ir_pb2.Expression], do_something,
-                                skip_descendants_of={ir_pb2.Expression})
+      fast_traverse_ir_top_down(ir, [ir_data.Expression], do_something,
+                                skip_descendants_of={ir_data.Expression})
 
   `parameters` specifies a dictionary of initial parameters which can be passed
   as arguments to `action` and `incidental_actions`.  Note that the parameters
@@ -303,7 +303,7 @@
           errors.append(error_for_structure(structure))
 
       errors = []
-      fast_traverse_ir_top_down(ir, [ir_pb2.Structure], check_structure,
+      fast_traverse_ir_top_down(ir, [ir_data.Structure], check_structure,
                                 parameters={"errors": errors})
       if errors:
         print("Errors: {}".format(errors))
@@ -324,21 +324,21 @@
           print("Found {} not in any field".format(expression))
 
       fast_traverse_ir_top_down(
-          ir, [ir_pb2.Expression], do_something,
-          incidental_actions={ir_pb2.Field: lambda f: {"field_name": f.name}})
+          ir, [ir_data.Expression], do_something,
+          incidental_actions={ir_data.Field: lambda f: {"field_name": f.name}})
 
   (The `action` may also return a dict in the same way.)
 
   A few `incidental_actions` are built into `fast_traverse_ir_top_down`, so
   that certain parameters are contextually available with well-known names:
 
-      ir: The complete IR (the root ir_pb2.EmbossIr node).
+      ir: The complete IR (the root ir_data.EmbossIr node).
       source_file_name: The file name from which the current node was sourced.
       type_definition: The most-immediate ancestor type definition.
       field: The field containing the current node, if any.
 
   Arguments:
-    ir: An ir_pb2.Ir object to walk.
+    ir: An ir_data.Ir object to walk.
     pattern: A list of node types to match.
     action: A callable, which will be called on nodes matching `pattern`.
     incidental_actions: A dict of node types to callables, which can be used to
@@ -351,10 +351,10 @@
     None
   """
   all_incidental_actions = {
-      ir_pb2.EmbossIr: [_emboss_ir_action],
-      ir_pb2.Module: [_module_action],
-      ir_pb2.TypeDefinition: [_type_definition_action],
-      ir_pb2.Field: [_field_action],
+      ir_data.EmbossIr: [_emboss_ir_action],
+      ir_data.Module: [_module_action],
+      ir_data.TypeDefinition: [_type_definition_action],
+      ir_data.Field: [_field_action],
   }
   if incidental_actions:
     for key, incidental_action in incidental_actions.items():
@@ -376,7 +376,7 @@
   It does not have any built-in incidental actions.
 
   Arguments:
-    node: An ir_pb2.Ir object to walk.
+    node: An ir_data.Ir object to walk.
     pattern: A list of node types to match.
     action: A callable, which will be called on nodes matching `pattern`.
     incidental_actions: A dict of node types to callables, which can be used to
diff --git a/compiler/util/traverse_ir_test.py b/compiler/util/traverse_ir_test.py
index 2e35a31..64da8f6 100644
--- a/compiler/util/traverse_ir_test.py
+++ b/compiler/util/traverse_ir_test.py
@@ -18,10 +18,10 @@
 
 import unittest
 
-from compiler.util import ir_pb2
+from compiler.util import ir_data
 from compiler.util import traverse_ir
 
-_EXAMPLE_IR = ir_pb2.EmbossIr.from_json("""{
+_EXAMPLE_IR = ir_data.EmbossIr.from_json("""{
 "module": [
   {
     "type": [
@@ -217,7 +217,7 @@
   def test_filter_on_type(self):
     constants = []
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.NumericConstant], _record_constant,
+        _EXAMPLE_IR, [ir_data.NumericConstant], _record_constant,
         parameters={"constant_list": constants})
     self.assertEqual(
         _count_entries([0, 8, 8, 8, 16, 24, 32, 16, 32, 320, 1, 1, 1, 64]),
@@ -227,7 +227,7 @@
     constants = []
     traverse_ir.fast_traverse_ir_top_down(
         _EXAMPLE_IR,
-        [ir_pb2.Function, ir_pb2.Expression, ir_pb2.NumericConstant],
+        [ir_data.Function, ir_data.Expression, ir_data.NumericConstant],
         _record_constant,
         parameters={"constant_list": constants})
     self.assertEqual([1, 1], constants)
@@ -235,22 +235,22 @@
   def test_filter_on_type_star_type(self):
     struct_constants = []
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.Structure, ir_pb2.NumericConstant],
+        _EXAMPLE_IR, [ir_data.Structure, ir_data.NumericConstant],
         _record_constant,
         parameters={"constant_list": struct_constants})
     self.assertEqual(_count_entries([0, 8, 8, 8, 16, 24, 32, 16, 32, 320]),
                      _count_entries(struct_constants))
     enum_constants = []
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.Enum, ir_pb2.NumericConstant], _record_constant,
+        _EXAMPLE_IR, [ir_data.Enum, ir_data.NumericConstant], _record_constant,
         parameters={"constant_list": enum_constants})
     self.assertEqual(_count_entries([1, 1, 1]), _count_entries(enum_constants))
 
   def test_filter_on_not_type(self):
     notstruct_constants = []
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.NumericConstant], _record_constant,
-        skip_descendants_of=(ir_pb2.Structure,),
+        _EXAMPLE_IR, [ir_data.NumericConstant], _record_constant,
+        skip_descendants_of=(ir_data.Structure,),
         parameters={"constant_list": notstruct_constants})
     self.assertEqual(_count_entries([1, 1, 1, 64]),
                      _count_entries(notstruct_constants))
@@ -258,7 +258,7 @@
   def test_field_is_populated(self):
     constants = []
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.Field, ir_pb2.NumericConstant],
+        _EXAMPLE_IR, [ir_data.Field, ir_data.NumericConstant],
         _record_field_name_and_constant,
         parameters={"constant_list": constants})
     self.assertEqual(_count_entries([
@@ -270,7 +270,7 @@
   def test_file_name_is_populated(self):
     constants = []
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.NumericConstant], _record_file_name_and_constant,
+        _EXAMPLE_IR, [ir_data.NumericConstant], _record_file_name_and_constant,
         parameters={"constant_list": constants})
     self.assertEqual(_count_entries([
         ("t.emb", 0), ("t.emb", 8), ("t.emb", 8), ("t.emb", 8), ("t.emb", 16),
@@ -281,7 +281,7 @@
   def test_type_definition_is_populated(self):
     constants = []
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.NumericConstant], _record_kind_and_constant,
+        _EXAMPLE_IR, [ir_data.NumericConstant], _record_kind_and_constant,
         parameters={"constant_list": constants})
     self.assertEqual(_count_entries([
         ("structure", 0), ("structure", 8), ("structure", 8), ("structure", 8),
@@ -305,11 +305,11 @@
       call_counts["not"] += 1
 
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.Field, ir_pb2.Type], check_field_is_populated)
+        _EXAMPLE_IR, [ir_data.Field, ir_data.Type], check_field_is_populated)
     self.assertEqual(7, call_counts["populated"])
 
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.Enum, ir_pb2.EnumValue],
+        _EXAMPLE_IR, [ir_data.Enum, ir_data.EnumValue],
         check_field_is_not_populated)
     self.assertEqual(2, call_counts["not"])
 
@@ -323,9 +323,9 @@
       }
 
     traverse_ir.fast_traverse_ir_top_down(
-        _EXAMPLE_IR, [ir_pb2.NumericConstant],
+        _EXAMPLE_IR, [ir_data.NumericConstant],
         _record_location_parameter_and_constant,
-        incidental_actions={ir_pb2.Field: pass_location_down},
+        incidental_actions={ir_data.Field: pass_location_down},
         parameters={"constant_list": constants, "location": None})
     self.assertEqual(_count_entries([
         ((0, 8), 0), ((0, 8), 8), ((8, 16), 8), ((8, 16), 8), ((8, 16), 16),
diff --git a/doc/design.md b/doc/design.md
index afa3ea1..13e4b94 100644
--- a/doc/design.md
+++ b/doc/design.md
@@ -8,9 +8,9 @@
 The Emboss compiler is divided into separate "front end" and "back end"
 programs.  The front end parses Emboss files (`.emb` files) and produces a
 stable intermediate representation (IR), which is consumed by the back ends.
-This IR is defined in [public/ir_pb2.py][ir_pb2_py].
+This IR is defined in [public/ir_data.py][ir_data_py].
 
-[ir_pb2_py]: public/ir_pb2.py
+[ir_data_py]: public/ir_data.py
 
 The back ends read the IR and emit code to view and manipulate Emboss-defined
 data structures.  Currently, only a C++ back-end exists.
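
To make the front end/back end data flow concrete, here is a minimal sketch of how a back end consumes the serialized IR after this rename. It relies only on `EmbossIr.from_json()` and the top-level `module` list, both of which appear elsewhere in this change; reading from stdin and the elided emit step are illustrative assumptions.

    import sys

    from compiler.util import ir_data

    # The front end serializes the IR as JSON; a back end deserializes it
    # into the plain ir_data classes and walks it.
    ir = ir_data.EmbossIr.from_json(sys.stdin.read())

    for module in ir.module:
      # A real back end would emit code for each type in this module here.
      ...
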
diff --git a/doc/design_docs/archive/next_keyword.md b/doc/design_docs/archive/next_keyword.md
index 2f4c554..d5c44da 100644
--- a/doc/design_docs/archive/next_keyword.md
+++ b/doc/design_docs/archive/next_keyword.md
@@ -87,9 +87,9 @@
 4.  Add a new compiler pass before `synthetics.synthesize_fields`, to replace
     the new symbol with the expanded representation.  This should be relatively
     straightforward -- something that uses `fast_traverse_ir_top_down()` to
-    find all `ir_pb2.Structure` elements in the IR, then iterates over the
+    find all `ir_data.Structure` elements in the IR, then iterates over the
     field offsets within each structure, and recursively replaces any
-    `ir_pb2.Expression`s with a
+    `ir_data.Expression`s with a
     `builtin_reference.canonical_name.object_path[0]` equal to
     `"$new_symbol"`.  It would probably be useful to make
     `traverse_ir._fast_traverse_proto_top_down()` into a public function, so
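A rough sketch of the pass described in the hunk above, using only the renamed `ir_data` types. The helper names are hypothetical, the expansion itself is elided, and it assumes unset IR sub-fields read back as empty (as they did with the protobuf-backed classes):

    from compiler.util import ir_data
    from compiler.util import traverse_ir

    def _maybe_expand_new_symbol(expression):
      # `expression` is an ir_data.Expression reached inside a Structure.
      path = expression.builtin_reference.canonical_name.object_path
      if path and path[0] == "$new_symbol":
        ...  # splice the expanded offset expression in here

    def replace_new_symbol(ir):
      # Visit every Expression nested inside a Structure, per the pass above.
      traverse_ir.fast_traverse_ir_top_down(
          ir, [ir_data.Structure, ir_data.Expression], _maybe_expand_new_symbol)
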
diff --git a/doc/design_docs/value_of_enum_function.md b/doc/design_docs/value_of_enum_function.md
index 4b73016..e28849c 100644
--- a/doc/design_docs/value_of_enum_function.md
+++ b/doc/design_docs/value_of_enum_function.md
@@ -60,7 +60,7 @@
 `$from_int()` would require changes in pretty much the same places, but a few
 of them would be significantly more complex.
 
-Basically anywhere that walks or evaluates an `ir_pb2.Expression` would need to
+Basically anywhere that walks or evaluates an `ir_data.Expression` would need to
 be updated to know about the new function.  A probably-incomplete list:
 
     compiler/back_end/header_generator.py