diff --git a/ui/grpc_generated/google/protobuf/descriptor.ts b/ui/grpc_generated/google/protobuf/descriptor.ts index 0ebf9063ac..680c10f4df 100644 --- a/ui/grpc_generated/google/protobuf/descriptor.ts +++ b/ui/grpc_generated/google/protobuf/descriptor.ts @@ -4,6 +4,98 @@ import _m0 from "protobufjs/minimal"; export const protobufPackage = "google.protobuf"; +/** The full set of known editions. */ +export enum Edition { + /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */ + EDITION_UNKNOWN = 0, + /** + * EDITION_PROTO2 - Legacy syntax "editions". These pre-date editions, but behave much like + * distinct editions. These can't be used to specify the edition of proto + * files, but feature definitions must supply proto2/proto3 defaults for + * backwards compatibility. + */ + EDITION_PROTO2 = 998, + EDITION_PROTO3 = 999, + /** + * EDITION_2023 - Editions that have been released. The specific values are arbitrary and + * should not be depended on, but they will always be time-ordered for easy + * comparison. + */ + EDITION_2023 = 1000, + /** + * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution. These should not be + * used or relyed on outside of tests. + */ + EDITION_1_TEST_ONLY = 1, + EDITION_2_TEST_ONLY = 2, + EDITION_99997_TEST_ONLY = 99997, + EDITION_99998_TEST_ONLY = 99998, + EDITION_99999_TEST_ONLY = 99999, + UNRECOGNIZED = -1, +} + +export function editionFromJSON(object: any): Edition { + switch (object) { + case 0: + case "EDITION_UNKNOWN": + return Edition.EDITION_UNKNOWN; + case 998: + case "EDITION_PROTO2": + return Edition.EDITION_PROTO2; + case 999: + case "EDITION_PROTO3": + return Edition.EDITION_PROTO3; + case 1000: + case "EDITION_2023": + return Edition.EDITION_2023; + case 1: + case "EDITION_1_TEST_ONLY": + return Edition.EDITION_1_TEST_ONLY; + case 2: + case "EDITION_2_TEST_ONLY": + return Edition.EDITION_2_TEST_ONLY; + case 99997: + case "EDITION_99997_TEST_ONLY": + return Edition.EDITION_99997_TEST_ONLY; + case 99998: + case "EDITION_99998_TEST_ONLY": + return Edition.EDITION_99998_TEST_ONLY; + case 99999: + case "EDITION_99999_TEST_ONLY": + return Edition.EDITION_99999_TEST_ONLY; + case -1: + case "UNRECOGNIZED": + default: + return Edition.UNRECOGNIZED; + } +} + +export function editionToJSON(object: Edition): string { + switch (object) { + case Edition.EDITION_UNKNOWN: + return "EDITION_UNKNOWN"; + case Edition.EDITION_PROTO2: + return "EDITION_PROTO2"; + case Edition.EDITION_PROTO3: + return "EDITION_PROTO3"; + case Edition.EDITION_2023: + return "EDITION_2023"; + case Edition.EDITION_1_TEST_ONLY: + return "EDITION_1_TEST_ONLY"; + case Edition.EDITION_2_TEST_ONLY: + return "EDITION_2_TEST_ONLY"; + case Edition.EDITION_99997_TEST_ONLY: + return "EDITION_99997_TEST_ONLY"; + case Edition.EDITION_99998_TEST_ONLY: + return "EDITION_99998_TEST_ONLY"; + case Edition.EDITION_99999_TEST_ONLY: + return "EDITION_99999_TEST_ONLY"; + case Edition.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + /** * The protocol compiler can output a FileDescriptorSet containing the .proto * files it parses. @@ -51,8 +143,8 @@ export interface FileDescriptorProto { * If `edition` is present, this value must be "editions". */ syntax: string; - /** The edition of the proto file, which is an opaque string. */ - edition: string; + /** The edition of the proto file. */ + edition: Edition; } /** Describes a message type. 
*/ @@ -96,6 +188,84 @@ export interface DescriptorProto_ReservedRange { export interface ExtensionRangeOptions { /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; + /** + * For external users: DO NOT USE. We are in the process of open sourcing + * extension declaration and executing internal cleanups before it can be + * used externally. + */ + declaration: ExtensionRangeOptions_Declaration[]; + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; + /** + * The verification state of the range. + * TODO: flip the default to DECLARATION once all empty ranges + * are marked as UNVERIFIED. + */ + verification: ExtensionRangeOptions_VerificationState; +} + +/** The verification state of the extension range. */ +export enum ExtensionRangeOptions_VerificationState { + /** DECLARATION - All the extensions of the range must be declared. */ + DECLARATION = 0, + UNVERIFIED = 1, + UNRECOGNIZED = -1, +} + +export function extensionRangeOptions_VerificationStateFromJSON(object: any): ExtensionRangeOptions_VerificationState { + switch (object) { + case 0: + case "DECLARATION": + return ExtensionRangeOptions_VerificationState.DECLARATION; + case 1: + case "UNVERIFIED": + return ExtensionRangeOptions_VerificationState.UNVERIFIED; + case -1: + case "UNRECOGNIZED": + default: + return ExtensionRangeOptions_VerificationState.UNRECOGNIZED; + } +} + +export function extensionRangeOptions_VerificationStateToJSON(object: ExtensionRangeOptions_VerificationState): string { + switch (object) { + case ExtensionRangeOptions_VerificationState.DECLARATION: + return "DECLARATION"; + case ExtensionRangeOptions_VerificationState.UNVERIFIED: + return "UNVERIFIED"; + case ExtensionRangeOptions_VerificationState.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface ExtensionRangeOptions_Declaration { + /** The extension number declared within the extension range. */ + number: number; + /** + * The fully-qualified name of the extension field. There must be a leading + * dot in front of the full name. + */ + fullName: string; + /** + * The fully-qualified type name of the extension field. Unlike + * Metadata.type, Declaration.type must have a leading dot for messages + * and enums. + */ + type: string; + /** + * If true, indicates that the number is reserved in the extension range, + * and any extension field with the number will fail to compile. Set this + * when a declared extension field is deleted. + */ + reserved: boolean; + /** + * If true, indicates that the extension must be defined as repeated. + * Otherwise the extension must be defined as optional. + */ + repeated: boolean; } /** Describes a field within a message. */ @@ -193,9 +363,10 @@ export enum FieldDescriptorProto_Type { TYPE_STRING = 9, /** * TYPE_GROUP - Tag-delimited aggregate. - * Group type is deprecated and not supported in proto3. However, Proto3 + * Group type is deprecated and not supported after google.protobuf. However, Proto3 * implementations should still be able to parse the group wire format and - * treat group fields as unknown fields. + * treat group fields as unknown fields. In Editions, the group wire format + * can be enabled via the `message_encoding` feature. */ TYPE_GROUP = 10, /** TYPE_MESSAGE - Length-delimited aggregate. 
*/ @@ -323,8 +494,13 @@ export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Typ export enum FieldDescriptorProto_Label { /** LABEL_OPTIONAL - 0 is reserved for errors */ LABEL_OPTIONAL = 1, - LABEL_REQUIRED = 2, LABEL_REPEATED = 3, + /** + * LABEL_REQUIRED - The required label is only allowed in google.protobuf. In proto3 and Editions + * it's explicitly prohibited. In Editions, the `field_presence` feature + * can be used to get this behavior. + */ + LABEL_REQUIRED = 2, UNRECOGNIZED = -1, } @@ -333,12 +509,12 @@ export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptor case 1: case "LABEL_OPTIONAL": return FieldDescriptorProto_Label.LABEL_OPTIONAL; - case 2: - case "LABEL_REQUIRED": - return FieldDescriptorProto_Label.LABEL_REQUIRED; case 3: case "LABEL_REPEATED": return FieldDescriptorProto_Label.LABEL_REPEATED; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; case -1: case "UNRECOGNIZED": default: @@ -350,10 +526,10 @@ export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_La switch (object) { case FieldDescriptorProto_Label.LABEL_OPTIONAL: return "LABEL_OPTIONAL"; - case FieldDescriptorProto_Label.LABEL_REQUIRED: - return "LABEL_REQUIRED"; case FieldDescriptorProto_Label.LABEL_REPEATED: return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; case FieldDescriptorProto_Label.UNRECOGNIZED: default: return "UNRECOGNIZED"; @@ -547,6 +723,10 @@ export interface FileOptions { * determining the ruby package. */ rubyPackage: string; + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; /** * The parser stores options it doesn't recognize here. * See the documentation for the "Options" section above. @@ -665,12 +845,16 @@ export interface MessageOptions { * This should only be used as a temporary measure against broken builds due * to the change in behavior for JSON field name conflicts. * - * TODO(b/261750190) This is legacy behavior we plan to remove once downstream + * TODO This is legacy behavior we plan to remove once downstream * teams have had time to migrate. * * @deprecated */ deprecatedLegacyJsonFieldConflicts: boolean; + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } @@ -679,8 +863,10 @@ export interface FieldOptions { /** * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific - * options below. This option is not yet implemented in the open source - * release -- sorry, we'll try to include it in a future version! + * options below. This option is only implemented to support use of + * [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + * type "bytes" in the open source release -- sorry, we'll try to include + * other types in a future version! */ ctype: FieldOptions_CType; /** @@ -688,7 +874,9 @@ export interface FieldOptions { * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicit setting it to - * false will avoid using packed encoding. + * false will avoid using packed encoding. 
This option is prohibited in + * Editions, but the `repeated_field_encoding` feature can be used to control + * the behavior. */ packed: boolean; /** @@ -759,7 +947,12 @@ export interface FieldOptions { */ debugRedact: boolean; retention: FieldOptions_OptionRetention; - target: FieldOptions_OptionTargetType; + targets: FieldOptions_OptionTargetType[]; + editionDefaults: FieldOptions_EditionDefault[]; + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } @@ -767,6 +960,14 @@ export interface FieldOptions { export enum FieldOptions_CType { /** STRING - Default mode. */ STRING = 0, + /** + * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type + * "bytes". It indicates that in C++, the data should be stored in a Cord + * instead of a string. For very large strings, this may reduce memory + * fragmentation. It may also allow better performance when parsing from a + * Cord, or when parsing with aliasing enabled, as the parsed Cord may then + * alias the original buffer. + */ CORD = 1, STRING_PIECE = 2, UNRECOGNIZED = -1, @@ -977,7 +1178,17 @@ export function fieldOptions_OptionTargetTypeToJSON(object: FieldOptions_OptionT } } +export interface FieldOptions_EditionDefault { + edition: Edition; + /** Textproto value. */ + value: string; +} + export interface OneofOptions { + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } @@ -1000,12 +1211,16 @@ export interface EnumOptions { * and strips underscored from the fields before comparison in proto3 only. * The new behavior takes `json_name` into account and applies to proto2 as * well. - * TODO(b/261750190) Remove this legacy behavior once downstream teams have + * TODO Remove this legacy behavior once downstream teams have * had time to migrate. * * @deprecated */ deprecatedLegacyJsonFieldConflicts: boolean; + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } @@ -1018,11 +1233,25 @@ export interface EnumValueOptions { * this is a formalization for deprecating enum values. */ deprecated: boolean; + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; + /** + * Indicate that fields annotated with this enum value should not be printed + * out when using debug formats, e.g. when the field contains sensitive + * credentials. + */ + debugRedact: boolean; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } export interface ServiceOptions { + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; /** * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations @@ -1043,6 +1272,10 @@ export interface MethodOptions { */ deprecated: boolean; idempotencyLevel: MethodOptions_IdempotencyLevel; + /** Any features defined in the specific edition. */ + features: + | FeatureSet + | undefined; /** The parser stores options it doesn't recognize here. See above. 
*/ uninterpretedOption: UninterpretedOption[]; } @@ -1127,6 +1360,294 @@ export interface UninterpretedOption_NamePart { isExtension: boolean; } +/** + * TODO Enums in C++ gencode (and potentially other languages) are + * not well scoped. This means that each of the feature enums below can clash + * with each other. The short names we've chosen maximize call-site + * readability, but leave us very open to this scenario. A future feature will + * be designed and implemented to handle this, hopefully before we ever hit a + * conflict here. + */ +export interface FeatureSet { + fieldPresence: FeatureSet_FieldPresence; + enumType: FeatureSet_EnumType; + repeatedFieldEncoding: FeatureSet_RepeatedFieldEncoding; + utf8Validation: FeatureSet_Utf8Validation; + messageEncoding: FeatureSet_MessageEncoding; + jsonFormat: FeatureSet_JsonFormat; +} + +export enum FeatureSet_FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0, + EXPLICIT = 1, + IMPLICIT = 2, + LEGACY_REQUIRED = 3, + UNRECOGNIZED = -1, +} + +export function featureSet_FieldPresenceFromJSON(object: any): FeatureSet_FieldPresence { + switch (object) { + case 0: + case "FIELD_PRESENCE_UNKNOWN": + return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN; + case 1: + case "EXPLICIT": + return FeatureSet_FieldPresence.EXPLICIT; + case 2: + case "IMPLICIT": + return FeatureSet_FieldPresence.IMPLICIT; + case 3: + case "LEGACY_REQUIRED": + return FeatureSet_FieldPresence.LEGACY_REQUIRED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_FieldPresence.UNRECOGNIZED; + } +} + +export function featureSet_FieldPresenceToJSON(object: FeatureSet_FieldPresence): string { + switch (object) { + case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN: + return "FIELD_PRESENCE_UNKNOWN"; + case FeatureSet_FieldPresence.EXPLICIT: + return "EXPLICIT"; + case FeatureSet_FieldPresence.IMPLICIT: + return "IMPLICIT"; + case FeatureSet_FieldPresence.LEGACY_REQUIRED: + return "LEGACY_REQUIRED"; + case FeatureSet_FieldPresence.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_EnumType { + ENUM_TYPE_UNKNOWN = 0, + OPEN = 1, + CLOSED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_EnumTypeFromJSON(object: any): FeatureSet_EnumType { + switch (object) { + case 0: + case "ENUM_TYPE_UNKNOWN": + return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN; + case 1: + case "OPEN": + return FeatureSet_EnumType.OPEN; + case 2: + case "CLOSED": + return FeatureSet_EnumType.CLOSED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_EnumType.UNRECOGNIZED; + } +} + +export function featureSet_EnumTypeToJSON(object: FeatureSet_EnumType): string { + switch (object) { + case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN: + return "ENUM_TYPE_UNKNOWN"; + case FeatureSet_EnumType.OPEN: + return "OPEN"; + case FeatureSet_EnumType.CLOSED: + return "CLOSED"; + case FeatureSet_EnumType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0, + PACKED = 1, + EXPANDED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_RepeatedFieldEncodingFromJSON(object: any): FeatureSet_RepeatedFieldEncoding { + switch (object) { + case 0: + case "REPEATED_FIELD_ENCODING_UNKNOWN": + return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN; + case 1: + case "PACKED": + return FeatureSet_RepeatedFieldEncoding.PACKED; + case 2: + case "EXPANDED": + return FeatureSet_RepeatedFieldEncoding.EXPANDED; + case -1: + case "UNRECOGNIZED": + default: + return 
FeatureSet_RepeatedFieldEncoding.UNRECOGNIZED; + } +} + +export function featureSet_RepeatedFieldEncodingToJSON(object: FeatureSet_RepeatedFieldEncoding): string { + switch (object) { + case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN: + return "REPEATED_FIELD_ENCODING_UNKNOWN"; + case FeatureSet_RepeatedFieldEncoding.PACKED: + return "PACKED"; + case FeatureSet_RepeatedFieldEncoding.EXPANDED: + return "EXPANDED"; + case FeatureSet_RepeatedFieldEncoding.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0, + NONE = 1, + VERIFY = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_Utf8ValidationFromJSON(object: any): FeatureSet_Utf8Validation { + switch (object) { + case 0: + case "UTF8_VALIDATION_UNKNOWN": + return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN; + case 1: + case "NONE": + return FeatureSet_Utf8Validation.NONE; + case 2: + case "VERIFY": + return FeatureSet_Utf8Validation.VERIFY; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_Utf8Validation.UNRECOGNIZED; + } +} + +export function featureSet_Utf8ValidationToJSON(object: FeatureSet_Utf8Validation): string { + switch (object) { + case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN: + return "UTF8_VALIDATION_UNKNOWN"; + case FeatureSet_Utf8Validation.NONE: + return "NONE"; + case FeatureSet_Utf8Validation.VERIFY: + return "VERIFY"; + case FeatureSet_Utf8Validation.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0, + LENGTH_PREFIXED = 1, + DELIMITED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_MessageEncodingFromJSON(object: any): FeatureSet_MessageEncoding { + switch (object) { + case 0: + case "MESSAGE_ENCODING_UNKNOWN": + return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN; + case 1: + case "LENGTH_PREFIXED": + return FeatureSet_MessageEncoding.LENGTH_PREFIXED; + case 2: + case "DELIMITED": + return FeatureSet_MessageEncoding.DELIMITED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_MessageEncoding.UNRECOGNIZED; + } +} + +export function featureSet_MessageEncodingToJSON(object: FeatureSet_MessageEncoding): string { + switch (object) { + case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN: + return "MESSAGE_ENCODING_UNKNOWN"; + case FeatureSet_MessageEncoding.LENGTH_PREFIXED: + return "LENGTH_PREFIXED"; + case FeatureSet_MessageEncoding.DELIMITED: + return "DELIMITED"; + case FeatureSet_MessageEncoding.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_JsonFormat { + JSON_FORMAT_UNKNOWN = 0, + ALLOW = 1, + LEGACY_BEST_EFFORT = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_JsonFormatFromJSON(object: any): FeatureSet_JsonFormat { + switch (object) { + case 0: + case "JSON_FORMAT_UNKNOWN": + return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN; + case 1: + case "ALLOW": + return FeatureSet_JsonFormat.ALLOW; + case 2: + case "LEGACY_BEST_EFFORT": + return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_JsonFormat.UNRECOGNIZED; + } +} + +export function featureSet_JsonFormatToJSON(object: FeatureSet_JsonFormat): string { + switch (object) { + case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN: + return "JSON_FORMAT_UNKNOWN"; + case FeatureSet_JsonFormat.ALLOW: + return "ALLOW"; + case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT: + return "LEGACY_BEST_EFFORT"; + case 
FeatureSet_JsonFormat.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A compiled specification for the defaults of a set of features. These + * messages are generated from FeatureSet extensions and can be used to seed + * feature resolution. The resolution with this object becomes a simple search + * for the closest matching edition, followed by proto merges. + */ +export interface FeatureSetDefaults { + defaults: FeatureSetDefaults_FeatureSetEditionDefault[]; + /** + * The minimum supported edition (inclusive) when this was constructed. + * Editions before this will not have defaults. + */ + minimumEdition: Edition; + /** + * The maximum known edition (inclusive) when this was constructed. Editions + * after this will not have reliable defaults. + */ + maximumEdition: Edition; +} + +/** + * A map from every known edition with a unique set of defaults to its + * defaults. Not all editions may be contained here. For a given edition, + * the defaults at the closest matching edition ordered at or before it should + * be used. This field must be in strict ascending order by edition. + */ +export interface FeatureSetDefaults_FeatureSetEditionDefault { + edition: Edition; + features: FeatureSet | undefined; +} + /** * Encapsulates information about the original source file from which a * FileDescriptorProto was generated. @@ -1421,7 +1942,7 @@ function createBaseFileDescriptorProto(): FileDescriptorProto { options: undefined, sourceCodeInfo: undefined, syntax: "", - edition: "", + edition: 0, }; } @@ -1467,8 +1988,8 @@ export const FileDescriptorProto = { if (message.syntax !== "") { writer.uint32(98).string(message.syntax); } - if (message.edition !== "") { - writer.uint32(106).string(message.edition); + if (message.edition !== 0) { + writer.uint32(112).int32(message.edition); } return writer; }, @@ -1584,12 +2105,12 @@ export const FileDescriptorProto = { message.syntax = reader.string(); continue; - case 13: - if (tag !== 106) { + case 14: + if (tag !== 112) { break; } - message.edition = reader.string(); + message.edition = reader.int32() as any; continue; } if ((tag & 7) === 4 || tag === 0) { @@ -1620,7 +2141,7 @@ export const FileDescriptorProto = { options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, syntax: isSet(object.syntax) ? String(object.syntax) : "", - edition: isSet(object.edition) ? String(object.edition) : "", + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, }; }, @@ -1662,8 +2183,8 @@ export const FileDescriptorProto = { if (message.syntax !== "") { obj.syntax = message.syntax; } - if (message.edition !== "") { - obj.edition = message.edition; + if (message.edition !== 0) { + obj.edition = editionToJSON(message.edition); } return obj; }, @@ -1689,7 +2210,7 @@ export const FileDescriptorProto = { ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) : undefined; message.syntax = object.syntax ?? ""; - message.edition = object.edition ?? ""; + message.edition = object.edition ?? 
0; return message; }, }; @@ -2078,7 +2599,7 @@ export const DescriptorProto_ReservedRange = { }; function createBaseExtensionRangeOptions(): ExtensionRangeOptions { - return { uninterpretedOption: [] }; + return { uninterpretedOption: [], declaration: [], features: undefined, verification: 0 }; } export const ExtensionRangeOptions = { @@ -2086,6 +2607,15 @@ export const ExtensionRangeOptions = { for (const v of message.uninterpretedOption) { UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); } + for (const v of message.declaration) { + ExtensionRangeOptions_Declaration.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(402).fork()).ldelim(); + } + if (message.verification !== 0) { + writer.uint32(24).int32(message.verification); + } return writer; }, @@ -2103,6 +2633,27 @@ export const ExtensionRangeOptions = { message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); continue; + case 2: + if (tag !== 18) { + break; + } + + message.declaration.push(ExtensionRangeOptions_Declaration.decode(reader, reader.uint32())); + continue; + case 50: + if (tag !== 402) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.verification = reader.int32() as any; + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -2117,6 +2668,13 @@ export const ExtensionRangeOptions = { uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], + declaration: Array.isArray(object?.declaration) + ? object.declaration.map((e: any) => ExtensionRangeOptions_Declaration.fromJSON(e)) + : [], + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + verification: isSet(object.verification) + ? extensionRangeOptions_VerificationStateFromJSON(object.verification) + : 0, }; }, @@ -2125,6 +2683,15 @@ export const ExtensionRangeOptions = { if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } + if (message.declaration?.length) { + obj.declaration = message.declaration.map((e) => ExtensionRangeOptions_Declaration.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.verification !== 0) { + obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification); + } return obj; }, @@ -2134,20 +2701,148 @@ export const ExtensionRangeOptions = { fromPartial, I>>(object: I): ExtensionRangeOptions { const message = createBaseExtensionRangeOptions(); message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + message.declaration = object.declaration?.map((e) => ExtensionRangeOptions_Declaration.fromPartial(e)) || []; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.verification = object.verification ?? 
0; return message; }, }; -function createBaseFieldDescriptorProto(): FieldDescriptorProto { - return { - name: "", - number: 0, - label: 1, - type: 1, - typeName: "", - extendee: "", - defaultValue: "", - oneofIndex: 0, +function createBaseExtensionRangeOptions_Declaration(): ExtensionRangeOptions_Declaration { + return { number: 0, fullName: "", type: "", reserved: false, repeated: false }; +} + +export const ExtensionRangeOptions_Declaration = { + encode(message: ExtensionRangeOptions_Declaration, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.number !== 0) { + writer.uint32(8).int32(message.number); + } + if (message.fullName !== "") { + writer.uint32(18).string(message.fullName); + } + if (message.type !== "") { + writer.uint32(26).string(message.type); + } + if (message.reserved === true) { + writer.uint32(40).bool(message.reserved); + } + if (message.repeated === true) { + writer.uint32(48).bool(message.repeated); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions_Declaration { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions_Declaration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.number = reader.int32(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.fullName = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.type = reader.string(); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.reserved = reader.bool(); + continue; + case 6: + if (tag !== 48) { + break; + } + + message.repeated = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions_Declaration { + return { + number: isSet(object.number) ? Number(object.number) : 0, + fullName: isSet(object.fullName) ? String(object.fullName) : "", + type: isSet(object.type) ? String(object.type) : "", + reserved: isSet(object.reserved) ? Boolean(object.reserved) : false, + repeated: isSet(object.repeated) ? Boolean(object.repeated) : false, + }; + }, + + toJSON(message: ExtensionRangeOptions_Declaration): unknown { + const obj: any = {}; + if (message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.fullName !== "") { + obj.fullName = message.fullName; + } + if (message.type !== "") { + obj.type = message.type; + } + if (message.reserved === true) { + obj.reserved = message.reserved; + } + if (message.repeated === true) { + obj.repeated = message.repeated; + } + return obj; + }, + + create, I>>( + base?: I, + ): ExtensionRangeOptions_Declaration { + return ExtensionRangeOptions_Declaration.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): ExtensionRangeOptions_Declaration { + const message = createBaseExtensionRangeOptions_Declaration(); + message.number = object.number ?? 0; + message.fullName = object.fullName ?? ""; + message.type = object.type ?? ""; + message.reserved = object.reserved ?? false; + message.repeated = object.repeated ?? 
false; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, jsonName: "", options: undefined, proto3Optional: false, @@ -2983,6 +3678,7 @@ function createBaseFileOptions(): FileOptions { phpNamespace: "", phpMetadataNamespace: "", rubyPackage: "", + features: undefined, uninterpretedOption: [], }; } @@ -3049,6 +3745,9 @@ export const FileOptions = { if (message.rubyPackage !== "") { writer.uint32(362).string(message.rubyPackage); } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(402).fork()).ldelim(); + } for (const v of message.uninterpretedOption) { UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); } @@ -3202,6 +3901,13 @@ export const FileOptions = { message.rubyPackage = reader.string(); continue; + case 50: + if (tag !== 402) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; case 999: if (tag !== 7994) { break; @@ -3242,6 +3948,7 @@ export const FileOptions = { phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], @@ -3310,6 +4017,9 @@ export const FileOptions = { if (message.rubyPackage !== "") { obj.rubyPackage = message.rubyPackage; } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } @@ -3341,6 +4051,9 @@ export const FileOptions = { message.phpNamespace = object.phpNamespace ?? ""; message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; message.rubyPackage = object.rubyPackage ?? ""; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; return message; }, @@ -3353,6 +4066,7 @@ function createBaseMessageOptions(): MessageOptions { deprecated: false, mapEntry: false, deprecatedLegacyJsonFieldConflicts: false, + features: undefined, uninterpretedOption: [], }; } @@ -3374,6 +4088,9 @@ export const MessageOptions = { if (message.deprecatedLegacyJsonFieldConflicts === true) { writer.uint32(88).bool(message.deprecatedLegacyJsonFieldConflicts); } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(98).fork()).ldelim(); + } for (const v of message.uninterpretedOption) { UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); } @@ -3422,6 +4139,13 @@ export const MessageOptions = { message.deprecatedLegacyJsonFieldConflicts = reader.bool(); continue; + case 12: + if (tag !== 98) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; case 999: if (tag !== 7994) { break; @@ -3449,6 +4173,7 @@ export const MessageOptions = { deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) ? 
Boolean(object.deprecatedLegacyJsonFieldConflicts) : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], @@ -3472,6 +4197,9 @@ export const MessageOptions = { if (message.deprecatedLegacyJsonFieldConflicts === true) { obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } @@ -3488,6 +4216,9 @@ export const MessageOptions = { message.deprecated = object.deprecated ?? false; message.mapEntry = object.mapEntry ?? false; message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; return message; }, @@ -3504,7 +4235,9 @@ function createBaseFieldOptions(): FieldOptions { weak: false, debugRedact: false, retention: 0, - target: 0, + targets: [], + editionDefaults: [], + features: undefined, uninterpretedOption: [], }; } @@ -3538,8 +4271,16 @@ export const FieldOptions = { if (message.retention !== 0) { writer.uint32(136).int32(message.retention); } - if (message.target !== 0) { - writer.uint32(144).int32(message.target); + writer.uint32(154).fork(); + for (const v of message.targets) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.editionDefaults) { + FieldOptions_EditionDefault.encode(v!, writer.uint32(162).fork()).ldelim(); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(170).fork()).ldelim(); } for (const v of message.uninterpretedOption) { UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); @@ -3617,12 +4358,36 @@ export const FieldOptions = { message.retention = reader.int32() as any; continue; - case 18: - if (tag !== 144) { + case 19: + if (tag === 152) { + message.targets.push(reader.int32() as any); + + continue; + } + + if (tag === 154) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.targets.push(reader.int32() as any); + } + + continue; + } + + break; + case 20: + if (tag !== 162) { break; } - message.target = reader.int32() as any; + message.editionDefaults.push(FieldOptions_EditionDefault.decode(reader, reader.uint32())); + continue; + case 21: + if (tag !== 170) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); continue; case 999: if (tag !== 7994) { @@ -3651,7 +4416,13 @@ export const FieldOptions = { weak: isSet(object.weak) ? Boolean(object.weak) : false, debugRedact: isSet(object.debugRedact) ? Boolean(object.debugRedact) : false, retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0, - target: isSet(object.target) ? fieldOptions_OptionTargetTypeFromJSON(object.target) : 0, + targets: Array.isArray(object?.targets) + ? object.targets.map((e: any) => fieldOptions_OptionTargetTypeFromJSON(e)) + : [], + editionDefaults: Array.isArray(object?.editionDefaults) + ? 
object.editionDefaults.map((e: any) => FieldOptions_EditionDefault.fromJSON(e)) + : [], + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], @@ -3687,8 +4458,14 @@ export const FieldOptions = { if (message.retention !== 0) { obj.retention = fieldOptions_OptionRetentionToJSON(message.retention); } - if (message.target !== 0) { - obj.target = fieldOptions_OptionTargetTypeToJSON(message.target); + if (message.targets?.length) { + obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e)); + } + if (message.editionDefaults?.length) { + obj.editionDefaults = message.editionDefaults.map((e) => FieldOptions_EditionDefault.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); @@ -3710,18 +4487,99 @@ export const FieldOptions = { message.weak = object.weak ?? false; message.debugRedact = object.debugRedact ?? false; message.retention = object.retention ?? 0; - message.target = object.target ?? 0; + message.targets = object.targets?.map((e) => e) || []; + message.editionDefaults = object.editionDefaults?.map((e) => FieldOptions_EditionDefault.fromPartial(e)) || []; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; return message; }, }; +function createBaseFieldOptions_EditionDefault(): FieldOptions_EditionDefault { + return { edition: 0, value: "" }; +} + +export const FieldOptions_EditionDefault = { + encode(message: FieldOptions_EditionDefault, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.edition !== 0) { + writer.uint32(24).int32(message.edition); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions_EditionDefault { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFieldOptions_EditionDefault(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 3: + if (tag !== 24) { + break; + } + + message.edition = reader.int32() as any; + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions_EditionDefault { + return { + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + value: isSet(object.value) ? String(object.value) : "", + }; + }, + + toJSON(message: FieldOptions_EditionDefault): unknown { + const obj: any = {}; + if (message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>(base?: I): FieldOptions_EditionDefault { + return FieldOptions_EditionDefault.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): FieldOptions_EditionDefault { + const message = createBaseFieldOptions_EditionDefault(); + message.edition = object.edition ?? 0; + message.value = object.value ?? ""; + return message; + }, +}; + function createBaseOneofOptions(): OneofOptions { - return { uninterpretedOption: [] }; + return { features: undefined, uninterpretedOption: [] }; } export const OneofOptions = { encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(10).fork()).ldelim(); + } for (const v of message.uninterpretedOption) { UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); } @@ -3735,6 +4593,13 @@ export const OneofOptions = { while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; case 999: if (tag !== 7994) { break; @@ -3753,6 +4618,7 @@ export const OneofOptions = { fromJSON(object: any): OneofOptions { return { + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], @@ -3761,6 +4627,9 @@ export const OneofOptions = { toJSON(message: OneofOptions): unknown { const obj: any = {}; + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } @@ -3772,13 +4641,22 @@ export const OneofOptions = { }, fromPartial, I>>(object: I): OneofOptions { const message = createBaseOneofOptions(); + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; return message; }, }; function createBaseEnumOptions(): EnumOptions { - return { allowAlias: false, deprecated: false, deprecatedLegacyJsonFieldConflicts: false, uninterpretedOption: [] }; + return { + allowAlias: false, + deprecated: false, + deprecatedLegacyJsonFieldConflicts: false, + features: undefined, + uninterpretedOption: [], + }; } export const EnumOptions = { @@ -3792,6 +4670,9 @@ export const EnumOptions = { if (message.deprecatedLegacyJsonFieldConflicts === true) { writer.uint32(48).bool(message.deprecatedLegacyJsonFieldConflicts); } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(58).fork()).ldelim(); + } for (const v of message.uninterpretedOption) { UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); } @@ -3826,6 +4707,13 @@ export const EnumOptions = { message.deprecatedLegacyJsonFieldConflicts = reader.bool(); continue; + case 7: + if (tag !== 58) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; case 999: if (tag !== 7994) { break; @@ -3849,6 +4737,7 @@ export const EnumOptions = { deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) ? Boolean(object.deprecatedLegacyJsonFieldConflicts) : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], @@ -3866,6 +4755,9 @@ export const EnumOptions = { if (message.deprecatedLegacyJsonFieldConflicts === true) { obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } @@ -3880,13 +4772,16 @@ export const EnumOptions = { message.allowAlias = object.allowAlias ?? false; message.deprecated = object.deprecated ?? false; message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; return message; }, }; function createBaseEnumValueOptions(): EnumValueOptions { - return { deprecated: false, uninterpretedOption: [] }; + return { deprecated: false, features: undefined, debugRedact: false, uninterpretedOption: [] }; } export const EnumValueOptions = { @@ -3894,6 +4789,12 @@ export const EnumValueOptions = { if (message.deprecated === true) { writer.uint32(8).bool(message.deprecated); } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(18).fork()).ldelim(); + } + if (message.debugRedact === true) { + writer.uint32(24).bool(message.debugRedact); + } for (const v of message.uninterpretedOption) { UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); } @@ -3914,6 +4815,20 @@ export const EnumValueOptions = { message.deprecated = reader.bool(); continue; + case 2: + if (tag !== 18) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.debugRedact = reader.bool(); + continue; case 999: if (tag !== 7994) { break; @@ -3933,6 +4848,8 @@ export const EnumValueOptions = { fromJSON(object: any): EnumValueOptions { return { deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + debugRedact: isSet(object.debugRedact) ? Boolean(object.debugRedact) : false, uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], @@ -3944,6 +4861,12 @@ export const EnumValueOptions = { if (message.deprecated === true) { obj.deprecated = message.deprecated; } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.debugRedact === true) { + obj.debugRedact = message.debugRedact; + } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } @@ -3956,17 +4879,24 @@ export const EnumValueOptions = { fromPartial, I>>(object: I): EnumValueOptions { const message = createBaseEnumValueOptions(); message.deprecated = object.deprecated ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.debugRedact = object.debugRedact ?? 
false; message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; return message; }, }; function createBaseServiceOptions(): ServiceOptions { - return { deprecated: false, uninterpretedOption: [] }; + return { features: undefined, deprecated: false, uninterpretedOption: [] }; } export const ServiceOptions = { encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(274).fork()).ldelim(); + } if (message.deprecated === true) { writer.uint32(264).bool(message.deprecated); } @@ -3983,6 +4913,13 @@ export const ServiceOptions = { while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 34: + if (tag !== 274) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; case 33: if (tag !== 264) { break; @@ -4008,6 +4945,7 @@ export const ServiceOptions = { fromJSON(object: any): ServiceOptions { return { + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) @@ -4017,6 +4955,9 @@ export const ServiceOptions = { toJSON(message: ServiceOptions): unknown { const obj: any = {}; + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } if (message.deprecated === true) { obj.deprecated = message.deprecated; } @@ -4031,6 +4972,9 @@ export const ServiceOptions = { }, fromPartial, I>>(object: I): ServiceOptions { const message = createBaseServiceOptions(); + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; message.deprecated = object.deprecated ?? false; message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; return message; @@ -4038,7 +4982,7 @@ export const ServiceOptions = { }; function createBaseMethodOptions(): MethodOptions { - return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; + return { deprecated: false, idempotencyLevel: 0, features: undefined, uninterpretedOption: [] }; } export const MethodOptions = { @@ -4049,6 +4993,9 @@ export const MethodOptions = { if (message.idempotencyLevel !== 0) { writer.uint32(272).int32(message.idempotencyLevel); } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(282).fork()).ldelim(); + } for (const v of message.uninterpretedOption) { UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); } @@ -4076,6 +5023,13 @@ export const MethodOptions = { message.idempotencyLevel = reader.int32() as any; continue; + case 35: + if (tag !== 282) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; case 999: if (tag !== 7994) { break; @@ -4098,6 +5052,7 @@ export const MethodOptions = { idempotencyLevel: isSet(object.idempotencyLevel) ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) : 0, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: Array.isArray(object?.uninterpretedOption) ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], @@ -4112,6 +5067,9 @@ export const MethodOptions = { if (message.idempotencyLevel !== 0) { obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel); } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } @@ -4125,6 +5083,9 @@ export const MethodOptions = { const message = createBaseMethodOptions(); message.deprecated = object.deprecated ?? false; message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; return message; }, @@ -4361,6 +5322,320 @@ export const UninterpretedOption_NamePart = { }, }; +function createBaseFeatureSet(): FeatureSet { + return { + fieldPresence: 0, + enumType: 0, + repeatedFieldEncoding: 0, + utf8Validation: 0, + messageEncoding: 0, + jsonFormat: 0, + }; +} + +export const FeatureSet = { + encode(message: FeatureSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fieldPresence !== 0) { + writer.uint32(8).int32(message.fieldPresence); + } + if (message.enumType !== 0) { + writer.uint32(16).int32(message.enumType); + } + if (message.repeatedFieldEncoding !== 0) { + writer.uint32(24).int32(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== 0) { + writer.uint32(32).int32(message.utf8Validation); + } + if (message.messageEncoding !== 0) { + writer.uint32(40).int32(message.messageEncoding); + } + if (message.jsonFormat !== 0) { + writer.uint32(48).int32(message.jsonFormat); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FeatureSet { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.fieldPresence = reader.int32() as any; + continue; + case 2: + if (tag !== 16) { + break; + } + + message.enumType = reader.int32() as any; + continue; + case 3: + if (tag !== 24) { + break; + } + + message.repeatedFieldEncoding = reader.int32() as any; + continue; + case 4: + if (tag !== 32) { + break; + } + + message.utf8Validation = reader.int32() as any; + continue; + case 5: + if (tag !== 40) { + break; + } + + message.messageEncoding = reader.int32() as any; + continue; + case 6: + if (tag !== 48) { + break; + } + + message.jsonFormat = reader.int32() as any; + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSet { + return { + fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0, + enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0, + repeatedFieldEncoding: isSet(object.repeatedFieldEncoding) + ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding) + : 0, + utf8Validation: isSet(object.utf8Validation) ? 
featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0, + messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0, + jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0, + }; + }, + + toJSON(message: FeatureSet): unknown { + const obj: any = {}; + if (message.fieldPresence !== 0) { + obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence); + } + if (message.enumType !== 0) { + obj.enumType = featureSet_EnumTypeToJSON(message.enumType); + } + if (message.repeatedFieldEncoding !== 0) { + obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== 0) { + obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation); + } + if (message.messageEncoding !== 0) { + obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding); + } + if (message.jsonFormat !== 0) { + obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat); + } + return obj; + }, + + create, I>>(base?: I): FeatureSet { + return FeatureSet.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FeatureSet { + const message = createBaseFeatureSet(); + message.fieldPresence = object.fieldPresence ?? 0; + message.enumType = object.enumType ?? 0; + message.repeatedFieldEncoding = object.repeatedFieldEncoding ?? 0; + message.utf8Validation = object.utf8Validation ?? 0; + message.messageEncoding = object.messageEncoding ?? 0; + message.jsonFormat = object.jsonFormat ?? 0; + return message; + }, +}; + +function createBaseFeatureSetDefaults(): FeatureSetDefaults { + return { defaults: [], minimumEdition: 0, maximumEdition: 0 }; +} + +export const FeatureSetDefaults = { + encode(message: FeatureSetDefaults, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.defaults) { + FeatureSetDefaults_FeatureSetEditionDefault.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.minimumEdition !== 0) { + writer.uint32(32).int32(message.minimumEdition); + } + if (message.maximumEdition !== 0) { + writer.uint32(40).int32(message.maximumEdition); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FeatureSetDefaults { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSetDefaults(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.defaults.push(FeatureSetDefaults_FeatureSetEditionDefault.decode(reader, reader.uint32())); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.minimumEdition = reader.int32() as any; + continue; + case 5: + if (tag !== 40) { + break; + } + + message.maximumEdition = reader.int32() as any; + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSetDefaults { + return { + defaults: Array.isArray(object?.defaults) + ? object.defaults.map((e: any) => FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e)) + : [], + minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0, + maximumEdition: isSet(object.maximumEdition) ? 
editionFromJSON(object.maximumEdition) : 0, + }; + }, + + toJSON(message: FeatureSetDefaults): unknown { + const obj: any = {}; + if (message.defaults?.length) { + obj.defaults = message.defaults.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e)); + } + if (message.minimumEdition !== 0) { + obj.minimumEdition = editionToJSON(message.minimumEdition); + } + if (message.maximumEdition !== 0) { + obj.maximumEdition = editionToJSON(message.maximumEdition); + } + return obj; + }, + + create, I>>(base?: I): FeatureSetDefaults { + return FeatureSetDefaults.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FeatureSetDefaults { + const message = createBaseFeatureSetDefaults(); + message.defaults = object.defaults?.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.fromPartial(e)) || []; + message.minimumEdition = object.minimumEdition ?? 0; + message.maximumEdition = object.maximumEdition ?? 0; + return message; + }, +}; + +function createBaseFeatureSetDefaults_FeatureSetEditionDefault(): FeatureSetDefaults_FeatureSetEditionDefault { + return { edition: 0, features: undefined }; +} + +export const FeatureSetDefaults_FeatureSetEditionDefault = { + encode(message: FeatureSetDefaults_FeatureSetEditionDefault, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.edition !== 0) { + writer.uint32(24).int32(message.edition); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FeatureSetDefaults_FeatureSetEditionDefault { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSetDefaults_FeatureSetEditionDefault(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 3: + if (tag !== 24) { + break; + } + + message.edition = reader.int32() as any; + continue; + case 2: + if (tag !== 18) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSetDefaults_FeatureSetEditionDefault { + return { + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + }; + }, + + toJSON(message: FeatureSetDefaults_FeatureSetEditionDefault): unknown { + const obj: any = {}; + if (message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + return obj; + }, + + create, I>>( + base?: I, + ): FeatureSetDefaults_FeatureSetEditionDefault { + return FeatureSetDefaults_FeatureSetEditionDefault.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): FeatureSetDefaults_FeatureSetEditionDefault { + const message = createBaseFeatureSetDefaults_FeatureSetEditionDefault(); + message.edition = object.edition ?? 0; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + return message; + }, +}; + function createBaseSourceCodeInfo(): SourceCodeInfo { return { location: [] }; } diff --git a/ui/grpc_generated/google/protobuf/timestamp.ts b/ui/grpc_generated/google/protobuf/timestamp.ts index 560af8a4e0..959778faa4 100644 --- a/ui/grpc_generated/google/protobuf/timestamp.ts +++ b/ui/grpc_generated/google/protobuf/timestamp.ts @@ -92,7 +92,7 @@ export const protobufPackage = "google.protobuf"; * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use * the Joda Time's [`ISODateTimeFormat.dateTime()`]( - * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() * ) to obtain a formatter capable of generating timestamps in this format. */ export interface Timestamp {
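For reference, the regenerated descriptor module models `edition` as the new `Edition` enum (wire field 14, encoded as a varint) rather than the previous opaque string (field 13), and introduces `FeatureSet` / `FeatureSetDefaults` for editions feature resolution. Below is a minimal sketch of how downstream UI code might consume these generated types. It is an assumption-laden illustration: the relative import path and the helper names (`describeEdition`, `overlay`, `resolveFeatures`) are not part of this change, and the per-field overlay only approximates the proto merge that the `FeatureSetDefaults` comment describes.

// A minimal sketch of consuming the regenerated descriptor types. The import
// path and helper names are illustrative; adjust to the actual build layout.
import {
  Edition,
  editionToJSON,
  FeatureSet,
  FeatureSetDefaults,
  FileDescriptorProto,
} from "./grpc_generated/google/protobuf/descriptor";

// `edition` is now the Edition enum (wire field 14, varint) instead of the old
// opaque string (wire field 13), so callers compare against enum values.
export function describeEdition(fd: FileDescriptorProto): string {
  if (fd.edition === Edition.EDITION_UNKNOWN) {
    return `legacy syntax: ${fd.syntax || "proto2"}`;
  }
  return `edition: ${editionToJSON(fd.edition)}`;
}

// Per-field fallback: explicit (non-UNKNOWN) overrides win, otherwise keep the
// edition default. This only approximates the proto merge that full feature
// resolution performs.
function overlay(base: FeatureSet, overrides: FeatureSet): FeatureSet {
  return {
    fieldPresence: overrides.fieldPresence || base.fieldPresence,
    enumType: overrides.enumType || base.enumType,
    repeatedFieldEncoding: overrides.repeatedFieldEncoding || base.repeatedFieldEncoding,
    utf8Validation: overrides.utf8Validation || base.utf8Validation,
    messageEncoding: overrides.messageEncoding || base.messageEncoding,
    jsonFormat: overrides.jsonFormat || base.jsonFormat,
  };
}

// FeatureSetDefaults.defaults is required to be in strict ascending order by
// edition, and Edition values are time-ordered, so the closest matching
// defaults are the last entry at or before the file's edition.
export function resolveFeatures(
  defaults: FeatureSetDefaults,
  fileEdition: Edition,
  fileFeatures?: FeatureSet,
): FeatureSet | undefined {
  let resolved: FeatureSet | undefined;
  for (const d of defaults.defaults) {
    if (d.edition <= fileEdition && d.features !== undefined) {
      resolved = d.features;
    }
  }
  if (resolved === undefined) {
    return undefined;
  }
  return fileFeatures !== undefined ? overlay(resolved, fileFeatures) : resolved;
}

Because the `edition` field number changed from 13 (string) to 14 (int32), old payloads that carried a string edition are simply skipped as unknown fields by the regenerated decoder; callers should treat `Edition.EDITION_UNKNOWN` as "fall back to the `syntax` field", as in the sketch above.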