| // Code generated by protoc-gen-go. DO NOT EDIT. |
| // source: google/storagetransfer/v1/transfer_types.proto |
| |
| package storagetransfer // import "google.golang.org/genproto/googleapis/storagetransfer/v1" |
| |
| import proto "github.com/golang/protobuf/proto" |
| import fmt "fmt" |
| import math "math" |
| import duration "github.com/golang/protobuf/ptypes/duration" |
| import timestamp "github.com/golang/protobuf/ptypes/timestamp" |
| import _ "google.golang.org/genproto/googleapis/api/annotations" |
| import code "google.golang.org/genproto/googleapis/rpc/code" |
| import date "google.golang.org/genproto/googleapis/type/date" |
| import timeofday "google.golang.org/genproto/googleapis/type/timeofday" |
| |
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
| |
// The status of the transfer job.
type TransferJob_Status int32

const (
	// Zero is an illegal value.
	TransferJob_STATUS_UNSPECIFIED TransferJob_Status = 0
	// New transfers will be performed based on the schedule.
	TransferJob_ENABLED TransferJob_Status = 1
	// New transfers will not be scheduled.
	TransferJob_DISABLED TransferJob_Status = 2
	// This is a soft delete state. After a transfer job is set to this
	// state, the job and all the transfer executions are subject to
	// garbage collection.
	TransferJob_DELETED TransferJob_Status = 3
)

// TransferJob_Status_name maps the numeric enum values to their proto names.
var TransferJob_Status_name = map[int32]string{
	0: "STATUS_UNSPECIFIED",
	1: "ENABLED",
	2: "DISABLED",
	3: "DELETED",
}

// TransferJob_Status_value maps the proto enum names back to numeric values.
var TransferJob_Status_value = map[string]int32{
	"STATUS_UNSPECIFIED": 0,
	"ENABLED":            1,
	"DISABLED":           2,
	"DELETED":            3,
}

// String returns the proto name of x as registered in TransferJob_Status_name.
func (x TransferJob_Status) String() string {
	return proto.EnumName(TransferJob_Status_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the index
// path of this enum within it.
func (TransferJob_Status) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{9, 0}
}
| |
// The status of a TransferOperation.
type TransferOperation_Status int32

const (
	// Zero is an illegal value.
	TransferOperation_STATUS_UNSPECIFIED TransferOperation_Status = 0
	// In progress.
	TransferOperation_IN_PROGRESS TransferOperation_Status = 1
	// Paused.
	TransferOperation_PAUSED TransferOperation_Status = 2
	// Completed successfully.
	TransferOperation_SUCCESS TransferOperation_Status = 3
	// Terminated due to an unrecoverable failure.
	TransferOperation_FAILED TransferOperation_Status = 4
	// Aborted by the user.
	TransferOperation_ABORTED TransferOperation_Status = 5
)

// TransferOperation_Status_name maps the numeric enum values to their proto names.
var TransferOperation_Status_name = map[int32]string{
	0: "STATUS_UNSPECIFIED",
	1: "IN_PROGRESS",
	2: "PAUSED",
	3: "SUCCESS",
	4: "FAILED",
	5: "ABORTED",
}

// TransferOperation_Status_value maps the proto enum names back to numeric values.
var TransferOperation_Status_value = map[string]int32{
	"STATUS_UNSPECIFIED": 0,
	"IN_PROGRESS":        1,
	"PAUSED":             2,
	"SUCCESS":            3,
	"FAILED":             4,
	"ABORTED":            5,
}

// String returns the proto name of x as registered in TransferOperation_Status_name.
func (x TransferOperation_Status) String() string {
	return proto.EnumName(TransferOperation_Status_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the index
// path of this enum within it.
func (TransferOperation_Status) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{13, 0}
}
| |
// Google service account
type GoogleServiceAccount struct {
	// Required.
	AccountEmail string `protobuf:"bytes,1,opt,name=account_email,json=accountEmail,proto3" json:"account_email,omitempty"`
	// XXX_* fields are internal proto-runtime bookkeeping (unknown-field
	// preservation and size caching); they are not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage, and Descriptor satisfy the proto.Message
// contract for this generated type.
func (m *GoogleServiceAccount) Reset()         { *m = GoogleServiceAccount{} }
func (m *GoogleServiceAccount) String() string { return proto.CompactTextString(m) }
func (*GoogleServiceAccount) ProtoMessage()    {}
func (*GoogleServiceAccount) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{0}
}

// The XXX_* methods delegate wire-format marshaling, merging, sizing, and
// unknown-field handling to the table-driven proto runtime.
func (m *GoogleServiceAccount) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GoogleServiceAccount.Unmarshal(m, b)
}
func (m *GoogleServiceAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GoogleServiceAccount.Marshal(b, m, deterministic)
}
func (dst *GoogleServiceAccount) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GoogleServiceAccount.Merge(dst, src)
}
func (m *GoogleServiceAccount) XXX_Size() int {
	return xxx_messageInfo_GoogleServiceAccount.Size(m)
}
func (m *GoogleServiceAccount) XXX_DiscardUnknown() {
	xxx_messageInfo_GoogleServiceAccount.DiscardUnknown(m)
}

// xxx_messageInfo_GoogleServiceAccount caches the marshaling tables for this type.
var xxx_messageInfo_GoogleServiceAccount proto.InternalMessageInfo
| |
| func (m *GoogleServiceAccount) GetAccountEmail() string { |
| if m != nil { |
| return m.AccountEmail |
| } |
| return "" |
| } |
| |
// AWS access key (see
// [AWS Security Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)).
type AwsAccessKey struct {
	// AWS access key ID.
	// Required.
	AccessKeyId string `protobuf:"bytes,1,opt,name=access_key_id,json=accessKeyId,proto3" json:"access_key_id,omitempty"`
	// AWS secret access key. This field is not returned in RPC responses.
	// Required.
	SecretAccessKey string `protobuf:"bytes,2,opt,name=secret_access_key,json=secretAccessKey,proto3" json:"secret_access_key,omitempty"`
	// Internal proto-runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage, and Descriptor satisfy the proto.Message
// contract for this generated type.
func (m *AwsAccessKey) Reset()         { *m = AwsAccessKey{} }
func (m *AwsAccessKey) String() string { return proto.CompactTextString(m) }
func (*AwsAccessKey) ProtoMessage()    {}
func (*AwsAccessKey) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{1}
}

// The XXX_* methods delegate wire-format handling to the table-driven proto runtime.
func (m *AwsAccessKey) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_AwsAccessKey.Unmarshal(m, b)
}
func (m *AwsAccessKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_AwsAccessKey.Marshal(b, m, deterministic)
}
func (dst *AwsAccessKey) XXX_Merge(src proto.Message) {
	xxx_messageInfo_AwsAccessKey.Merge(dst, src)
}
func (m *AwsAccessKey) XXX_Size() int {
	return xxx_messageInfo_AwsAccessKey.Size(m)
}
func (m *AwsAccessKey) XXX_DiscardUnknown() {
	xxx_messageInfo_AwsAccessKey.DiscardUnknown(m)
}

// xxx_messageInfo_AwsAccessKey caches the marshaling tables for this type.
var xxx_messageInfo_AwsAccessKey proto.InternalMessageInfo
| |
| func (m *AwsAccessKey) GetAccessKeyId() string { |
| if m != nil { |
| return m.AccessKeyId |
| } |
| return "" |
| } |
| |
| func (m *AwsAccessKey) GetSecretAccessKey() string { |
| if m != nil { |
| return m.SecretAccessKey |
| } |
| return "" |
| } |
| |
// Conditions that determine which objects will be transferred.
type ObjectConditions struct {
	// If unspecified, `minTimeElapsedSinceLastModification` takes a zero value
	// and `maxTimeElapsedSinceLastModification` takes the maximum possible
	// value of Duration. Objects that satisfy the object conditions
	// must either have a `lastModificationTime` greater or equal to
	// `NOW` - `maxTimeElapsedSinceLastModification` and less than
	// `NOW` - `minTimeElapsedSinceLastModification`, or not have a
	// `lastModificationTime`.
	MinTimeElapsedSinceLastModification *duration.Duration `protobuf:"bytes,1,opt,name=min_time_elapsed_since_last_modification,json=minTimeElapsedSinceLastModification,proto3" json:"min_time_elapsed_since_last_modification,omitempty"`
	// `maxTimeElapsedSinceLastModification` is the complement to
	// `minTimeElapsedSinceLastModification`.
	MaxTimeElapsedSinceLastModification *duration.Duration `protobuf:"bytes,2,opt,name=max_time_elapsed_since_last_modification,json=maxTimeElapsedSinceLastModification,proto3" json:"max_time_elapsed_since_last_modification,omitempty"`
	// If `includePrefixes` is specified, objects that satisfy the object
	// conditions must have names that start with one of the `includePrefixes`
	// and that do not start with any of the `excludePrefixes`. If `includePrefixes`
	// is not specified, all objects except those that have names starting with
	// one of the `excludePrefixes` must satisfy the object conditions.
	//
	// Requirements:
	//
	//   * Each include-prefix and exclude-prefix can contain any sequence of
	//     Unicode characters, of max length 1024 bytes when UTF8-encoded, and
	//     must not contain Carriage Return or Line Feed characters.  Wildcard
	//     matching and regular expression matching are not supported.
	//
	//   * Each include-prefix and exclude-prefix must omit the leading slash.
	//     For example, to include the `requests.gz` object in a transfer from
	//     `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include
	//     prefix as `logs/y=2015/requests.gz`.
	//
	//   * None of the include-prefix or the exclude-prefix values can be empty,
	//     if specified.
	//
	//   * Each include-prefix must include a distinct portion of the object
	//     namespace, i.e., no include-prefix may be a prefix of another
	//     include-prefix.
	//
	//   * Each exclude-prefix must exclude a distinct portion of the object
	//     namespace, i.e., no exclude-prefix may be a prefix of another
	//     exclude-prefix.
	//
	//   * If `includePrefixes` is specified, then each exclude-prefix must start
	//     with the value of a path explicitly included by `includePrefixes`.
	//
	// The max size of `includePrefixes` is 1000.
	IncludePrefixes []string `protobuf:"bytes,3,rep,name=include_prefixes,json=includePrefixes,proto3" json:"include_prefixes,omitempty"`
	// `excludePrefixes` must follow the requirements described for
	// `includePrefixes`.
	//
	// The max size of `excludePrefixes` is 1000.
	ExcludePrefixes []string `protobuf:"bytes,4,rep,name=exclude_prefixes,json=excludePrefixes,proto3" json:"exclude_prefixes,omitempty"`
	// Internal proto-runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage, and Descriptor satisfy the proto.Message
// contract for this generated type.
func (m *ObjectConditions) Reset()         { *m = ObjectConditions{} }
func (m *ObjectConditions) String() string { return proto.CompactTextString(m) }
func (*ObjectConditions) ProtoMessage()    {}
func (*ObjectConditions) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{2}
}

// The XXX_* methods delegate wire-format handling to the table-driven proto runtime.
func (m *ObjectConditions) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ObjectConditions.Unmarshal(m, b)
}
func (m *ObjectConditions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ObjectConditions.Marshal(b, m, deterministic)
}
func (dst *ObjectConditions) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ObjectConditions.Merge(dst, src)
}
func (m *ObjectConditions) XXX_Size() int {
	return xxx_messageInfo_ObjectConditions.Size(m)
}
func (m *ObjectConditions) XXX_DiscardUnknown() {
	xxx_messageInfo_ObjectConditions.DiscardUnknown(m)
}

// xxx_messageInfo_ObjectConditions caches the marshaling tables for this type.
var xxx_messageInfo_ObjectConditions proto.InternalMessageInfo
| |
| func (m *ObjectConditions) GetMinTimeElapsedSinceLastModification() *duration.Duration { |
| if m != nil { |
| return m.MinTimeElapsedSinceLastModification |
| } |
| return nil |
| } |
| |
| func (m *ObjectConditions) GetMaxTimeElapsedSinceLastModification() *duration.Duration { |
| if m != nil { |
| return m.MaxTimeElapsedSinceLastModification |
| } |
| return nil |
| } |
| |
| func (m *ObjectConditions) GetIncludePrefixes() []string { |
| if m != nil { |
| return m.IncludePrefixes |
| } |
| return nil |
| } |
| |
| func (m *ObjectConditions) GetExcludePrefixes() []string { |
| if m != nil { |
| return m.ExcludePrefixes |
| } |
| return nil |
| } |
| |
// In a GcsData, an object's name is the Google Cloud Storage object's name and
// its `lastModificationTime` refers to the object's updated time, which changes
// when the content or the metadata of the object is updated.
type GcsData struct {
	// Google Cloud Storage bucket name (see
	// [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
	// Required.
	BucketName string `protobuf:"bytes,1,opt,name=bucket_name,json=bucketName,proto3" json:"bucket_name,omitempty"`
	// Internal proto-runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage, and Descriptor satisfy the proto.Message
// contract for this generated type.
func (m *GcsData) Reset()         { *m = GcsData{} }
func (m *GcsData) String() string { return proto.CompactTextString(m) }
func (*GcsData) ProtoMessage()    {}
func (*GcsData) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{3}
}

// The XXX_* methods delegate wire-format handling to the table-driven proto runtime.
func (m *GcsData) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GcsData.Unmarshal(m, b)
}
func (m *GcsData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GcsData.Marshal(b, m, deterministic)
}
func (dst *GcsData) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GcsData.Merge(dst, src)
}
func (m *GcsData) XXX_Size() int {
	return xxx_messageInfo_GcsData.Size(m)
}
func (m *GcsData) XXX_DiscardUnknown() {
	xxx_messageInfo_GcsData.DiscardUnknown(m)
}

// xxx_messageInfo_GcsData caches the marshaling tables for this type.
var xxx_messageInfo_GcsData proto.InternalMessageInfo
| |
| func (m *GcsData) GetBucketName() string { |
| if m != nil { |
| return m.BucketName |
| } |
| return "" |
| } |
| |
// An AwsS3Data can be a data source, but not a data sink.
// In an AwsS3Data, an object's name is the S3 object's key name.
type AwsS3Data struct {
	// S3 Bucket name (see
	// [Creating a bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)).
	// Required.
	BucketName string `protobuf:"bytes,1,opt,name=bucket_name,json=bucketName,proto3" json:"bucket_name,omitempty"`
	// AWS access key used to sign the API requests to the AWS S3 bucket.
	// Permissions on the bucket must be granted to the access ID of the
	// AWS access key.
	// Required.
	AwsAccessKey *AwsAccessKey `protobuf:"bytes,2,opt,name=aws_access_key,json=awsAccessKey,proto3" json:"aws_access_key,omitempty"`
	// Internal proto-runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage, and Descriptor satisfy the proto.Message
// contract for this generated type.
func (m *AwsS3Data) Reset()         { *m = AwsS3Data{} }
func (m *AwsS3Data) String() string { return proto.CompactTextString(m) }
func (*AwsS3Data) ProtoMessage()    {}
func (*AwsS3Data) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{4}
}

// The XXX_* methods delegate wire-format handling to the table-driven proto runtime.
func (m *AwsS3Data) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_AwsS3Data.Unmarshal(m, b)
}
func (m *AwsS3Data) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_AwsS3Data.Marshal(b, m, deterministic)
}
func (dst *AwsS3Data) XXX_Merge(src proto.Message) {
	xxx_messageInfo_AwsS3Data.Merge(dst, src)
}
func (m *AwsS3Data) XXX_Size() int {
	return xxx_messageInfo_AwsS3Data.Size(m)
}
func (m *AwsS3Data) XXX_DiscardUnknown() {
	xxx_messageInfo_AwsS3Data.DiscardUnknown(m)
}

// xxx_messageInfo_AwsS3Data caches the marshaling tables for this type.
var xxx_messageInfo_AwsS3Data proto.InternalMessageInfo
| |
| func (m *AwsS3Data) GetBucketName() string { |
| if m != nil { |
| return m.BucketName |
| } |
| return "" |
| } |
| |
| func (m *AwsS3Data) GetAwsAccessKey() *AwsAccessKey { |
| if m != nil { |
| return m.AwsAccessKey |
| } |
| return nil |
| } |
| |
// An HttpData specifies a list of objects on the web to be transferred over
// HTTP.  The information of the objects to be transferred is contained in a
// file referenced by a URL. The first line in the file must be
// "TsvHttpData-1.0", which specifies the format of the file.  Subsequent lines
// specify the information of the list of objects, one object per list entry.
// Each entry has the following tab-delimited fields:
//
// * HTTP URL - The location of the object.
//
// * Length - The size of the object in bytes.
//
// * MD5 - The base64-encoded MD5 hash of the object.
//
// For an example of a valid TSV file, see
// [Transferring data from URLs](https://cloud.google.com/storage/transfer/create-url-list).
//
// When transferring data based on a URL list, keep the following in mind:
//
// * When an object located at `http(s)://hostname:port/<URL-path>` is transferred
// to a data sink, the name of the object at the data sink is
// `<hostname>/<URL-path>`.
//
// * If the specified size of an object does not match the actual size of the
// object fetched, the object will not be transferred.
//
// * If the specified MD5 does not match the MD5 computed from the transferred
// bytes, the object transfer will fail. For more information, see
// [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5)
//
// * Ensure that each URL you specify is publicly accessible. For
// example, in Google Cloud Storage you can
// [share an object publicly]
// (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get
// a link to it.
//
// * Storage Transfer Service obeys `robots.txt` rules and requires the source
// HTTP server to support `Range` requests and to return a `Content-Length`
// header in each response.
//
// * [ObjectConditions](#ObjectConditions) have no effect when filtering objects
// to transfer.
type HttpData struct {
	// The URL that points to the file that stores the object list entries.
	// This file must allow public access.  Currently, only URLs with HTTP and
	// HTTPS schemes are supported.
	// Required.
	ListUrl string `protobuf:"bytes,1,opt,name=list_url,json=listUrl,proto3" json:"list_url,omitempty"`
	// Internal proto-runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage, and Descriptor satisfy the proto.Message
// contract for this generated type.
func (m *HttpData) Reset()         { *m = HttpData{} }
func (m *HttpData) String() string { return proto.CompactTextString(m) }
func (*HttpData) ProtoMessage()    {}
func (*HttpData) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{5}
}

// The XXX_* methods delegate wire-format handling to the table-driven proto runtime.
func (m *HttpData) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_HttpData.Unmarshal(m, b)
}
func (m *HttpData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_HttpData.Marshal(b, m, deterministic)
}
func (dst *HttpData) XXX_Merge(src proto.Message) {
	xxx_messageInfo_HttpData.Merge(dst, src)
}
func (m *HttpData) XXX_Size() int {
	return xxx_messageInfo_HttpData.Size(m)
}
func (m *HttpData) XXX_DiscardUnknown() {
	xxx_messageInfo_HttpData.DiscardUnknown(m)
}

// xxx_messageInfo_HttpData caches the marshaling tables for this type.
var xxx_messageInfo_HttpData proto.InternalMessageInfo
| |
| func (m *HttpData) GetListUrl() string { |
| if m != nil { |
| return m.ListUrl |
| } |
| return "" |
| } |
| |
// TransferOptions uses three boolean parameters to define the actions
// to be performed on objects in a transfer.
type TransferOptions struct {
	// Whether overwriting objects that already exist in the sink is allowed.
	OverwriteObjectsAlreadyExistingInSink bool `protobuf:"varint,1,opt,name=overwrite_objects_already_existing_in_sink,json=overwriteObjectsAlreadyExistingInSink,proto3" json:"overwrite_objects_already_existing_in_sink,omitempty"`
	// Whether objects that exist only in the sink should be deleted.  Note that
	// this option and `deleteObjectsFromSourceAfterTransfer` are mutually
	// exclusive.
	DeleteObjectsUniqueInSink bool `protobuf:"varint,2,opt,name=delete_objects_unique_in_sink,json=deleteObjectsUniqueInSink,proto3" json:"delete_objects_unique_in_sink,omitempty"`
	// Whether objects should be deleted from the source after they are
	// transferred to the sink.  Note that this option and
	// `deleteObjectsUniqueInSink` are mutually exclusive.
	DeleteObjectsFromSourceAfterTransfer bool `protobuf:"varint,3,opt,name=delete_objects_from_source_after_transfer,json=deleteObjectsFromSourceAfterTransfer,proto3" json:"delete_objects_from_source_after_transfer,omitempty"`
	// Internal proto-runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage, and Descriptor satisfy the proto.Message
// contract for this generated type.
func (m *TransferOptions) Reset()         { *m = TransferOptions{} }
func (m *TransferOptions) String() string { return proto.CompactTextString(m) }
func (*TransferOptions) ProtoMessage()    {}
func (*TransferOptions) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{6}
}

// The XXX_* methods delegate wire-format handling to the table-driven proto runtime.
func (m *TransferOptions) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_TransferOptions.Unmarshal(m, b)
}
func (m *TransferOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_TransferOptions.Marshal(b, m, deterministic)
}
func (dst *TransferOptions) XXX_Merge(src proto.Message) {
	xxx_messageInfo_TransferOptions.Merge(dst, src)
}
func (m *TransferOptions) XXX_Size() int {
	return xxx_messageInfo_TransferOptions.Size(m)
}
func (m *TransferOptions) XXX_DiscardUnknown() {
	xxx_messageInfo_TransferOptions.DiscardUnknown(m)
}

// xxx_messageInfo_TransferOptions caches the marshaling tables for this type.
var xxx_messageInfo_TransferOptions proto.InternalMessageInfo
| |
| func (m *TransferOptions) GetOverwriteObjectsAlreadyExistingInSink() bool { |
| if m != nil { |
| return m.OverwriteObjectsAlreadyExistingInSink |
| } |
| return false |
| } |
| |
| func (m *TransferOptions) GetDeleteObjectsUniqueInSink() bool { |
| if m != nil { |
| return m.DeleteObjectsUniqueInSink |
| } |
| return false |
| } |
| |
| func (m *TransferOptions) GetDeleteObjectsFromSourceAfterTransfer() bool { |
| if m != nil { |
| return m.DeleteObjectsFromSourceAfterTransfer |
| } |
| return false |
| } |
| |
// Configuration for running a transfer.
type TransferSpec struct {
	// The read source of the data.
	//
	// Types that are valid to be assigned to DataSource:
	//	*TransferSpec_GcsDataSource
	//	*TransferSpec_AwsS3DataSource
	//	*TransferSpec_HttpDataSource
	DataSource isTransferSpec_DataSource `protobuf_oneof:"data_source"`
	// The write sink for the data.
	//
	// Types that are valid to be assigned to DataSink:
	//	*TransferSpec_GcsDataSink
	DataSink isTransferSpec_DataSink `protobuf_oneof:"data_sink"`
	// Only objects that satisfy these object conditions are included in the set
	// of data source and data sink objects.  Object conditions based on
	// objects' `lastModificationTime` do not exclude objects in a data sink.
	ObjectConditions *ObjectConditions `protobuf:"bytes,5,opt,name=object_conditions,json=objectConditions,proto3" json:"object_conditions,omitempty"`
	// If the option `deleteObjectsUniqueInSink` is `true`, object conditions
	// based on objects' `lastModificationTime` are ignored and do not exclude
	// objects in a data source or a data sink.
	TransferOptions *TransferOptions `protobuf:"bytes,6,opt,name=transfer_options,json=transferOptions,proto3" json:"transfer_options,omitempty"`
	// Internal proto-runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage, and Descriptor satisfy the proto.Message
// contract for this generated type.
func (m *TransferSpec) Reset()         { *m = TransferSpec{} }
func (m *TransferSpec) String() string { return proto.CompactTextString(m) }
func (*TransferSpec) ProtoMessage()    {}
func (*TransferSpec) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{7}
}

// The XXX_* methods delegate wire-format handling to the table-driven proto runtime.
func (m *TransferSpec) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_TransferSpec.Unmarshal(m, b)
}
func (m *TransferSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_TransferSpec.Marshal(b, m, deterministic)
}
func (dst *TransferSpec) XXX_Merge(src proto.Message) {
	xxx_messageInfo_TransferSpec.Merge(dst, src)
}
func (m *TransferSpec) XXX_Size() int {
	return xxx_messageInfo_TransferSpec.Size(m)
}
func (m *TransferSpec) XXX_DiscardUnknown() {
	xxx_messageInfo_TransferSpec.DiscardUnknown(m)
}

// xxx_messageInfo_TransferSpec caches the marshaling tables for this type.
var xxx_messageInfo_TransferSpec proto.InternalMessageInfo

// isTransferSpec_DataSource is the interface satisfied by all wrapper types
// that can populate the data_source oneof.
type isTransferSpec_DataSource interface {
	isTransferSpec_DataSource()
}

// TransferSpec_GcsDataSource wraps a Google Cloud Storage source for the
// data_source oneof (field 1 on the wire).
type TransferSpec_GcsDataSource struct {
	GcsDataSource *GcsData `protobuf:"bytes,1,opt,name=gcs_data_source,json=gcsDataSource,proto3,oneof"`
}

// TransferSpec_AwsS3DataSource wraps an AWS S3 source for the data_source
// oneof (field 2 on the wire).
type TransferSpec_AwsS3DataSource struct {
	AwsS3DataSource *AwsS3Data `protobuf:"bytes,2,opt,name=aws_s3_data_source,json=awsS3DataSource,proto3,oneof"`
}

// TransferSpec_HttpDataSource wraps an HTTP URL-list source for the
// data_source oneof (field 3 on the wire).
type TransferSpec_HttpDataSource struct {
	HttpDataSource *HttpData `protobuf:"bytes,3,opt,name=http_data_source,json=httpDataSource,proto3,oneof"`
}

func (*TransferSpec_GcsDataSource) isTransferSpec_DataSource() {}

func (*TransferSpec_AwsS3DataSource) isTransferSpec_DataSource() {}

func (*TransferSpec_HttpDataSource) isTransferSpec_DataSource() {}
| |
| func (m *TransferSpec) GetDataSource() isTransferSpec_DataSource { |
| if m != nil { |
| return m.DataSource |
| } |
| return nil |
| } |
| |
| func (m *TransferSpec) GetGcsDataSource() *GcsData { |
| if x, ok := m.GetDataSource().(*TransferSpec_GcsDataSource); ok { |
| return x.GcsDataSource |
| } |
| return nil |
| } |
| |
| func (m *TransferSpec) GetAwsS3DataSource() *AwsS3Data { |
| if x, ok := m.GetDataSource().(*TransferSpec_AwsS3DataSource); ok { |
| return x.AwsS3DataSource |
| } |
| return nil |
| } |
| |
| func (m *TransferSpec) GetHttpDataSource() *HttpData { |
| if x, ok := m.GetDataSource().(*TransferSpec_HttpDataSource); ok { |
| return x.HttpDataSource |
| } |
| return nil |
| } |
| |
// isTransferSpec_DataSink is the interface satisfied by all wrapper types
// that can populate the data_sink oneof.
type isTransferSpec_DataSink interface {
	isTransferSpec_DataSink()
}

// TransferSpec_GcsDataSink wraps a Google Cloud Storage sink for the
// data_sink oneof (field 4 on the wire).
type TransferSpec_GcsDataSink struct {
	GcsDataSink *GcsData `protobuf:"bytes,4,opt,name=gcs_data_sink,json=gcsDataSink,proto3,oneof"`
}

func (*TransferSpec_GcsDataSink) isTransferSpec_DataSink() {}
| |
| func (m *TransferSpec) GetDataSink() isTransferSpec_DataSink { |
| if m != nil { |
| return m.DataSink |
| } |
| return nil |
| } |
| |
| func (m *TransferSpec) GetGcsDataSink() *GcsData { |
| if x, ok := m.GetDataSink().(*TransferSpec_GcsDataSink); ok { |
| return x.GcsDataSink |
| } |
| return nil |
| } |
| |
| func (m *TransferSpec) GetObjectConditions() *ObjectConditions { |
| if m != nil { |
| return m.ObjectConditions |
| } |
| return nil |
| } |
| |
| func (m *TransferSpec) GetTransferOptions() *TransferOptions { |
| if m != nil { |
| return m.TransferOptions |
| } |
| return nil |
| } |
| |
// XXX_OneofFuncs is for the internal use of the proto package.
// It exposes the marshal/unmarshal/size functions for TransferSpec's
// data_source and data_sink oneofs, plus the wrapper types the runtime may
// instantiate while decoding.
func (*TransferSpec) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _TransferSpec_OneofMarshaler, _TransferSpec_OneofUnmarshaler, _TransferSpec_OneofSizer, []interface{}{
		(*TransferSpec_GcsDataSource)(nil),
		(*TransferSpec_AwsS3DataSource)(nil),
		(*TransferSpec_HttpDataSource)(nil),
		(*TransferSpec_GcsDataSink)(nil),
	}
}
| |
// _TransferSpec_OneofMarshaler encodes whichever variant of each oneof is set:
// for each populated wrapper it writes the field tag (field number << 3
// combined with the bytes wire type) followed by the length-delimited message.
// An unset oneof (nil) writes nothing; an unknown wrapper type is an error.
func _TransferSpec_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*TransferSpec)
	// data_source
	switch x := m.DataSource.(type) {
	case *TransferSpec_GcsDataSource:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.GcsDataSource); err != nil {
			return err
		}
	case *TransferSpec_AwsS3DataSource:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.AwsS3DataSource); err != nil {
			return err
		}
	case *TransferSpec_HttpDataSource:
		b.EncodeVarint(3<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.HttpDataSource); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("TransferSpec.DataSource has unexpected type %T", x)
	}
	// data_sink
	switch x := m.DataSink.(type) {
	case *TransferSpec_GcsDataSink:
		b.EncodeVarint(4<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.GcsDataSink); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("TransferSpec.DataSink has unexpected type %T", x)
	}
	return nil
}
| |
// _TransferSpec_OneofUnmarshaler decodes one oneof field identified by its
// tag number, wrapping the decoded message and storing it on m. The first
// return value reports whether the tag belongs to one of this message's
// oneofs; a wrong wire type for a known tag is reported as an error.
func _TransferSpec_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*TransferSpec)
	switch tag {
	case 1: // data_source.gcs_data_source
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(GcsData)
		err := b.DecodeMessage(msg)
		m.DataSource = &TransferSpec_GcsDataSource{msg}
		return true, err
	case 2: // data_source.aws_s3_data_source
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(AwsS3Data)
		err := b.DecodeMessage(msg)
		m.DataSource = &TransferSpec_AwsS3DataSource{msg}
		return true, err
	case 3: // data_source.http_data_source
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(HttpData)
		err := b.DecodeMessage(msg)
		m.DataSource = &TransferSpec_HttpDataSource{msg}
		return true, err
	case 4: // data_sink.gcs_data_sink
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(GcsData)
		err := b.DecodeMessage(msg)
		m.DataSink = &TransferSpec_GcsDataSink{msg}
		return true, err
	default:
		// Not a oneof field of TransferSpec.
		return false, nil
	}
}
| |
// _TransferSpec_OneofSizer reports the encoded size in bytes of the populated
// oneof variants: one tag byte, a varint holding the payload length, and the
// payload itself for each set field. Unset oneofs contribute nothing.
func _TransferSpec_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*TransferSpec)
	// data_source
	switch x := m.DataSource.(type) {
	case *TransferSpec_GcsDataSource:
		s := proto.Size(x.GcsDataSource)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case *TransferSpec_AwsS3DataSource:
		s := proto.Size(x.AwsS3DataSource)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case *TransferSpec_HttpDataSource:
		s := proto.Size(x.HttpDataSource)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	// data_sink
	switch x := m.DataSink.(type) {
	case *TransferSpec_GcsDataSink:
		s := proto.Size(x.GcsDataSink)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
| |
// Transfers can be scheduled to recur or to run just once.
type Schedule struct {
	// The first day the recurring transfer is scheduled to run. If
	// `scheduleStartDate` is in the past, the transfer will run for the first
	// time on the following day.
	// Required.
	ScheduleStartDate *date.Date `protobuf:"bytes,1,opt,name=schedule_start_date,json=scheduleStartDate,proto3" json:"schedule_start_date,omitempty"`
	// The last day the recurring transfer will be run. If `scheduleEndDate`
	// is the same as `scheduleStartDate`, the transfer will be executed only
	// once.
	ScheduleEndDate *date.Date `protobuf:"bytes,2,opt,name=schedule_end_date,json=scheduleEndDate,proto3" json:"schedule_end_date,omitempty"`
	// The time in UTC at which the transfer will be scheduled to start in a day.
	// Transfers may start later than this time. If not specified, recurring and
	// one-time transfers that are scheduled to run today will run immediately;
	// recurring transfers that are scheduled to run on a future date will start
	// at approximately midnight UTC on that date. Note that when configuring a
	// transfer with the Cloud Platform Console, the transfer's start time in a
	// day is specified in your local timezone.
	StartTimeOfDay *timeofday.TimeOfDay `protobuf:"bytes,3,opt,name=start_time_of_day,json=startTimeOfDay,proto3" json:"start_time_of_day,omitempty"`
	// Internal proto runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
| |
// Standard generated plumbing for Schedule: Reset/String/ProtoMessage
// satisfy proto.Message, and Descriptor locates this message (index 8) in
// the gzipped file descriptor.
func (m *Schedule) Reset()         { *m = Schedule{} }
func (m *Schedule) String() string { return proto.CompactTextString(m) }
func (*Schedule) ProtoMessage()    {}
func (*Schedule) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{8}
}

// XXX_* methods delegate wire-format work to the table-driven proto
// implementation via xxx_messageInfo_Schedule.
func (m *Schedule) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Schedule.Unmarshal(m, b)
}
func (m *Schedule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Schedule.Marshal(b, m, deterministic)
}
func (dst *Schedule) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Schedule.Merge(dst, src)
}
func (m *Schedule) XXX_Size() int {
	return xxx_messageInfo_Schedule.Size(m)
}
func (m *Schedule) XXX_DiscardUnknown() {
	xxx_messageInfo_Schedule.DiscardUnknown(m)
}

// Cached marshal/unmarshal tables for Schedule.
var xxx_messageInfo_Schedule proto.InternalMessageInfo
| |
| func (m *Schedule) GetScheduleStartDate() *date.Date { |
| if m != nil { |
| return m.ScheduleStartDate |
| } |
| return nil |
| } |
| |
| func (m *Schedule) GetScheduleEndDate() *date.Date { |
| if m != nil { |
| return m.ScheduleEndDate |
| } |
| return nil |
| } |
| |
| func (m *Schedule) GetStartTimeOfDay() *timeofday.TimeOfDay { |
| if m != nil { |
| return m.StartTimeOfDay |
| } |
| return nil |
| } |
| |
// This resource represents the configuration of a transfer job that runs
// periodically.
type TransferJob struct {
	// A globally unique name assigned by Storage Transfer Service when the
	// job is created. This field should be left empty in requests to create a new
	// transfer job; otherwise, the requests result in an `INVALID_ARGUMENT`
	// error.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// A description provided by the user for the job. Its max length is 1024
	// bytes when Unicode-encoded.
	Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"`
	// The ID of the Google Cloud Platform Console project that owns the job.
	ProjectId string `protobuf:"bytes,3,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Transfer specification.
	TransferSpec *TransferSpec `protobuf:"bytes,4,opt,name=transfer_spec,json=transferSpec,proto3" json:"transfer_spec,omitempty"`
	// Schedule specification.
	Schedule *Schedule `protobuf:"bytes,5,opt,name=schedule,proto3" json:"schedule,omitempty"`
	// Status of the job. This value MUST be specified for
	// `CreateTransferJobRequests`.
	//
	// NOTE: The effect of the new job status takes place during a subsequent job
	// run. For example, if you change the job status from `ENABLED` to
	// `DISABLED`, and an operation spawned by the transfer is running, the status
	// change would not affect the current operation.
	Status TransferJob_Status `protobuf:"varint,6,opt,name=status,proto3,enum=google.storagetransfer.v1.TransferJob_Status" json:"status,omitempty"`
	// This field cannot be changed by user requests.
	CreationTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=creation_time,json=creationTime,proto3" json:"creation_time,omitempty"`
	// This field cannot be changed by user requests.
	LastModificationTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=last_modification_time,json=lastModificationTime,proto3" json:"last_modification_time,omitempty"`
	// This field cannot be changed by user requests.
	DeletionTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=deletion_time,json=deletionTime,proto3" json:"deletion_time,omitempty"`
	// Internal proto runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
| |
// Standard generated plumbing for TransferJob: Reset/String/ProtoMessage
// satisfy proto.Message, and Descriptor locates this message (index 9) in
// the gzipped file descriptor.
func (m *TransferJob) Reset()         { *m = TransferJob{} }
func (m *TransferJob) String() string { return proto.CompactTextString(m) }
func (*TransferJob) ProtoMessage()    {}
func (*TransferJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{9}
}

// XXX_* methods delegate wire-format work to the table-driven proto
// implementation via xxx_messageInfo_TransferJob.
func (m *TransferJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_TransferJob.Unmarshal(m, b)
}
func (m *TransferJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_TransferJob.Marshal(b, m, deterministic)
}
func (dst *TransferJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_TransferJob.Merge(dst, src)
}
func (m *TransferJob) XXX_Size() int {
	return xxx_messageInfo_TransferJob.Size(m)
}
func (m *TransferJob) XXX_DiscardUnknown() {
	xxx_messageInfo_TransferJob.DiscardUnknown(m)
}

// Cached marshal/unmarshal tables for TransferJob.
var xxx_messageInfo_TransferJob proto.InternalMessageInfo
| |
| func (m *TransferJob) GetName() string { |
| if m != nil { |
| return m.Name |
| } |
| return "" |
| } |
| |
| func (m *TransferJob) GetDescription() string { |
| if m != nil { |
| return m.Description |
| } |
| return "" |
| } |
| |
| func (m *TransferJob) GetProjectId() string { |
| if m != nil { |
| return m.ProjectId |
| } |
| return "" |
| } |
| |
| func (m *TransferJob) GetTransferSpec() *TransferSpec { |
| if m != nil { |
| return m.TransferSpec |
| } |
| return nil |
| } |
| |
| func (m *TransferJob) GetSchedule() *Schedule { |
| if m != nil { |
| return m.Schedule |
| } |
| return nil |
| } |
| |
| func (m *TransferJob) GetStatus() TransferJob_Status { |
| if m != nil { |
| return m.Status |
| } |
| return TransferJob_STATUS_UNSPECIFIED |
| } |
| |
| func (m *TransferJob) GetCreationTime() *timestamp.Timestamp { |
| if m != nil { |
| return m.CreationTime |
| } |
| return nil |
| } |
| |
| func (m *TransferJob) GetLastModificationTime() *timestamp.Timestamp { |
| if m != nil { |
| return m.LastModificationTime |
| } |
| return nil |
| } |
| |
| func (m *TransferJob) GetDeletionTime() *timestamp.Timestamp { |
| if m != nil { |
| return m.DeletionTime |
| } |
| return nil |
| } |
| |
// An entry describing an error that has occurred.
type ErrorLogEntry struct {
	// A URL that refers to the target (a data source, a data sink,
	// or an object) with which the error is associated.
	// Required.
	Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"`
	// A list of messages that carry the error details.
	ErrorDetails []string `protobuf:"bytes,3,rep,name=error_details,json=errorDetails,proto3" json:"error_details,omitempty"`
	// Internal proto runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
| |
// Standard generated plumbing for ErrorLogEntry: Reset/String/ProtoMessage
// satisfy proto.Message, and Descriptor locates this message (index 10) in
// the gzipped file descriptor.
func (m *ErrorLogEntry) Reset()         { *m = ErrorLogEntry{} }
func (m *ErrorLogEntry) String() string { return proto.CompactTextString(m) }
func (*ErrorLogEntry) ProtoMessage()    {}
func (*ErrorLogEntry) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{10}
}

// XXX_* methods delegate wire-format work to the table-driven proto
// implementation via xxx_messageInfo_ErrorLogEntry.
func (m *ErrorLogEntry) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ErrorLogEntry.Unmarshal(m, b)
}
func (m *ErrorLogEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ErrorLogEntry.Marshal(b, m, deterministic)
}
func (dst *ErrorLogEntry) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ErrorLogEntry.Merge(dst, src)
}
func (m *ErrorLogEntry) XXX_Size() int {
	return xxx_messageInfo_ErrorLogEntry.Size(m)
}
func (m *ErrorLogEntry) XXX_DiscardUnknown() {
	xxx_messageInfo_ErrorLogEntry.DiscardUnknown(m)
}

// Cached marshal/unmarshal tables for ErrorLogEntry.
var xxx_messageInfo_ErrorLogEntry proto.InternalMessageInfo
| |
| func (m *ErrorLogEntry) GetUrl() string { |
| if m != nil { |
| return m.Url |
| } |
| return "" |
| } |
| |
| func (m *ErrorLogEntry) GetErrorDetails() []string { |
| if m != nil { |
| return m.ErrorDetails |
| } |
| return nil |
| } |
| |
// A summary of errors by error code, plus a count and sample error log
// entries.
type ErrorSummary struct {
	// The canonical RPC error code shared by the entries summarized here.
	// Required.
	ErrorCode code.Code `protobuf:"varint,1,opt,name=error_code,json=errorCode,proto3,enum=google.rpc.Code" json:"error_code,omitempty"`
	// Count of this type of error.
	// Required.
	ErrorCount int64 `protobuf:"varint,2,opt,name=error_count,json=errorCount,proto3" json:"error_count,omitempty"`
	// Error samples.
	ErrorLogEntries []*ErrorLogEntry `protobuf:"bytes,3,rep,name=error_log_entries,json=errorLogEntries,proto3" json:"error_log_entries,omitempty"`
	// Internal proto runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
| |
// Standard generated plumbing for ErrorSummary: Reset/String/ProtoMessage
// satisfy proto.Message, and Descriptor locates this message (index 11) in
// the gzipped file descriptor.
func (m *ErrorSummary) Reset()         { *m = ErrorSummary{} }
func (m *ErrorSummary) String() string { return proto.CompactTextString(m) }
func (*ErrorSummary) ProtoMessage()    {}
func (*ErrorSummary) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{11}
}

// XXX_* methods delegate wire-format work to the table-driven proto
// implementation via xxx_messageInfo_ErrorSummary.
func (m *ErrorSummary) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ErrorSummary.Unmarshal(m, b)
}
func (m *ErrorSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ErrorSummary.Marshal(b, m, deterministic)
}
func (dst *ErrorSummary) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ErrorSummary.Merge(dst, src)
}
func (m *ErrorSummary) XXX_Size() int {
	return xxx_messageInfo_ErrorSummary.Size(m)
}
func (m *ErrorSummary) XXX_DiscardUnknown() {
	xxx_messageInfo_ErrorSummary.DiscardUnknown(m)
}

// Cached marshal/unmarshal tables for ErrorSummary.
var xxx_messageInfo_ErrorSummary proto.InternalMessageInfo
| |
| func (m *ErrorSummary) GetErrorCode() code.Code { |
| if m != nil { |
| return m.ErrorCode |
| } |
| return code.Code_OK |
| } |
| |
| func (m *ErrorSummary) GetErrorCount() int64 { |
| if m != nil { |
| return m.ErrorCount |
| } |
| return 0 |
| } |
| |
| func (m *ErrorSummary) GetErrorLogEntries() []*ErrorLogEntry { |
| if m != nil { |
| return m.ErrorLogEntries |
| } |
| return nil |
| } |
| |
// A collection of counters that report the progress of a transfer operation.
type TransferCounters struct {
	// Objects found in the data source that are scheduled to be transferred,
	// excluding any that are filtered based on object conditions or skipped due
	// to sync.
	ObjectsFoundFromSource int64 `protobuf:"varint,1,opt,name=objects_found_from_source,json=objectsFoundFromSource,proto3" json:"objects_found_from_source,omitempty"`
	// Bytes found in the data source that are scheduled to be transferred,
	// excluding any that are filtered based on object conditions or skipped due
	// to sync.
	BytesFoundFromSource int64 `protobuf:"varint,2,opt,name=bytes_found_from_source,json=bytesFoundFromSource,proto3" json:"bytes_found_from_source,omitempty"`
	// Objects found only in the data sink that are scheduled to be deleted.
	ObjectsFoundOnlyFromSink int64 `protobuf:"varint,3,opt,name=objects_found_only_from_sink,json=objectsFoundOnlyFromSink,proto3" json:"objects_found_only_from_sink,omitempty"`
	// Bytes found only in the data sink that are scheduled to be deleted.
	BytesFoundOnlyFromSink int64 `protobuf:"varint,4,opt,name=bytes_found_only_from_sink,json=bytesFoundOnlyFromSink,proto3" json:"bytes_found_only_from_sink,omitempty"`
	// Objects in the data source that are not transferred because they already
	// exist in the data sink.
	ObjectsFromSourceSkippedBySync int64 `protobuf:"varint,5,opt,name=objects_from_source_skipped_by_sync,json=objectsFromSourceSkippedBySync,proto3" json:"objects_from_source_skipped_by_sync,omitempty"`
	// Bytes in the data source that are not transferred because they already
	// exist in the data sink.
	BytesFromSourceSkippedBySync int64 `protobuf:"varint,6,opt,name=bytes_from_source_skipped_by_sync,json=bytesFromSourceSkippedBySync,proto3" json:"bytes_from_source_skipped_by_sync,omitempty"`
	// Objects that are copied to the data sink.
	ObjectsCopiedToSink int64 `protobuf:"varint,7,opt,name=objects_copied_to_sink,json=objectsCopiedToSink,proto3" json:"objects_copied_to_sink,omitempty"`
	// Bytes that are copied to the data sink.
	BytesCopiedToSink int64 `protobuf:"varint,8,opt,name=bytes_copied_to_sink,json=bytesCopiedToSink,proto3" json:"bytes_copied_to_sink,omitempty"`
	// Objects that are deleted from the data source.
	ObjectsDeletedFromSource int64 `protobuf:"varint,9,opt,name=objects_deleted_from_source,json=objectsDeletedFromSource,proto3" json:"objects_deleted_from_source,omitempty"`
	// Bytes that are deleted from the data source.
	BytesDeletedFromSource int64 `protobuf:"varint,10,opt,name=bytes_deleted_from_source,json=bytesDeletedFromSource,proto3" json:"bytes_deleted_from_source,omitempty"`
	// Objects that are deleted from the data sink.
	ObjectsDeletedFromSink int64 `protobuf:"varint,11,opt,name=objects_deleted_from_sink,json=objectsDeletedFromSink,proto3" json:"objects_deleted_from_sink,omitempty"`
	// Bytes that are deleted from the data sink.
	BytesDeletedFromSink int64 `protobuf:"varint,12,opt,name=bytes_deleted_from_sink,json=bytesDeletedFromSink,proto3" json:"bytes_deleted_from_sink,omitempty"`
	// Objects in the data source that failed during the transfer.
	ObjectsFromSourceFailed int64 `protobuf:"varint,13,opt,name=objects_from_source_failed,json=objectsFromSourceFailed,proto3" json:"objects_from_source_failed,omitempty"`
	// Bytes in the data source that failed during the transfer.
	BytesFromSourceFailed int64 `protobuf:"varint,14,opt,name=bytes_from_source_failed,json=bytesFromSourceFailed,proto3" json:"bytes_from_source_failed,omitempty"`
	// Objects that failed to be deleted from the data sink.
	ObjectsFailedToDeleteFromSink int64 `protobuf:"varint,15,opt,name=objects_failed_to_delete_from_sink,json=objectsFailedToDeleteFromSink,proto3" json:"objects_failed_to_delete_from_sink,omitempty"`
	// Bytes that failed to be deleted from the data sink.
	BytesFailedToDeleteFromSink int64 `protobuf:"varint,16,opt,name=bytes_failed_to_delete_from_sink,json=bytesFailedToDeleteFromSink,proto3" json:"bytes_failed_to_delete_from_sink,omitempty"`
	// Internal proto runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
| |
// Standard generated plumbing for TransferCounters: Reset/String/ProtoMessage
// satisfy proto.Message, and Descriptor locates this message (index 12) in
// the gzipped file descriptor.
func (m *TransferCounters) Reset()         { *m = TransferCounters{} }
func (m *TransferCounters) String() string { return proto.CompactTextString(m) }
func (*TransferCounters) ProtoMessage()    {}
func (*TransferCounters) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{12}
}

// XXX_* methods delegate wire-format work to the table-driven proto
// implementation via xxx_messageInfo_TransferCounters.
func (m *TransferCounters) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_TransferCounters.Unmarshal(m, b)
}
func (m *TransferCounters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_TransferCounters.Marshal(b, m, deterministic)
}
func (dst *TransferCounters) XXX_Merge(src proto.Message) {
	xxx_messageInfo_TransferCounters.Merge(dst, src)
}
func (m *TransferCounters) XXX_Size() int {
	return xxx_messageInfo_TransferCounters.Size(m)
}
func (m *TransferCounters) XXX_DiscardUnknown() {
	xxx_messageInfo_TransferCounters.DiscardUnknown(m)
}

// Cached marshal/unmarshal tables for TransferCounters.
var xxx_messageInfo_TransferCounters proto.InternalMessageInfo
| |
| func (m *TransferCounters) GetObjectsFoundFromSource() int64 { |
| if m != nil { |
| return m.ObjectsFoundFromSource |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetBytesFoundFromSource() int64 { |
| if m != nil { |
| return m.BytesFoundFromSource |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetObjectsFoundOnlyFromSink() int64 { |
| if m != nil { |
| return m.ObjectsFoundOnlyFromSink |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetBytesFoundOnlyFromSink() int64 { |
| if m != nil { |
| return m.BytesFoundOnlyFromSink |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetObjectsFromSourceSkippedBySync() int64 { |
| if m != nil { |
| return m.ObjectsFromSourceSkippedBySync |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetBytesFromSourceSkippedBySync() int64 { |
| if m != nil { |
| return m.BytesFromSourceSkippedBySync |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetObjectsCopiedToSink() int64 { |
| if m != nil { |
| return m.ObjectsCopiedToSink |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetBytesCopiedToSink() int64 { |
| if m != nil { |
| return m.BytesCopiedToSink |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetObjectsDeletedFromSource() int64 { |
| if m != nil { |
| return m.ObjectsDeletedFromSource |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetBytesDeletedFromSource() int64 { |
| if m != nil { |
| return m.BytesDeletedFromSource |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetObjectsDeletedFromSink() int64 { |
| if m != nil { |
| return m.ObjectsDeletedFromSink |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetBytesDeletedFromSink() int64 { |
| if m != nil { |
| return m.BytesDeletedFromSink |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetObjectsFromSourceFailed() int64 { |
| if m != nil { |
| return m.ObjectsFromSourceFailed |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetBytesFromSourceFailed() int64 { |
| if m != nil { |
| return m.BytesFromSourceFailed |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetObjectsFailedToDeleteFromSink() int64 { |
| if m != nil { |
| return m.ObjectsFailedToDeleteFromSink |
| } |
| return 0 |
| } |
| |
| func (m *TransferCounters) GetBytesFailedToDeleteFromSink() int64 { |
| if m != nil { |
| return m.BytesFailedToDeleteFromSink |
| } |
| return 0 |
| } |
| |
// A description of the execution of a transfer.
type TransferOperation struct {
	// A globally unique ID assigned by the system.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// The ID of the Google Cloud Platform Console project that owns the operation.
	// Required.
	ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Transfer specification.
	// Required.
	TransferSpec *TransferSpec `protobuf:"bytes,3,opt,name=transfer_spec,json=transferSpec,proto3" json:"transfer_spec,omitempty"`
	// Start time of this transfer execution.
	StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
	// End time of this transfer execution.
	EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
	// Status of the transfer operation.
	Status TransferOperation_Status `protobuf:"varint,6,opt,name=status,proto3,enum=google.storagetransfer.v1.TransferOperation_Status" json:"status,omitempty"`
	// Information about the progress of the transfer operation.
	Counters *TransferCounters `protobuf:"bytes,7,opt,name=counters,proto3" json:"counters,omitempty"`
	// Summarizes errors encountered with sample error log entries.
	ErrorBreakdowns []*ErrorSummary `protobuf:"bytes,8,rep,name=error_breakdowns,json=errorBreakdowns,proto3" json:"error_breakdowns,omitempty"`
	// The name of the transfer job that triggers this transfer operation.
	TransferJobName string `protobuf:"bytes,9,opt,name=transfer_job_name,json=transferJobName,proto3" json:"transfer_job_name,omitempty"`
	// Internal proto runtime bookkeeping; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
| |
// Standard generated plumbing for TransferOperation: Reset/String/ProtoMessage
// satisfy proto.Message, and Descriptor locates this message (index 13) in
// the gzipped file descriptor.
func (m *TransferOperation) Reset()         { *m = TransferOperation{} }
func (m *TransferOperation) String() string { return proto.CompactTextString(m) }
func (*TransferOperation) ProtoMessage()    {}
func (*TransferOperation) Descriptor() ([]byte, []int) {
	return fileDescriptor_transfer_types_86dab7ad4ac5c425, []int{13}
}

// XXX_* methods delegate wire-format work to the table-driven proto
// implementation via xxx_messageInfo_TransferOperation.
func (m *TransferOperation) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_TransferOperation.Unmarshal(m, b)
}
func (m *TransferOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_TransferOperation.Marshal(b, m, deterministic)
}
func (dst *TransferOperation) XXX_Merge(src proto.Message) {
	xxx_messageInfo_TransferOperation.Merge(dst, src)
}
func (m *TransferOperation) XXX_Size() int {
	return xxx_messageInfo_TransferOperation.Size(m)
}
func (m *TransferOperation) XXX_DiscardUnknown() {
	xxx_messageInfo_TransferOperation.DiscardUnknown(m)
}

// Cached marshal/unmarshal tables for TransferOperation.
var xxx_messageInfo_TransferOperation proto.InternalMessageInfo
| |
| func (m *TransferOperation) GetName() string { |
| if m != nil { |
| return m.Name |
| } |
| return "" |
| } |
| |
| func (m *TransferOperation) GetProjectId() string { |
| if m != nil { |
| return m.ProjectId |
| } |
| return "" |
| } |
| |
| func (m *TransferOperation) GetTransferSpec() *TransferSpec { |
| if m != nil { |
| return m.TransferSpec |
| } |
| return nil |
| } |
| |
| func (m *TransferOperation) GetStartTime() *timestamp.Timestamp { |
| if m != nil { |
| return m.StartTime |
| } |
| return nil |
| } |
| |
| func (m *TransferOperation) GetEndTime() *timestamp.Timestamp { |
| if m != nil { |
| return m.EndTime |
| } |
| return nil |
| } |
| |
| func (m *TransferOperation) GetStatus() TransferOperation_Status { |
| if m != nil { |
| return m.Status |
| } |
| return TransferOperation_STATUS_UNSPECIFIED |
| } |
| |
| func (m *TransferOperation) GetCounters() *TransferCounters { |
| if m != nil { |
| return m.Counters |
| } |
| return nil |
| } |
| |
| func (m *TransferOperation) GetErrorBreakdowns() []*ErrorSummary { |
| if m != nil { |
| return m.ErrorBreakdowns |
| } |
| return nil |
| } |
| |
| func (m *TransferOperation) GetTransferJobName() string { |
| if m != nil { |
| return m.TransferJobName |
| } |
| return "" |
| } |
| |
// init registers every message type and enum in this file with the proto
// registry under its fully-qualified proto name, so reflection-based
// machinery (Any unpacking, registry lookups) can resolve them.
func init() {
	proto.RegisterType((*GoogleServiceAccount)(nil), "google.storagetransfer.v1.GoogleServiceAccount")
	proto.RegisterType((*AwsAccessKey)(nil), "google.storagetransfer.v1.AwsAccessKey")
	proto.RegisterType((*ObjectConditions)(nil), "google.storagetransfer.v1.ObjectConditions")
	proto.RegisterType((*GcsData)(nil), "google.storagetransfer.v1.GcsData")
	proto.RegisterType((*AwsS3Data)(nil), "google.storagetransfer.v1.AwsS3Data")
	proto.RegisterType((*HttpData)(nil), "google.storagetransfer.v1.HttpData")
	proto.RegisterType((*TransferOptions)(nil), "google.storagetransfer.v1.TransferOptions")
	proto.RegisterType((*TransferSpec)(nil), "google.storagetransfer.v1.TransferSpec")
	proto.RegisterType((*Schedule)(nil), "google.storagetransfer.v1.Schedule")
	proto.RegisterType((*TransferJob)(nil), "google.storagetransfer.v1.TransferJob")
	proto.RegisterType((*ErrorLogEntry)(nil), "google.storagetransfer.v1.ErrorLogEntry")
	proto.RegisterType((*ErrorSummary)(nil), "google.storagetransfer.v1.ErrorSummary")
	proto.RegisterType((*TransferCounters)(nil), "google.storagetransfer.v1.TransferCounters")
	proto.RegisterType((*TransferOperation)(nil), "google.storagetransfer.v1.TransferOperation")
	proto.RegisterEnum("google.storagetransfer.v1.TransferJob_Status", TransferJob_Status_name, TransferJob_Status_value)
	proto.RegisterEnum("google.storagetransfer.v1.TransferOperation_Status", TransferOperation_Status_name, TransferOperation_Status_value)
}
| |
// init registers the gzipped file descriptor for this .proto file with the
// proto registry, keyed by its source path.
func init() {
	proto.RegisterFile("google/storagetransfer/v1/transfer_types.proto", fileDescriptor_transfer_types_86dab7ad4ac5c425)
}
| |
| var fileDescriptor_transfer_types_86dab7ad4ac5c425 = []byte{ |
| // 1767 bytes of a gzipped FileDescriptorProto |
| 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x58, 0xdd, 0x6e, 0xdb, 0xc8, |
| 0x15, 0x8e, 0x24, 0xc7, 0x96, 0x8e, 0x24, 0x4b, 0x9e, 0xcd, 0x7a, 0x65, 0x27, 0xd9, 0xa4, 0xf4, |
| 0x2e, 0x92, 0xcd, 0xa2, 0x12, 0x62, 0x63, 0x51, 0xa4, 0xc1, 0x36, 0x95, 0x2d, 0xd9, 0xd6, 0xc6, |
| 0x1b, 0x1b, 0xa4, 0xbc, 0xfd, 0x41, 0x50, 0x62, 0x44, 0x8e, 0x14, 0xae, 0x29, 0x0e, 0xcb, 0x19, |
| 0xc5, 0x16, 0x7a, 0xd5, 0xab, 0x3e, 0x43, 0x5f, 0xa1, 0x0f, 0xd0, 0x37, 0xe8, 0x4d, 0x51, 0xa0, |
| 0x77, 0x7d, 0x8e, 0x5e, 0xf6, 0xb2, 0x98, 0x1f, 0x52, 0x14, 0x2d, 0xcb, 0x06, 0xf6, 0x8e, 0x3c, |
| 0xe7, 0xfb, 0xbe, 0x33, 0x33, 0xe7, 0xe8, 0x9c, 0xa1, 0xa0, 0x39, 0xa2, 0x74, 0xe4, 0x93, 0x16, |
| 0xe3, 0x34, 0xc2, 0x23, 0xc2, 0x23, 0x1c, 0xb0, 0x21, 0x89, 0x5a, 0x1f, 0x5f, 0xb6, 0xe2, 0x67, |
| 0x9b, 0x4f, 0x43, 0xc2, 0x9a, 0x61, 0x44, 0x39, 0x45, 0x5b, 0x0a, 0xdf, 0xcc, 0xe0, 0x9b, 0x1f, |
| 0x5f, 0x6e, 0x3f, 0xd2, 0x52, 0x38, 0xf4, 0x5a, 0x38, 0x08, 0x28, 0xc7, 0xdc, 0xa3, 0x81, 0x26, |
| 0x6e, 0x7f, 0xae, 0xbd, 0xf2, 0x6d, 0x30, 0x19, 0xb6, 0xdc, 0x49, 0x24, 0x01, 0xda, 0xff, 0x24, |
| 0xeb, 0xe7, 0xde, 0x98, 0x30, 0x8e, 0xc7, 0xa1, 0x06, 0x7c, 0xaa, 0x01, 0x51, 0xe8, 0xb4, 0x1c, |
| 0xea, 0x12, 0x6d, 0xde, 0xd4, 0x66, 0xb1, 0xc8, 0x96, 0x8b, 0x79, 0x6c, 0x7f, 0x98, 0xb6, 0x0b, |
| 0x2d, 0x3a, 0x74, 0xf1, 0x54, 0x39, 0x8d, 0xd7, 0xf0, 0xe0, 0x48, 0xba, 0x2d, 0x12, 0x7d, 0xf4, |
| 0x1c, 0xd2, 0x76, 0x1c, 0x3a, 0x09, 0x38, 0xda, 0x81, 0x2a, 0x56, 0x8f, 0x36, 0x19, 0x63, 0xcf, |
| 0x6f, 0xe4, 0x9e, 0xe6, 0x9e, 0x97, 0xcc, 0x8a, 0x36, 0x76, 0x85, 0xcd, 0xf8, 0x03, 0x54, 0xda, |
| 0x97, 0xac, 0xed, 0x38, 0x84, 0xb1, 0xb7, 0x64, 0x8a, 0x0c, 0x49, 0x22, 0x8c, 0xd9, 0x17, 0x64, |
| 0x6a, 0x7b, 0xae, 0x26, 0x95, 0x71, 0x8c, 0xe8, 0xb9, 0xe8, 0x05, 0x6c, 0x30, 0xe2, 0x44, 0x84, |
| 0xdb, 0x33, 0x68, 0x23, 0x2f, 0x71, 0x35, 0xe5, 0x48, 0xf4, 0x8c, 0x7f, 0xe5, 0xa1, 0x7e, 0x3a, |
| 0xf8, 0x91, 0x38, 0xfc, 0x80, 0x06, 0xae, 0x27, 0x0f, 0x11, 0x85, 0xf0, 0x7c, 0xec, 0x05, 0xb6, |
| 0xd8, 0x88, 0x4d, 0x7c, 0x1c, 0x32, 0xe2, 0xda, 0xcc, 0x0b, 0x1c, 0x62, 0xfb, 0x98, 0x71, 0x7b, |
| 0x4c, 0x5d, 0x6f, 0xe8, 0x39, 0xf2, 0x40, 0x65, 0xfc, 0xf2, 0xee, 0x96, 0x4e, 0x6d, 0x33, 0x3e, |
| 0xd1, 0x66, 0x47, 0x9f, 0xb8, 0xb9, 0x33, 0xf6, 0x82, 0xbe, 0x37, 0x26, 0x5d, 0x25, 0x64, 0x09, |
| 0x9d, 0x13, 0xcc, 0xf8, 0xf7, 0x29, 0x15, 0x19, 0x11, 0x5f, 0xdd, 0x2d, 0x62, 0xfe, 0xf6, 0x88, |
| 0xf8, 0xea, 0xd6, 0x88, 0x5f, 0x41, 0xdd, 0x0b, 0x1c, 0x7f, 0xe2, 0x12, 0x3b, 0x8c, 0xc8, 0xd0, |
| 0xbb, 0x22, 0xac, 0x51, 0x78, 0x5a, 0x10, 0x67, 0xa4, 0xed, 0x67, 0xda, 0x2c, 0xa0, 0xe4, 0x2a, |
| 0x03, 0x5d, 0x51, 0x50, 0x6d, 0x8f, 0xa1, 0xc6, 0x0b, 0x58, 0x3b, 0x72, 0x58, 0x07, 0x73, 0x8c, |
| 0x9e, 0x40, 0x79, 0x30, 0x71, 0x2e, 0x08, 0xb7, 0x03, 0x3c, 0x26, 0x3a, 0x4f, 0xa0, 0x4c, 0xef, |
| 0xf0, 0x98, 0x18, 0x7f, 0x82, 0x52, 0xfb, 0x92, 0x59, 0x7b, 0x77, 0x42, 0xa3, 0xef, 0x61, 0x1d, |
| 0x5f, 0xb2, 0x6c, 0x46, 0xcb, 0xbb, 0xcf, 0x9a, 0x37, 0xfe, 0x48, 0x9a, 0xe9, 0xca, 0x31, 0x2b, |
| 0x38, 0xf5, 0x66, 0x7c, 0x09, 0xc5, 0x63, 0xce, 0x43, 0x19, 0x7b, 0x0b, 0x8a, 0xbe, 0xc7, 0xb8, |
| 0x3d, 0x89, 0xe2, 0x1a, 0x5c, 0x13, 0xef, 0xe7, 0x91, 0x6f, 0xfc, 0x25, 0x0f, 0xb5, 0xbe, 0x56, |
| 0x3c, 0x0d, 0x55, 0x75, 0xfc, 0x0e, 0x5e, 0xd0, 0x8f, 0x24, 0xba, 0x8c, 0x3c, 0x4e, 0x6c, 0x2a, |
| 0x6b, 0x87, 0xd9, 0xd8, 0x8f, 0x08, 0x76, 0xa7, 0x36, 0xb9, 0xf2, 0x18, 0xf7, 0x82, 0x91, 0xed, |
| 0x05, 0x22, 0x81, 0x17, 0x52, 0xb0, 0x68, 0x7e, 0x99, 0x30, 0x54, 0xb1, 0xb1, 0xb6, 0xc2, 0x77, |
| 0x35, 0xbc, 0x17, 0x58, 0x5e, 0x70, 0x81, 0x7e, 0x0d, 0x8f, 0x5d, 0xe2, 0x93, 0x94, 0xee, 0x24, |
| 0xf0, 0xfe, 0x38, 0x21, 0x89, 0x5a, 0x5e, 0xaa, 0x6d, 0x29, 0x90, 0x96, 0x3a, 0x97, 0x10, 0xad, |
| 0xf0, 0x1b, 0xf8, 0x2a, 0xa3, 0x30, 0x8c, 0xe8, 0xd8, 0x66, 0x74, 0x12, 0x39, 0xc4, 0xc6, 0x43, |
| 0x2e, 0x5a, 0x8c, 0xde, 0x50, 0xa3, 0x20, 0xd5, 0xbe, 0x98, 0x53, 0x3b, 0x8c, 0xe8, 0xd8, 0x92, |
| 0xe8, 0xb6, 0x00, 0xc7, 0x9b, 0x37, 0xfe, 0xba, 0x02, 0x95, 0xf8, 0xc5, 0x0a, 0x89, 0x83, 0x4e, |
| 0xa0, 0x36, 0x72, 0x98, 0xed, 0x62, 0x8e, 0xb5, 0xbc, 0xfe, 0x2d, 0x18, 0x4b, 0x32, 0xa2, 0x8b, |
| 0xe3, 0xf8, 0x9e, 0x59, 0x1d, 0xa9, 0x47, 0x15, 0x0b, 0x59, 0x80, 0x44, 0x7a, 0xd9, 0xde, 0x9c, |
| 0xa0, 0x4a, 0xf1, 0x17, 0xcb, 0x53, 0xac, 0x2a, 0xe8, 0xf8, 0x9e, 0x59, 0xc3, 0xf1, 0x8b, 0x16, |
| 0x3d, 0x85, 0xfa, 0x07, 0xce, 0xc3, 0x39, 0xc9, 0x82, 0x94, 0xdc, 0x59, 0x22, 0x19, 0xd7, 0xc5, |
| 0xf1, 0x3d, 0x73, 0xfd, 0x83, 0x7e, 0xd6, 0x82, 0xc7, 0x50, 0x9d, 0xed, 0x59, 0xe4, 0x63, 0xe5, |
| 0xce, 0x3b, 0xce, 0x99, 0xe5, 0x78, 0xc7, 0x22, 0x4f, 0xbf, 0x85, 0x0d, 0x95, 0x20, 0xdb, 0x49, |
| 0xfa, 0x4e, 0xe3, 0xbe, 0x54, 0xfb, 0x7a, 0x89, 0x5a, 0xb6, 0x55, 0x99, 0x75, 0x9a, 0x6d, 0x5e, |
| 0xe7, 0x50, 0x4f, 0x86, 0x09, 0x55, 0x25, 0xdb, 0x58, 0x95, 0xc2, 0x2f, 0x96, 0x08, 0x67, 0x8a, |
| 0xdc, 0xac, 0xf1, 0x79, 0xc3, 0x7e, 0x15, 0xca, 0xa9, 0x63, 0xdc, 0x2f, 0x43, 0x29, 0x39, 0x05, |
| 0xe3, 0xdf, 0x39, 0x28, 0x5a, 0xce, 0x07, 0xe2, 0x4e, 0x7c, 0x82, 0xda, 0xf0, 0x09, 0xd3, 0xcf, |
| 0x36, 0xe3, 0x38, 0xe2, 0xe2, 0xb8, 0xe2, 0xda, 0xd8, 0x88, 0x97, 0x20, 0x26, 0x45, 0xb3, 0x83, |
| 0x39, 0x31, 0x37, 0x62, 0xb4, 0x25, 0xc0, 0xc2, 0x84, 0xbe, 0x85, 0xc4, 0x68, 0x93, 0xc0, 0x55, |
| 0x02, 0xf9, 0x9b, 0x04, 0x6a, 0x31, 0xb6, 0x1b, 0xb8, 0x92, 0xde, 0x86, 0x0d, 0x15, 0x58, 0xb6, |
| 0x53, 0x3a, 0xb4, 0x5d, 0x3c, 0xd5, 0x79, 0xdf, 0x9c, 0xa3, 0x8b, 0x26, 0x79, 0x3a, 0xec, 0xe0, |
| 0xa9, 0xb9, 0x2e, 0x09, 0xc9, 0xbb, 0xf1, 0x9f, 0x15, 0x28, 0xc7, 0x47, 0xf2, 0x1d, 0x1d, 0x20, |
| 0x04, 0x2b, 0xa9, 0xbe, 0x24, 0x9f, 0xd1, 0x53, 0x28, 0xbb, 0x84, 0x39, 0x91, 0x17, 0x26, 0x6d, |
| 0xb9, 0x64, 0xa6, 0x4d, 0xe8, 0x31, 0x40, 0x18, 0x51, 0x99, 0x65, 0xcf, 0x95, 0x2b, 0x28, 0x99, |
| 0x25, 0x6d, 0xe9, 0xb9, 0xe8, 0x04, 0xaa, 0x49, 0xa6, 0x58, 0x48, 0x1c, 0x5d, 0x4d, 0xcf, 0xee, |
| 0x90, 0x26, 0xf1, 0x0b, 0x34, 0x2b, 0x3c, 0xfd, 0x7b, 0x7c, 0x03, 0xc5, 0xf8, 0x20, 0x74, 0x21, |
| 0x2d, 0x2b, 0xf2, 0x38, 0x5d, 0x66, 0x42, 0x42, 0x5d, 0x58, 0x65, 0x1c, 0xf3, 0x89, 0x2a, 0x97, |
| 0xf5, 0xdd, 0x9f, 0xdf, 0x61, 0x1d, 0xdf, 0xd1, 0x41, 0xd3, 0x92, 0x24, 0x53, 0x93, 0xd1, 0x1b, |
| 0xa8, 0x3a, 0x11, 0x91, 0x43, 0x46, 0x26, 0xa0, 0xb1, 0x26, 0x17, 0xb3, 0x7d, 0x6d, 0x5e, 0xf5, |
| 0xe3, 0x3b, 0x87, 0x59, 0x89, 0x09, 0xc2, 0x84, 0xce, 0x60, 0xf3, 0xda, 0xd0, 0x53, 0x4a, 0xc5, |
| 0x5b, 0x95, 0x1e, 0xf8, 0x99, 0x39, 0x27, 0x15, 0xdf, 0x40, 0x55, 0xf6, 0xb8, 0x44, 0xa8, 0x74, |
| 0xfb, 0x92, 0x62, 0x82, 0x30, 0x19, 0xc7, 0xb0, 0xaa, 0x76, 0x89, 0x36, 0x01, 0x59, 0xfd, 0x76, |
| 0xff, 0xdc, 0xb2, 0xcf, 0xdf, 0x59, 0x67, 0xdd, 0x83, 0xde, 0x61, 0xaf, 0xdb, 0xa9, 0xdf, 0x43, |
| 0x65, 0x58, 0xeb, 0xbe, 0x6b, 0xef, 0x9f, 0x74, 0x3b, 0xf5, 0x1c, 0xaa, 0x40, 0xb1, 0xd3, 0xb3, |
| 0xd4, 0x5b, 0x5e, 0xb8, 0x3a, 0xdd, 0x93, 0x6e, 0xbf, 0xdb, 0xa9, 0x17, 0x8c, 0x43, 0xa8, 0x76, |
| 0xa3, 0x88, 0x46, 0x27, 0x74, 0xd4, 0x0d, 0x78, 0x34, 0x45, 0x75, 0x28, 0xcc, 0xe6, 0x8e, 0x78, |
| 0x14, 0xf7, 0x22, 0x22, 0x20, 0xb6, 0x4b, 0x38, 0xf6, 0xfc, 0x78, 0x2c, 0x57, 0xa4, 0xb1, 0xa3, |
| 0x6c, 0xc6, 0xdf, 0x73, 0x50, 0x91, 0x42, 0xd6, 0x64, 0x3c, 0xc6, 0xd1, 0x14, 0xb5, 0x00, 0x14, |
| 0x4b, 0x5c, 0xd7, 0xa4, 0xdc, 0xfa, 0x6e, 0x3d, 0xde, 0x60, 0x14, 0x3a, 0xcd, 0x03, 0xea, 0x12, |
| 0xb3, 0x24, 0x31, 0xe2, 0x51, 0x4c, 0xdc, 0x98, 0x30, 0x09, 0xb8, 0x2c, 0xdf, 0x82, 0x09, 0xda, |
| 0x2f, 0xee, 0x67, 0x7d, 0xd8, 0x50, 0x00, 0x9f, 0x8e, 0x6c, 0x12, 0xf0, 0xc8, 0xd3, 0x57, 0x84, |
| 0xf2, 0xee, 0xf3, 0x25, 0xa5, 0x31, 0xb7, 0x3d, 0xb3, 0x46, 0x52, 0xaf, 0x1e, 0x61, 0xc6, 0x7f, |
| 0xd7, 0xa0, 0x1e, 0x57, 0x8f, 0x8c, 0x43, 0x22, 0x86, 0x5e, 0xc1, 0x56, 0x32, 0xae, 0xe8, 0x24, |
| 0x70, 0xd3, 0x43, 0x4b, 0xee, 0xa5, 0x60, 0x6e, 0x6a, 0xc0, 0xa1, 0xf0, 0xcf, 0x86, 0x14, 0xfa, |
| 0x06, 0x3e, 0x1b, 0x4c, 0x39, 0x59, 0x44, 0x54, 0x5b, 0x7a, 0x20, 0xdd, 0x59, 0xda, 0xaf, 0xe0, |
| 0xd1, 0x7c, 0x44, 0x1a, 0xf8, 0x53, 0xcd, 0x16, 0x8d, 0xbd, 0x20, 0xb9, 0x8d, 0x74, 0xd0, 0xd3, |
| 0xc0, 0x9f, 0x4a, 0x05, 0xd1, 0xbf, 0x7f, 0x09, 0xdb, 0xe9, 0xb0, 0x19, 0xf6, 0x8a, 0x5a, 0xf2, |
| 0x2c, 0xf2, 0x1c, 0xf7, 0x2d, 0xec, 0x2c, 0x1a, 0xce, 0xec, 0xc2, 0x0b, 0x43, 0xe2, 0xda, 0x83, |
| 0xa9, 0xcd, 0xa6, 0x81, 0x23, 0x7f, 0xc4, 0x05, 0xf3, 0x73, 0x9a, 0x9d, 0xcb, 0x96, 0xc2, 0xed, |
| 0x4f, 0xad, 0x69, 0xe0, 0xa0, 0x23, 0xf8, 0x99, 0x5e, 0xc8, 0x12, 0xa9, 0x55, 0x29, 0xf5, 0x48, |
| 0xad, 0xe7, 0x06, 0xa1, 0x3d, 0x88, 0x8f, 0xd8, 0x76, 0x68, 0xe8, 0x11, 0xd7, 0xe6, 0x54, 0xed, |
| 0x66, 0x4d, 0xb2, 0x3f, 0xd1, 0xde, 0x03, 0xe9, 0xec, 0x53, 0xb9, 0x95, 0x16, 0xa8, 0xe3, 0xcd, |
| 0x52, 0x8a, 0x92, 0xb2, 0x21, 0x7d, 0x73, 0x84, 0x6f, 0xe1, 0x61, 0x1c, 0x45, 0x5d, 0x3b, 0xe6, |
| 0x53, 0x56, 0x9a, 0x3b, 0xf6, 0x8e, 0x42, 0xa4, 0xd2, 0xf6, 0x0a, 0xb6, 0x54, 0xbc, 0x45, 0x64, |
| 0x48, 0x9d, 0xfa, 0x42, 0xea, 0xe2, 0xc8, 0x62, 0xbd, 0xe5, 0xb9, 0x1a, 0x4b, 0x93, 0xc5, 0xa2, |
| 0x93, 0x1a, 0xbb, 0x4e, 0xac, 0xa4, 0x6a, 0x2c, 0x4b, 0x7b, 0x0d, 0xdb, 0x8b, 0xf2, 0x3c, 0xc4, |
| 0x9e, 0x4f, 0xdc, 0x46, 0x55, 0x32, 0x3f, 0xbb, 0x96, 0xde, 0x43, 0xe9, 0x46, 0xbf, 0x80, 0xc6, |
| 0xf5, 0xbc, 0x6a, 0xea, 0xba, 0xa4, 0x7e, 0x9a, 0x49, 0xa7, 0x26, 0xf6, 0xc0, 0x48, 0xa2, 0x4a, |
| 0x8b, 0x48, 0x8a, 0xbe, 0x13, 0xce, 0xd6, 0x5d, 0x93, 0x12, 0x8f, 0xe3, 0xe8, 0x12, 0xd8, 0xa7, |
| 0x6a, 0x07, 0xc9, 0x06, 0xba, 0xf0, 0x54, 0xaf, 0xe1, 0x66, 0xa1, 0xba, 0x14, 0x7a, 0xa8, 0xd6, |
| 0xb2, 0x50, 0xc6, 0xf8, 0xf3, 0x7d, 0xd8, 0x98, 0xdd, 0x2f, 0x88, 0xfa, 0x4a, 0x59, 0x38, 0x52, |
| 0xe7, 0x07, 0x66, 0xfe, 0xd6, 0x81, 0x59, 0xf8, 0x29, 0x03, 0xf3, 0x15, 0xc0, 0xec, 0x9a, 0xa0, |
| 0x67, 0xef, 0xb2, 0x91, 0x50, 0x4a, 0xee, 0x08, 0xe8, 0x1b, 0x28, 0x8a, 0x7b, 0x89, 0x24, 0xde, |
| 0xbf, 0x95, 0xb8, 0x46, 0x02, 0x57, 0xd2, 0xde, 0x66, 0x26, 0xec, 0xde, 0x9d, 0x2e, 0x64, 0xfa, |
| 0xc0, 0xb2, 0x73, 0xf6, 0x08, 0x8a, 0x8e, 0xee, 0x9f, 0x7a, 0xc4, 0x7e, 0x7d, 0x07, 0xb9, 0xb8, |
| 0xe5, 0x9a, 0x09, 0x19, 0x99, 0x50, 0x57, 0x7d, 0x7e, 0x10, 0x11, 0x7c, 0xe1, 0xd2, 0xcb, 0x80, |
| 0x35, 0x8a, 0xb2, 0xcd, 0x3f, 0xbb, 0xad, 0xcd, 0xeb, 0xe1, 0xa3, 0xbb, 0xfc, 0x7e, 0xc2, 0x17, |
| 0x9f, 0xe0, 0x49, 0xa6, 0x7e, 0xa4, 0x03, 0xf5, 0x51, 0x57, 0x52, 0x9f, 0xe0, 0x7c, 0x76, 0x77, |
| 0x90, 0xdf, 0x81, 0xce, 0xad, 0xc3, 0xb5, 0x06, 0xe5, 0xde, 0x3b, 0xfb, 0xcc, 0x3c, 0x3d, 0x32, |
| 0xbb, 0x96, 0x55, 0xcf, 0x21, 0x80, 0xd5, 0xb3, 0xf6, 0xb9, 0x15, 0x8f, 0x57, 0xeb, 0xfc, 0xe0, |
| 0x40, 0x38, 0x0a, 0xc2, 0x71, 0xd8, 0xee, 0x89, 0xb9, 0xbb, 0x22, 0x1c, 0xed, 0xfd, 0x53, 0x53, |
| 0xcc, 0xdd, 0xfb, 0xfb, 0xff, 0xc8, 0xc1, 0x8e, 0x43, 0xc7, 0x4b, 0x36, 0x24, 0x13, 0xb7, 0x5f, |
| 0x8d, 0x0f, 0xaa, 0x3f, 0x0d, 0x09, 0xfb, 0xfd, 0xb1, 0xc6, 0x8f, 0xa8, 0x8f, 0x83, 0x51, 0x93, |
| 0x46, 0xa3, 0xd6, 0x88, 0x04, 0x12, 0xda, 0x52, 0x2e, 0x1c, 0x7a, 0x6c, 0xc1, 0x1f, 0x3a, 0xaf, |
| 0x33, 0xa6, 0xff, 0xe5, 0x72, 0x7f, 0xcb, 0x3f, 0x51, 0xff, 0x83, 0x34, 0x0f, 0x7c, 0x3a, 0x71, |
| 0x9b, 0x96, 0x42, 0xc4, 0x01, 0x9b, 0x3f, 0xbc, 0xfc, 0x67, 0x8c, 0x78, 0x2f, 0x11, 0xef, 0x33, |
| 0x88, 0xf7, 0x3f, 0xbc, 0x1c, 0xac, 0xca, 0xd8, 0x7b, 0xff, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x0c, |
| 0xec, 0x5b, 0x90, 0x4b, 0x12, 0x00, 0x00, |
| } |