// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/cloud/language/v1beta2/language_service.proto
package language // import "google.golang.org/genproto/googleapis/cloud/language/v1beta2"
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "github.com/golang/protobuf/ptypes/timestamp"
import _ "google.golang.org/genproto/googleapis/api/annotations"
import _ "google.golang.org/genproto/googleapis/longrunning"
import _ "google.golang.org/genproto/googleapis/rpc/status"
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// Represents the text encoding that the caller uses to process the output.
// Providing an `EncodingType` is recommended because the API provides the
// beginning offsets for various outputs, such as tokens and mentions, and
// languages that natively use different text encodings may access offsets
// differently.
type EncodingType int32
const (
// If `EncodingType` is not specified, encoding-dependent information (such as
// `begin_offset`) will be set at `-1`.
EncodingType_NONE EncodingType = 0
// Encoding-dependent information (such as `begin_offset`) is calculated based
// on the UTF-8 encoding of the input. C++ and Go are examples of languages
// that use this encoding natively.
EncodingType_UTF8 EncodingType = 1
// Encoding-dependent information (such as `begin_offset`) is calculated based
// on the UTF-16 encoding of the input. Java and JavaScript are examples of
// languages that use this encoding natively.
EncodingType_UTF16 EncodingType = 2
// Encoding-dependent information (such as `begin_offset`) is calculated based
// on the UTF-32 encoding of the input. Python is an example of a language
// that uses this encoding natively.
EncodingType_UTF32 EncodingType = 3
)
var EncodingType_name = map[int32]string{
0: "NONE",
1: "UTF8",
2: "UTF16",
3: "UTF32",
}
var EncodingType_value = map[string]int32{
"NONE": 0,
"UTF8": 1,
"UTF16": 2,
"UTF32": 3,
}
func (x EncodingType) String() string {
return proto.EnumName(EncodingType_name, int32(x))
}
func (EncodingType) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{0}
}
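// Usage sketch (not generated code; a minimal illustration only): Go strings are
// UTF-8, so a Go caller that wants usable begin offsets typically requests
// EncodingType_UTF8. With EncodingType_NONE, encoding-dependent offsets come
// back as -1, as documented above.
func exampleEncodingTypeForGo() EncodingType {
	return EncodingType_UTF8
}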
// The document types enum.
type Document_Type int32
const (
// The content type is not specified.
Document_TYPE_UNSPECIFIED Document_Type = 0
// Plain text
Document_PLAIN_TEXT Document_Type = 1
// HTML
Document_HTML Document_Type = 2
)
var Document_Type_name = map[int32]string{
0: "TYPE_UNSPECIFIED",
1: "PLAIN_TEXT",
2: "HTML",
}
var Document_Type_value = map[string]int32{
"TYPE_UNSPECIFIED": 0,
"PLAIN_TEXT": 1,
"HTML": 2,
}
func (x Document_Type) String() string {
return proto.EnumName(Document_Type_name, int32(x))
}
func (Document_Type) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{0, 0}
}
// The type of the entity.
type Entity_Type int32
const (
// Unknown
Entity_UNKNOWN Entity_Type = 0
// Person
Entity_PERSON Entity_Type = 1
// Location
Entity_LOCATION Entity_Type = 2
// Organization
Entity_ORGANIZATION Entity_Type = 3
// Event
Entity_EVENT Entity_Type = 4
// Work of art
Entity_WORK_OF_ART Entity_Type = 5
// Consumer goods
Entity_CONSUMER_GOOD Entity_Type = 6
// Other types
Entity_OTHER Entity_Type = 7
)
var Entity_Type_name = map[int32]string{
0: "UNKNOWN",
1: "PERSON",
2: "LOCATION",
3: "ORGANIZATION",
4: "EVENT",
5: "WORK_OF_ART",
6: "CONSUMER_GOOD",
7: "OTHER",
}
var Entity_Type_value = map[string]int32{
"UNKNOWN": 0,
"PERSON": 1,
"LOCATION": 2,
"ORGANIZATION": 3,
"EVENT": 4,
"WORK_OF_ART": 5,
"CONSUMER_GOOD": 6,
"OTHER": 7,
}
func (x Entity_Type) String() string {
return proto.EnumName(Entity_Type_name, int32(x))
}
func (Entity_Type) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{2, 0}
}
// The part of speech tags enum.
type PartOfSpeech_Tag int32
const (
// Unknown
PartOfSpeech_UNKNOWN PartOfSpeech_Tag = 0
// Adjective
PartOfSpeech_ADJ PartOfSpeech_Tag = 1
// Adposition (preposition and postposition)
PartOfSpeech_ADP PartOfSpeech_Tag = 2
// Adverb
PartOfSpeech_ADV PartOfSpeech_Tag = 3
// Conjunction
PartOfSpeech_CONJ PartOfSpeech_Tag = 4
// Determiner
PartOfSpeech_DET PartOfSpeech_Tag = 5
// Noun (common and proper)
PartOfSpeech_NOUN PartOfSpeech_Tag = 6
// Cardinal number
PartOfSpeech_NUM PartOfSpeech_Tag = 7
// Pronoun
PartOfSpeech_PRON PartOfSpeech_Tag = 8
// Particle or other function word
PartOfSpeech_PRT PartOfSpeech_Tag = 9
// Punctuation
PartOfSpeech_PUNCT PartOfSpeech_Tag = 10
// Verb (all tenses and modes)
PartOfSpeech_VERB PartOfSpeech_Tag = 11
// Other: foreign words, typos, abbreviations
PartOfSpeech_X PartOfSpeech_Tag = 12
// Affix
PartOfSpeech_AFFIX PartOfSpeech_Tag = 13
)
var PartOfSpeech_Tag_name = map[int32]string{
0: "UNKNOWN",
1: "ADJ",
2: "ADP",
3: "ADV",
4: "CONJ",
5: "DET",
6: "NOUN",
7: "NUM",
8: "PRON",
9: "PRT",
10: "PUNCT",
11: "VERB",
12: "X",
13: "AFFIX",
}
var PartOfSpeech_Tag_value = map[string]int32{
"UNKNOWN": 0,
"ADJ": 1,
"ADP": 2,
"ADV": 3,
"CONJ": 4,
"DET": 5,
"NOUN": 6,
"NUM": 7,
"PRON": 8,
"PRT": 9,
"PUNCT": 10,
"VERB": 11,
"X": 12,
"AFFIX": 13,
}
func (x PartOfSpeech_Tag) String() string {
return proto.EnumName(PartOfSpeech_Tag_name, int32(x))
}
func (PartOfSpeech_Tag) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 0}
}
// The characteristic of a verb that expresses time flow during an event.
type PartOfSpeech_Aspect int32
const (
// Aspect is not applicable in the analyzed language or is not predicted.
PartOfSpeech_ASPECT_UNKNOWN PartOfSpeech_Aspect = 0
// Perfective
PartOfSpeech_PERFECTIVE PartOfSpeech_Aspect = 1
// Imperfective
PartOfSpeech_IMPERFECTIVE PartOfSpeech_Aspect = 2
// Progressive
PartOfSpeech_PROGRESSIVE PartOfSpeech_Aspect = 3
)
var PartOfSpeech_Aspect_name = map[int32]string{
0: "ASPECT_UNKNOWN",
1: "PERFECTIVE",
2: "IMPERFECTIVE",
3: "PROGRESSIVE",
}
var PartOfSpeech_Aspect_value = map[string]int32{
"ASPECT_UNKNOWN": 0,
"PERFECTIVE": 1,
"IMPERFECTIVE": 2,
"PROGRESSIVE": 3,
}
func (x PartOfSpeech_Aspect) String() string {
return proto.EnumName(PartOfSpeech_Aspect_name, int32(x))
}
func (PartOfSpeech_Aspect) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 1}
}
// The grammatical function performed by a noun or pronoun in a phrase,
// clause, or sentence. In some languages, other parts of speech, such as
// adjective and determiner, take case inflection in agreement with the noun.
type PartOfSpeech_Case int32
const (
// Case is not applicable in the analyzed language or is not predicted.
PartOfSpeech_CASE_UNKNOWN PartOfSpeech_Case = 0
// Accusative
PartOfSpeech_ACCUSATIVE PartOfSpeech_Case = 1
// Adverbial
PartOfSpeech_ADVERBIAL PartOfSpeech_Case = 2
// Complementive
PartOfSpeech_COMPLEMENTIVE PartOfSpeech_Case = 3
// Dative
PartOfSpeech_DATIVE PartOfSpeech_Case = 4
// Genitive
PartOfSpeech_GENITIVE PartOfSpeech_Case = 5
// Instrumental
PartOfSpeech_INSTRUMENTAL PartOfSpeech_Case = 6
// Locative
PartOfSpeech_LOCATIVE PartOfSpeech_Case = 7
// Nominative
PartOfSpeech_NOMINATIVE PartOfSpeech_Case = 8
// Oblique
PartOfSpeech_OBLIQUE PartOfSpeech_Case = 9
// Partitive
PartOfSpeech_PARTITIVE PartOfSpeech_Case = 10
// Prepositional
PartOfSpeech_PREPOSITIONAL PartOfSpeech_Case = 11
// Reflexive
PartOfSpeech_REFLEXIVE_CASE PartOfSpeech_Case = 12
// Relative
PartOfSpeech_RELATIVE_CASE PartOfSpeech_Case = 13
// Vocative
PartOfSpeech_VOCATIVE PartOfSpeech_Case = 14
)
var PartOfSpeech_Case_name = map[int32]string{
0: "CASE_UNKNOWN",
1: "ACCUSATIVE",
2: "ADVERBIAL",
3: "COMPLEMENTIVE",
4: "DATIVE",
5: "GENITIVE",
6: "INSTRUMENTAL",
7: "LOCATIVE",
8: "NOMINATIVE",
9: "OBLIQUE",
10: "PARTITIVE",
11: "PREPOSITIONAL",
12: "REFLEXIVE_CASE",
13: "RELATIVE_CASE",
14: "VOCATIVE",
}
var PartOfSpeech_Case_value = map[string]int32{
"CASE_UNKNOWN": 0,
"ACCUSATIVE": 1,
"ADVERBIAL": 2,
"COMPLEMENTIVE": 3,
"DATIVE": 4,
"GENITIVE": 5,
"INSTRUMENTAL": 6,
"LOCATIVE": 7,
"NOMINATIVE": 8,
"OBLIQUE": 9,
"PARTITIVE": 10,
"PREPOSITIONAL": 11,
"REFLEXIVE_CASE": 12,
"RELATIVE_CASE": 13,
"VOCATIVE": 14,
}
func (x PartOfSpeech_Case) String() string {
return proto.EnumName(PartOfSpeech_Case_name, int32(x))
}
func (PartOfSpeech_Case) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 2}
}
// Depending on the language, Form can categorize different forms of verbs,
// adjectives, adverbs, etc. For example, it can categorize inflected endings
// of verbs and adjectives, or distinguish between short and long forms of
// adjectives and participles.
type PartOfSpeech_Form int32
const (
// Form is not applicable in the analyzed language or is not predicted.
PartOfSpeech_FORM_UNKNOWN PartOfSpeech_Form = 0
// Adnomial
PartOfSpeech_ADNOMIAL PartOfSpeech_Form = 1
// Auxiliary
PartOfSpeech_AUXILIARY PartOfSpeech_Form = 2
// Complementizer
PartOfSpeech_COMPLEMENTIZER PartOfSpeech_Form = 3
// Final ending
PartOfSpeech_FINAL_ENDING PartOfSpeech_Form = 4
// Gerund
PartOfSpeech_GERUND PartOfSpeech_Form = 5
// Realis
PartOfSpeech_REALIS PartOfSpeech_Form = 6
// Irrealis
PartOfSpeech_IRREALIS PartOfSpeech_Form = 7
// Short form
PartOfSpeech_SHORT PartOfSpeech_Form = 8
// Long form
PartOfSpeech_LONG PartOfSpeech_Form = 9
// Order form
PartOfSpeech_ORDER PartOfSpeech_Form = 10
// Specific form
PartOfSpeech_SPECIFIC PartOfSpeech_Form = 11
)
var PartOfSpeech_Form_name = map[int32]string{
0: "FORM_UNKNOWN",
1: "ADNOMIAL",
2: "AUXILIARY",
3: "COMPLEMENTIZER",
4: "FINAL_ENDING",
5: "GERUND",
6: "REALIS",
7: "IRREALIS",
8: "SHORT",
9: "LONG",
10: "ORDER",
11: "SPECIFIC",
}
var PartOfSpeech_Form_value = map[string]int32{
"FORM_UNKNOWN": 0,
"ADNOMIAL": 1,
"AUXILIARY": 2,
"COMPLEMENTIZER": 3,
"FINAL_ENDING": 4,
"GERUND": 5,
"REALIS": 6,
"IRREALIS": 7,
"SHORT": 8,
"LONG": 9,
"ORDER": 10,
"SPECIFIC": 11,
}
func (x PartOfSpeech_Form) String() string {
return proto.EnumName(PartOfSpeech_Form_name, int32(x))
}
func (PartOfSpeech_Form) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 3}
}
// Gender classes of nouns reflected in the behaviour of associated words.
type PartOfSpeech_Gender int32
const (
// Gender is not applicable in the analyzed language or is not predicted.
PartOfSpeech_GENDER_UNKNOWN PartOfSpeech_Gender = 0
// Feminine
PartOfSpeech_FEMININE PartOfSpeech_Gender = 1
// Masculine
PartOfSpeech_MASCULINE PartOfSpeech_Gender = 2
// Neuter
PartOfSpeech_NEUTER PartOfSpeech_Gender = 3
)
var PartOfSpeech_Gender_name = map[int32]string{
0: "GENDER_UNKNOWN",
1: "FEMININE",
2: "MASCULINE",
3: "NEUTER",
}
var PartOfSpeech_Gender_value = map[string]int32{
"GENDER_UNKNOWN": 0,
"FEMININE": 1,
"MASCULINE": 2,
"NEUTER": 3,
}
func (x PartOfSpeech_Gender) String() string {
return proto.EnumName(PartOfSpeech_Gender_name, int32(x))
}
func (PartOfSpeech_Gender) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 4}
}
// The grammatical feature of verbs, used for showing modality and attitude.
type PartOfSpeech_Mood int32
const (
// Mood is not applicable in the analyzed language or is not predicted.
PartOfSpeech_MOOD_UNKNOWN PartOfSpeech_Mood = 0
// Conditional
PartOfSpeech_CONDITIONAL_MOOD PartOfSpeech_Mood = 1
// Imperative
PartOfSpeech_IMPERATIVE PartOfSpeech_Mood = 2
// Indicative
PartOfSpeech_INDICATIVE PartOfSpeech_Mood = 3
// Interrogative
PartOfSpeech_INTERROGATIVE PartOfSpeech_Mood = 4
// Jussive
PartOfSpeech_JUSSIVE PartOfSpeech_Mood = 5
// Subjunctive
PartOfSpeech_SUBJUNCTIVE PartOfSpeech_Mood = 6
)
var PartOfSpeech_Mood_name = map[int32]string{
0: "MOOD_UNKNOWN",
1: "CONDITIONAL_MOOD",
2: "IMPERATIVE",
3: "INDICATIVE",
4: "INTERROGATIVE",
5: "JUSSIVE",
6: "SUBJUNCTIVE",
}
var PartOfSpeech_Mood_value = map[string]int32{
"MOOD_UNKNOWN": 0,
"CONDITIONAL_MOOD": 1,
"IMPERATIVE": 2,
"INDICATIVE": 3,
"INTERROGATIVE": 4,
"JUSSIVE": 5,
"SUBJUNCTIVE": 6,
}
func (x PartOfSpeech_Mood) String() string {
return proto.EnumName(PartOfSpeech_Mood_name, int32(x))
}
func (PartOfSpeech_Mood) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 5}
}
// Count distinctions.
type PartOfSpeech_Number int32
const (
// Number is not applicable in the analyzed language or is not predicted.
PartOfSpeech_NUMBER_UNKNOWN PartOfSpeech_Number = 0
// Singular
PartOfSpeech_SINGULAR PartOfSpeech_Number = 1
// Plural
PartOfSpeech_PLURAL PartOfSpeech_Number = 2
// Dual
PartOfSpeech_DUAL PartOfSpeech_Number = 3
)
var PartOfSpeech_Number_name = map[int32]string{
0: "NUMBER_UNKNOWN",
1: "SINGULAR",
2: "PLURAL",
3: "DUAL",
}
var PartOfSpeech_Number_value = map[string]int32{
"NUMBER_UNKNOWN": 0,
"SINGULAR": 1,
"PLURAL": 2,
"DUAL": 3,
}
func (x PartOfSpeech_Number) String() string {
return proto.EnumName(PartOfSpeech_Number_name, int32(x))
}
func (PartOfSpeech_Number) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 6}
}
// The distinction between the speaker, second person, third person, etc.
type PartOfSpeech_Person int32
const (
// Person is not applicable in the analyzed language or is not predicted.
PartOfSpeech_PERSON_UNKNOWN PartOfSpeech_Person = 0
// First
PartOfSpeech_FIRST PartOfSpeech_Person = 1
// Second
PartOfSpeech_SECOND PartOfSpeech_Person = 2
// Third
PartOfSpeech_THIRD PartOfSpeech_Person = 3
// Reflexive
PartOfSpeech_REFLEXIVE_PERSON PartOfSpeech_Person = 4
)
var PartOfSpeech_Person_name = map[int32]string{
0: "PERSON_UNKNOWN",
1: "FIRST",
2: "SECOND",
3: "THIRD",
4: "REFLEXIVE_PERSON",
}
var PartOfSpeech_Person_value = map[string]int32{
"PERSON_UNKNOWN": 0,
"FIRST": 1,
"SECOND": 2,
"THIRD": 3,
"REFLEXIVE_PERSON": 4,
}
func (x PartOfSpeech_Person) String() string {
return proto.EnumName(PartOfSpeech_Person_name, int32(x))
}
func (PartOfSpeech_Person) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 7}
}
// This category shows if the token is part of a proper name.
type PartOfSpeech_Proper int32
const (
// Proper is not applicable in the analyzed language or is not predicted.
PartOfSpeech_PROPER_UNKNOWN PartOfSpeech_Proper = 0
// Proper
PartOfSpeech_PROPER PartOfSpeech_Proper = 1
// Not proper
PartOfSpeech_NOT_PROPER PartOfSpeech_Proper = 2
)
var PartOfSpeech_Proper_name = map[int32]string{
0: "PROPER_UNKNOWN",
1: "PROPER",
2: "NOT_PROPER",
}
var PartOfSpeech_Proper_value = map[string]int32{
"PROPER_UNKNOWN": 0,
"PROPER": 1,
"NOT_PROPER": 2,
}
func (x PartOfSpeech_Proper) String() string {
return proto.EnumName(PartOfSpeech_Proper_name, int32(x))
}
func (PartOfSpeech_Proper) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 8}
}
// Reciprocal features of a pronoun.
type PartOfSpeech_Reciprocity int32
const (
// Reciprocity is not applicable in the analyzed language or is not
// predicted.
PartOfSpeech_RECIPROCITY_UNKNOWN PartOfSpeech_Reciprocity = 0
// Reciprocal
PartOfSpeech_RECIPROCAL PartOfSpeech_Reciprocity = 1
// Non-reciprocal
PartOfSpeech_NON_RECIPROCAL PartOfSpeech_Reciprocity = 2
)
var PartOfSpeech_Reciprocity_name = map[int32]string{
0: "RECIPROCITY_UNKNOWN",
1: "RECIPROCAL",
2: "NON_RECIPROCAL",
}
var PartOfSpeech_Reciprocity_value = map[string]int32{
"RECIPROCITY_UNKNOWN": 0,
"RECIPROCAL": 1,
"NON_RECIPROCAL": 2,
}
func (x PartOfSpeech_Reciprocity) String() string {
return proto.EnumName(PartOfSpeech_Reciprocity_name, int32(x))
}
func (PartOfSpeech_Reciprocity) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 9}
}
// Time reference.
type PartOfSpeech_Tense int32
const (
// Tense is not applicable in the analyzed language or is not predicted.
PartOfSpeech_TENSE_UNKNOWN PartOfSpeech_Tense = 0
// Conditional
PartOfSpeech_CONDITIONAL_TENSE PartOfSpeech_Tense = 1
// Future
PartOfSpeech_FUTURE PartOfSpeech_Tense = 2
// Past
PartOfSpeech_PAST PartOfSpeech_Tense = 3
// Present
PartOfSpeech_PRESENT PartOfSpeech_Tense = 4
// Imperfect
PartOfSpeech_IMPERFECT PartOfSpeech_Tense = 5
// Pluperfect
PartOfSpeech_PLUPERFECT PartOfSpeech_Tense = 6
)
var PartOfSpeech_Tense_name = map[int32]string{
0: "TENSE_UNKNOWN",
1: "CONDITIONAL_TENSE",
2: "FUTURE",
3: "PAST",
4: "PRESENT",
5: "IMPERFECT",
6: "PLUPERFECT",
}
var PartOfSpeech_Tense_value = map[string]int32{
"TENSE_UNKNOWN": 0,
"CONDITIONAL_TENSE": 1,
"FUTURE": 2,
"PAST": 3,
"PRESENT": 4,
"IMPERFECT": 5,
"PLUPERFECT": 6,
}
func (x PartOfSpeech_Tense) String() string {
return proto.EnumName(PartOfSpeech_Tense_name, int32(x))
}
func (PartOfSpeech_Tense) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 10}
}
// The relationship between the action that a verb expresses and the
// participants identified by its arguments.
type PartOfSpeech_Voice int32
const (
// Voice is not applicable in the analyzed language or is not predicted.
PartOfSpeech_VOICE_UNKNOWN PartOfSpeech_Voice = 0
// Active
PartOfSpeech_ACTIVE PartOfSpeech_Voice = 1
// Causative
PartOfSpeech_CAUSATIVE PartOfSpeech_Voice = 2
// Passive
PartOfSpeech_PASSIVE PartOfSpeech_Voice = 3
)
var PartOfSpeech_Voice_name = map[int32]string{
0: "VOICE_UNKNOWN",
1: "ACTIVE",
2: "CAUSATIVE",
3: "PASSIVE",
}
var PartOfSpeech_Voice_value = map[string]int32{
"VOICE_UNKNOWN": 0,
"ACTIVE": 1,
"CAUSATIVE": 2,
"PASSIVE": 3,
}
func (x PartOfSpeech_Voice) String() string {
return proto.EnumName(PartOfSpeech_Voice_name, int32(x))
}
func (PartOfSpeech_Voice) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5, 11}
}
// The parse label enum for the token.
type DependencyEdge_Label int32
const (
// Unknown
DependencyEdge_UNKNOWN DependencyEdge_Label = 0
// Abbreviation modifier
DependencyEdge_ABBREV DependencyEdge_Label = 1
// Adjectival complement
DependencyEdge_ACOMP DependencyEdge_Label = 2
// Adverbial clause modifier
DependencyEdge_ADVCL DependencyEdge_Label = 3
// Adverbial modifier
DependencyEdge_ADVMOD DependencyEdge_Label = 4
// Adjectival modifier of an NP
DependencyEdge_AMOD DependencyEdge_Label = 5
// Appositional modifier of an NP
DependencyEdge_APPOS DependencyEdge_Label = 6
// Attribute dependent of a copular verb
DependencyEdge_ATTR DependencyEdge_Label = 7
// Auxiliary (non-main) verb
DependencyEdge_AUX DependencyEdge_Label = 8
// Passive auxiliary
DependencyEdge_AUXPASS DependencyEdge_Label = 9
// Coordinating conjunction
DependencyEdge_CC DependencyEdge_Label = 10
// Clausal complement of a verb or adjective
DependencyEdge_CCOMP DependencyEdge_Label = 11
// Conjunct
DependencyEdge_CONJ DependencyEdge_Label = 12
// Clausal subject
DependencyEdge_CSUBJ DependencyEdge_Label = 13
// Clausal passive subject
DependencyEdge_CSUBJPASS DependencyEdge_Label = 14
// Dependency (unable to determine)
DependencyEdge_DEP DependencyEdge_Label = 15
// Determiner
DependencyEdge_DET DependencyEdge_Label = 16
// Discourse
DependencyEdge_DISCOURSE DependencyEdge_Label = 17
// Direct object
DependencyEdge_DOBJ DependencyEdge_Label = 18
// Expletive
DependencyEdge_EXPL DependencyEdge_Label = 19
// Goes with (part of a word in a text not well edited)
DependencyEdge_GOESWITH DependencyEdge_Label = 20
// Indirect object
DependencyEdge_IOBJ DependencyEdge_Label = 21
// Marker (word introducing a subordinate clause)
DependencyEdge_MARK DependencyEdge_Label = 22
// Multi-word expression
DependencyEdge_MWE DependencyEdge_Label = 23
// Multi-word verbal expression
DependencyEdge_MWV DependencyEdge_Label = 24
// Negation modifier
DependencyEdge_NEG DependencyEdge_Label = 25
// Noun compound modifier
DependencyEdge_NN DependencyEdge_Label = 26
// Noun phrase used as an adverbial modifier
DependencyEdge_NPADVMOD DependencyEdge_Label = 27
// Nominal subject
DependencyEdge_NSUBJ DependencyEdge_Label = 28
// Passive nominal subject
DependencyEdge_NSUBJPASS DependencyEdge_Label = 29
// Numeric modifier of a noun
DependencyEdge_NUM DependencyEdge_Label = 30
// Element of compound number
DependencyEdge_NUMBER DependencyEdge_Label = 31
// Punctuation mark
DependencyEdge_P DependencyEdge_Label = 32
// Parataxis relation
DependencyEdge_PARATAXIS DependencyEdge_Label = 33
// Participial modifier
DependencyEdge_PARTMOD DependencyEdge_Label = 34
// The complement of a preposition is a clause
DependencyEdge_PCOMP DependencyEdge_Label = 35
// Object of a preposition
DependencyEdge_POBJ DependencyEdge_Label = 36
// Possession modifier
DependencyEdge_POSS DependencyEdge_Label = 37
// Postverbal negative particle
DependencyEdge_POSTNEG DependencyEdge_Label = 38
// Predicate complement
DependencyEdge_PRECOMP DependencyEdge_Label = 39
// Preconjunct
DependencyEdge_PRECONJ DependencyEdge_Label = 40
// Predeterminer
DependencyEdge_PREDET DependencyEdge_Label = 41
// Prefix
DependencyEdge_PREF DependencyEdge_Label = 42
// Prepositional modifier
DependencyEdge_PREP DependencyEdge_Label = 43
// The relationship between a verb and verbal morpheme
DependencyEdge_PRONL DependencyEdge_Label = 44
// Particle
DependencyEdge_PRT DependencyEdge_Label = 45
// Associative or possessive marker
DependencyEdge_PS DependencyEdge_Label = 46
// Quantifier phrase modifier
DependencyEdge_QUANTMOD DependencyEdge_Label = 47
// Relative clause modifier
DependencyEdge_RCMOD DependencyEdge_Label = 48
// Complementizer in relative clause
DependencyEdge_RCMODREL DependencyEdge_Label = 49
// Ellipsis without a preceding predicate
DependencyEdge_RDROP DependencyEdge_Label = 50
// Referent
DependencyEdge_REF DependencyEdge_Label = 51
// Remnant
DependencyEdge_REMNANT DependencyEdge_Label = 52
// Reparandum
DependencyEdge_REPARANDUM DependencyEdge_Label = 53
// Root
DependencyEdge_ROOT DependencyEdge_Label = 54
// Suffix specifying a unit of number
DependencyEdge_SNUM DependencyEdge_Label = 55
// Suffix
DependencyEdge_SUFF DependencyEdge_Label = 56
// Temporal modifier
DependencyEdge_TMOD DependencyEdge_Label = 57
// Topic marker
DependencyEdge_TOPIC DependencyEdge_Label = 58
// Clause headed by an infinite form of the verb that modifies a noun
DependencyEdge_VMOD DependencyEdge_Label = 59
// Vocative
DependencyEdge_VOCATIVE DependencyEdge_Label = 60
// Open clausal complement
DependencyEdge_XCOMP DependencyEdge_Label = 61
// Name suffix
DependencyEdge_SUFFIX DependencyEdge_Label = 62
// Name title
DependencyEdge_TITLE DependencyEdge_Label = 63
// Adverbial phrase modifier
DependencyEdge_ADVPHMOD DependencyEdge_Label = 64
// Causative auxiliary
DependencyEdge_AUXCAUS DependencyEdge_Label = 65
// Helper auxiliary
DependencyEdge_AUXVV DependencyEdge_Label = 66
// Rentaishi (Prenominal modifier)
DependencyEdge_DTMOD DependencyEdge_Label = 67
// Foreign words
DependencyEdge_FOREIGN DependencyEdge_Label = 68
// Keyword
DependencyEdge_KW DependencyEdge_Label = 69
// List for chains of comparable items
DependencyEdge_LIST DependencyEdge_Label = 70
// Nominalized clause
DependencyEdge_NOMC DependencyEdge_Label = 71
// Nominalized clausal subject
DependencyEdge_NOMCSUBJ DependencyEdge_Label = 72
// Nominalized clausal passive
DependencyEdge_NOMCSUBJPASS DependencyEdge_Label = 73
// Compound of numeric modifier
DependencyEdge_NUMC DependencyEdge_Label = 74
// Copula
DependencyEdge_COP DependencyEdge_Label = 75
// Dislocated relation (for fronted/topicalized elements)
DependencyEdge_DISLOCATED DependencyEdge_Label = 76
// Aspect marker
DependencyEdge_ASP DependencyEdge_Label = 77
// Genitive modifier
DependencyEdge_GMOD DependencyEdge_Label = 78
// Genitive object
DependencyEdge_GOBJ DependencyEdge_Label = 79
// Infinitival modifier
DependencyEdge_INFMOD DependencyEdge_Label = 80
// Measure
DependencyEdge_MES DependencyEdge_Label = 81
// Nominal complement of a noun
DependencyEdge_NCOMP DependencyEdge_Label = 82
)
var DependencyEdge_Label_name = map[int32]string{
0: "UNKNOWN",
1: "ABBREV",
2: "ACOMP",
3: "ADVCL",
4: "ADVMOD",
5: "AMOD",
6: "APPOS",
7: "ATTR",
8: "AUX",
9: "AUXPASS",
10: "CC",
11: "CCOMP",
12: "CONJ",
13: "CSUBJ",
14: "CSUBJPASS",
15: "DEP",
16: "DET",
17: "DISCOURSE",
18: "DOBJ",
19: "EXPL",
20: "GOESWITH",
21: "IOBJ",
22: "MARK",
23: "MWE",
24: "MWV",
25: "NEG",
26: "NN",
27: "NPADVMOD",
28: "NSUBJ",
29: "NSUBJPASS",
30: "NUM",
31: "NUMBER",
32: "P",
33: "PARATAXIS",
34: "PARTMOD",
35: "PCOMP",
36: "POBJ",
37: "POSS",
38: "POSTNEG",
39: "PRECOMP",
40: "PRECONJ",
41: "PREDET",
42: "PREF",
43: "PREP",
44: "PRONL",
45: "PRT",
46: "PS",
47: "QUANTMOD",
48: "RCMOD",
49: "RCMODREL",
50: "RDROP",
51: "REF",
52: "REMNANT",
53: "REPARANDUM",
54: "ROOT",
55: "SNUM",
56: "SUFF",
57: "TMOD",
58: "TOPIC",
59: "VMOD",
60: "VOCATIVE",
61: "XCOMP",
62: "SUFFIX",
63: "TITLE",
64: "ADVPHMOD",
65: "AUXCAUS",
66: "AUXVV",
67: "DTMOD",
68: "FOREIGN",
69: "KW",
70: "LIST",
71: "NOMC",
72: "NOMCSUBJ",
73: "NOMCSUBJPASS",
74: "NUMC",
75: "COP",
76: "DISLOCATED",
77: "ASP",
78: "GMOD",
79: "GOBJ",
80: "INFMOD",
81: "MES",
82: "NCOMP",
}
var DependencyEdge_Label_value = map[string]int32{
"UNKNOWN": 0,
"ABBREV": 1,
"ACOMP": 2,
"ADVCL": 3,
"ADVMOD": 4,
"AMOD": 5,
"APPOS": 6,
"ATTR": 7,
"AUX": 8,
"AUXPASS": 9,
"CC": 10,
"CCOMP": 11,
"CONJ": 12,
"CSUBJ": 13,
"CSUBJPASS": 14,
"DEP": 15,
"DET": 16,
"DISCOURSE": 17,
"DOBJ": 18,
"EXPL": 19,
"GOESWITH": 20,
"IOBJ": 21,
"MARK": 22,
"MWE": 23,
"MWV": 24,
"NEG": 25,
"NN": 26,
"NPADVMOD": 27,
"NSUBJ": 28,
"NSUBJPASS": 29,
"NUM": 30,
"NUMBER": 31,
"P": 32,
"PARATAXIS": 33,
"PARTMOD": 34,
"PCOMP": 35,
"POBJ": 36,
"POSS": 37,
"POSTNEG": 38,
"PRECOMP": 39,
"PRECONJ": 40,
"PREDET": 41,
"PREF": 42,
"PREP": 43,
"PRONL": 44,
"PRT": 45,
"PS": 46,
"QUANTMOD": 47,
"RCMOD": 48,
"RCMODREL": 49,
"RDROP": 50,
"REF": 51,
"REMNANT": 52,
"REPARANDUM": 53,
"ROOT": 54,
"SNUM": 55,
"SUFF": 56,
"TMOD": 57,
"TOPIC": 58,
"VMOD": 59,
"VOCATIVE": 60,
"XCOMP": 61,
"SUFFIX": 62,
"TITLE": 63,
"ADVPHMOD": 64,
"AUXCAUS": 65,
"AUXVV": 66,
"DTMOD": 67,
"FOREIGN": 68,
"KW": 69,
"LIST": 70,
"NOMC": 71,
"NOMCSUBJ": 72,
"NOMCSUBJPASS": 73,
"NUMC": 74,
"COP": 75,
"DISLOCATED": 76,
"ASP": 77,
"GMOD": 78,
"GOBJ": 79,
"INFMOD": 80,
"MES": 81,
"NCOMP": 82,
}
func (x DependencyEdge_Label) String() string {
return proto.EnumName(DependencyEdge_Label_name, int32(x))
}
func (DependencyEdge_Label) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{6, 0}
}
// The supported types of mentions.
type EntityMention_Type int32
const (
// Unknown
EntityMention_TYPE_UNKNOWN EntityMention_Type = 0
// Proper name
EntityMention_PROPER EntityMention_Type = 1
// Common noun (or noun compound)
EntityMention_COMMON EntityMention_Type = 2
)
var EntityMention_Type_name = map[int32]string{
0: "TYPE_UNKNOWN",
1: "PROPER",
2: "COMMON",
}
var EntityMention_Type_value = map[string]int32{
"TYPE_UNKNOWN": 0,
"PROPER": 1,
"COMMON": 2,
}
func (x EntityMention_Type) String() string {
return proto.EnumName(EntityMention_Type_name, int32(x))
}
func (EntityMention_Type) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{7, 0}
}
// ################################################################ #
//
// Represents the input to API methods.
type Document struct {
// Required. If the type is not set or is `TYPE_UNSPECIFIED`,
// returns an `INVALID_ARGUMENT` error.
Type Document_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.language.v1beta2.Document_Type" json:"type,omitempty"`
// The source of the document: a string containing the content or a
// Google Cloud Storage URI.
//
// Types that are valid to be assigned to Source:
// *Document_Content
// *Document_GcsContentUri
Source isDocument_Source `protobuf_oneof:"source"`
// The language of the document (if not specified, the language is
// automatically detected). Both ISO and BCP-47 language codes are
// accepted.<br>
// [Language Support](/natural-language/docs/languages)
// lists currently supported languages for each API method.
// If the language (either specified by the caller or automatically detected)
// is not supported by the called API method, an `INVALID_ARGUMENT` error
// is returned.
Language string `protobuf:"bytes,4,opt,name=language,proto3" json:"language,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Document) Reset() { *m = Document{} }
func (m *Document) String() string { return proto.CompactTextString(m) }
func (*Document) ProtoMessage() {}
func (*Document) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{0}
}
func (m *Document) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Document.Unmarshal(m, b)
}
func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Document.Marshal(b, m, deterministic)
}
func (dst *Document) XXX_Merge(src proto.Message) {
xxx_messageInfo_Document.Merge(dst, src)
}
func (m *Document) XXX_Size() int {
return xxx_messageInfo_Document.Size(m)
}
func (m *Document) XXX_DiscardUnknown() {
xxx_messageInfo_Document.DiscardUnknown(m)
}
var xxx_messageInfo_Document proto.InternalMessageInfo
func (m *Document) GetType() Document_Type {
if m != nil {
return m.Type
}
return Document_TYPE_UNSPECIFIED
}
type isDocument_Source interface {
isDocument_Source()
}
type Document_Content struct {
Content string `protobuf:"bytes,2,opt,name=content,proto3,oneof"`
}
type Document_GcsContentUri struct {
GcsContentUri string `protobuf:"bytes,3,opt,name=gcs_content_uri,json=gcsContentUri,proto3,oneof"`
}
func (*Document_Content) isDocument_Source() {}
func (*Document_GcsContentUri) isDocument_Source() {}
func (m *Document) GetSource() isDocument_Source {
if m != nil {
return m.Source
}
return nil
}
func (m *Document) GetContent() string {
if x, ok := m.GetSource().(*Document_Content); ok {
return x.Content
}
return ""
}
func (m *Document) GetGcsContentUri() string {
if x, ok := m.GetSource().(*Document_GcsContentUri); ok {
return x.GcsContentUri
}
return ""
}
func (m *Document) GetLanguage() string {
if m != nil {
return m.Language
}
return ""
}
// XXX_OneofFuncs is for the internal use of the proto package.
func (*Document) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
return _Document_OneofMarshaler, _Document_OneofUnmarshaler, _Document_OneofSizer, []interface{}{
(*Document_Content)(nil),
(*Document_GcsContentUri)(nil),
}
}
func _Document_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
m := msg.(*Document)
// source
switch x := m.Source.(type) {
case *Document_Content:
b.EncodeVarint(2<<3 | proto.WireBytes)
b.EncodeStringBytes(x.Content)
case *Document_GcsContentUri:
b.EncodeVarint(3<<3 | proto.WireBytes)
b.EncodeStringBytes(x.GcsContentUri)
case nil:
default:
return fmt.Errorf("Document.Source has unexpected type %T", x)
}
return nil
}
func _Document_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
m := msg.(*Document)
switch tag {
case 2: // source.content
if wire != proto.WireBytes {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeStringBytes()
m.Source = &Document_Content{x}
return true, err
case 3: // source.gcs_content_uri
if wire != proto.WireBytes {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeStringBytes()
m.Source = &Document_GcsContentUri{x}
return true, err
default:
return false, nil
}
}
func _Document_OneofSizer(msg proto.Message) (n int) {
m := msg.(*Document)
// source
switch x := m.Source.(type) {
case *Document_Content:
n += 1 // tag and wire
n += proto.SizeVarint(uint64(len(x.Content)))
n += len(x.Content)
case *Document_GcsContentUri:
n += 1 // tag and wire
n += proto.SizeVarint(uint64(len(x.GcsContentUri)))
n += len(x.GcsContentUri)
case nil:
default:
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
}
return n
}
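// Usage sketch (not generated code; an illustrative construction only): a
// Document is built either from inline content or from a Google Cloud Storage
// URI via the Source oneof. Both variants below use only fields defined in
// this file; the bucket and object names are hypothetical.
func exampleDocuments() (*Document, *Document) {
	inline := &Document{
		Type:     Document_PLAIN_TEXT,
		Source:   &Document_Content{Content: "The quick brown fox jumps over the lazy dog."},
		Language: "en", // optional; detected automatically when omitted
	}
	fromGCS := &Document{
		Type:   Document_HTML,
		Source: &Document_GcsContentUri{GcsContentUri: "gs://my-bucket/page.html"}, // hypothetical URI
	}
	return inline, fromGCS
}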
// Represents a sentence in the input document.
type Sentence struct {
// The sentence text.
Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
// For calls to [AnalyzeSentiment][] or if
// [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
// true, this field will contain the sentiment for the sentence.
Sentiment *Sentiment `protobuf:"bytes,2,opt,name=sentiment,proto3" json:"sentiment,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Sentence) Reset() { *m = Sentence{} }
func (m *Sentence) String() string { return proto.CompactTextString(m) }
func (*Sentence) ProtoMessage() {}
func (*Sentence) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{1}
}
func (m *Sentence) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Sentence.Unmarshal(m, b)
}
func (m *Sentence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Sentence.Marshal(b, m, deterministic)
}
func (dst *Sentence) XXX_Merge(src proto.Message) {
xxx_messageInfo_Sentence.Merge(dst, src)
}
func (m *Sentence) XXX_Size() int {
return xxx_messageInfo_Sentence.Size(m)
}
func (m *Sentence) XXX_DiscardUnknown() {
xxx_messageInfo_Sentence.DiscardUnknown(m)
}
var xxx_messageInfo_Sentence proto.InternalMessageInfo
func (m *Sentence) GetText() *TextSpan {
if m != nil {
return m.Text
}
return nil
}
func (m *Sentence) GetSentiment() *Sentiment {
if m != nil {
return m.Sentiment
}
return nil
}
// Represents a phrase in the text that is a known entity, such as
// a person, an organization, or location. The API associates information, such
// as salience and mentions, with entities.
type Entity struct {
// The representative name for the entity.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// The entity type.
Type Entity_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1beta2.Entity_Type" json:"type,omitempty"`
// Metadata associated with the entity.
//
// Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
// available. The associated keys are "wikipedia_url" and "mid", respectively.
Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
// The salience score associated with the entity in the [0, 1.0] range.
//
// The salience score for an entity provides information about the
// importance or centrality of that entity to the entire document text.
// Scores closer to 0 are less salient, while scores closer to 1.0 are highly
// salient.
Salience float32 `protobuf:"fixed32,4,opt,name=salience,proto3" json:"salience,omitempty"`
// The mentions of this entity in the input document. The API currently
// supports proper noun mentions.
Mentions []*EntityMention `protobuf:"bytes,5,rep,name=mentions,proto3" json:"mentions,omitempty"`
// For calls to [AnalyzeEntitySentiment][] or if
// [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] is set to
// true, this field will contain the aggregate sentiment expressed for this
// entity in the provided document.
Sentiment *Sentiment `protobuf:"bytes,6,opt,name=sentiment,proto3" json:"sentiment,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Entity) Reset() { *m = Entity{} }
func (m *Entity) String() string { return proto.CompactTextString(m) }
func (*Entity) ProtoMessage() {}
func (*Entity) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{2}
}
func (m *Entity) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Entity.Unmarshal(m, b)
}
func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Entity.Marshal(b, m, deterministic)
}
func (dst *Entity) XXX_Merge(src proto.Message) {
xxx_messageInfo_Entity.Merge(dst, src)
}
func (m *Entity) XXX_Size() int {
return xxx_messageInfo_Entity.Size(m)
}
func (m *Entity) XXX_DiscardUnknown() {
xxx_messageInfo_Entity.DiscardUnknown(m)
}
var xxx_messageInfo_Entity proto.InternalMessageInfo
func (m *Entity) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Entity) GetType() Entity_Type {
if m != nil {
return m.Type
}
return Entity_UNKNOWN
}
func (m *Entity) GetMetadata() map[string]string {
if m != nil {
return m.Metadata
}
return nil
}
func (m *Entity) GetSalience() float32 {
if m != nil {
return m.Salience
}
return 0
}
func (m *Entity) GetMentions() []*EntityMention {
if m != nil {
return m.Mentions
}
return nil
}
func (m *Entity) GetSentiment() *Sentiment {
if m != nil {
return m.Sentiment
}
return nil
}
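// Usage sketch (not generated code): the Metadata map may carry the
// "wikipedia_url" and "mid" keys when they are available, and Salience ranks
// the entity's importance within the document. The helper name is hypothetical.
func exampleDescribeEntity(e *Entity) string {
	url := e.GetMetadata()["wikipedia_url"] // empty string when not present
	return fmt.Sprintf("%s (%s) salience=%.2f url=%s",
		e.GetName(), e.GetType(), e.GetSalience(), url)
}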
// Represents the smallest syntactic building block of the text.
type Token struct {
// The token text.
Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
// Parts of speech tag for this token.
PartOfSpeech *PartOfSpeech `protobuf:"bytes,2,opt,name=part_of_speech,json=partOfSpeech,proto3" json:"part_of_speech,omitempty"`
// Dependency tree parse for this token.
DependencyEdge *DependencyEdge `protobuf:"bytes,3,opt,name=dependency_edge,json=dependencyEdge,proto3" json:"dependency_edge,omitempty"`
// [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
Lemma string `protobuf:"bytes,4,opt,name=lemma,proto3" json:"lemma,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Token) Reset() { *m = Token{} }
func (m *Token) String() string { return proto.CompactTextString(m) }
func (*Token) ProtoMessage() {}
func (*Token) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{3}
}
func (m *Token) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Token.Unmarshal(m, b)
}
func (m *Token) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Token.Marshal(b, m, deterministic)
}
func (dst *Token) XXX_Merge(src proto.Message) {
xxx_messageInfo_Token.Merge(dst, src)
}
func (m *Token) XXX_Size() int {
return xxx_messageInfo_Token.Size(m)
}
func (m *Token) XXX_DiscardUnknown() {
xxx_messageInfo_Token.DiscardUnknown(m)
}
var xxx_messageInfo_Token proto.InternalMessageInfo
func (m *Token) GetText() *TextSpan {
if m != nil {
return m.Text
}
return nil
}
func (m *Token) GetPartOfSpeech() *PartOfSpeech {
if m != nil {
return m.PartOfSpeech
}
return nil
}
func (m *Token) GetDependencyEdge() *DependencyEdge {
if m != nil {
return m.DependencyEdge
}
return nil
}
func (m *Token) GetLemma() string {
if m != nil {
return m.Lemma
}
return ""
}
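// Usage sketch (not generated code): each Token carries its surface text, its
// lemma, a part-of-speech bundle, and a dependency edge pointing at its head.
// The nil-safe getters make it safe to chain these calls.
func exampleTokenSummary(t *Token) string {
	return fmt.Sprintf("%q lemma=%q tag=%s head=%d",
		t.GetText().GetContent(), t.GetLemma(),
		t.GetPartOfSpeech().GetTag(), t.GetDependencyEdge().GetHeadTokenIndex())
}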
// Represents the feeling associated with the entire text or entities in
// the text.
type Sentiment struct {
// A non-negative number in the [0, +inf) range, which represents
// the absolute magnitude of sentiment regardless of score (positive or
// negative).
Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude,proto3" json:"magnitude,omitempty"`
// Sentiment score between -1.0 (negative sentiment) and 1.0
// (positive sentiment).
Score float32 `protobuf:"fixed32,3,opt,name=score,proto3" json:"score,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Sentiment) Reset() { *m = Sentiment{} }
func (m *Sentiment) String() string { return proto.CompactTextString(m) }
func (*Sentiment) ProtoMessage() {}
func (*Sentiment) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{4}
}
func (m *Sentiment) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Sentiment.Unmarshal(m, b)
}
func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Sentiment.Marshal(b, m, deterministic)
}
func (dst *Sentiment) XXX_Merge(src proto.Message) {
xxx_messageInfo_Sentiment.Merge(dst, src)
}
func (m *Sentiment) XXX_Size() int {
return xxx_messageInfo_Sentiment.Size(m)
}
func (m *Sentiment) XXX_DiscardUnknown() {
xxx_messageInfo_Sentiment.DiscardUnknown(m)
}
var xxx_messageInfo_Sentiment proto.InternalMessageInfo
func (m *Sentiment) GetMagnitude() float32 {
if m != nil {
return m.Magnitude
}
return 0
}
func (m *Sentiment) GetScore() float32 {
if m != nil {
return m.Score
}
return 0
}
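// Usage sketch (not generated code): Score carries overall polarity in
// [-1.0, 1.0] and Magnitude the total strength of emotion, so text with mixed
// positive and negative sentences can score near 0 while still having a large
// magnitude. The cutoffs below are arbitrary illustrative thresholds, not part
// of the API.
func exampleSentimentLabel(s *Sentiment) string {
	switch {
	case s.GetScore() > 0.25:
		return "positive"
	case s.GetScore() < -0.25:
		return "negative"
	case s.GetMagnitude() > 1.0:
		return "mixed"
	default:
		return "neutral"
	}
}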
// Represents part of speech information for a token.
type PartOfSpeech struct {
// The part of speech tag.
Tag PartOfSpeech_Tag `protobuf:"varint,1,opt,name=tag,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Tag" json:"tag,omitempty"`
// The grammatical aspect.
Aspect PartOfSpeech_Aspect `protobuf:"varint,2,opt,name=aspect,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Aspect" json:"aspect,omitempty"`
// The grammatical case.
Case PartOfSpeech_Case `protobuf:"varint,3,opt,name=case,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Case" json:"case,omitempty"`
// The grammatical form.
Form PartOfSpeech_Form `protobuf:"varint,4,opt,name=form,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Form" json:"form,omitempty"`
// The grammatical gender.
Gender PartOfSpeech_Gender `protobuf:"varint,5,opt,name=gender,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Gender" json:"gender,omitempty"`
// The grammatical mood.
Mood PartOfSpeech_Mood `protobuf:"varint,6,opt,name=mood,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Mood" json:"mood,omitempty"`
// The grammatical number.
Number PartOfSpeech_Number `protobuf:"varint,7,opt,name=number,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Number" json:"number,omitempty"`
// The grammatical person.
Person PartOfSpeech_Person `protobuf:"varint,8,opt,name=person,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Person" json:"person,omitempty"`
// The grammatical properness.
Proper PartOfSpeech_Proper `protobuf:"varint,9,opt,name=proper,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Proper" json:"proper,omitempty"`
// The grammatical reciprocity.
Reciprocity PartOfSpeech_Reciprocity `protobuf:"varint,10,opt,name=reciprocity,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Reciprocity" json:"reciprocity,omitempty"`
// The grammatical tense.
Tense PartOfSpeech_Tense `protobuf:"varint,11,opt,name=tense,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Tense" json:"tense,omitempty"`
// The grammatical voice.
Voice PartOfSpeech_Voice `protobuf:"varint,12,opt,name=voice,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Voice" json:"voice,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *PartOfSpeech) Reset() { *m = PartOfSpeech{} }
func (m *PartOfSpeech) String() string { return proto.CompactTextString(m) }
func (*PartOfSpeech) ProtoMessage() {}
func (*PartOfSpeech) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{5}
}
func (m *PartOfSpeech) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_PartOfSpeech.Unmarshal(m, b)
}
func (m *PartOfSpeech) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_PartOfSpeech.Marshal(b, m, deterministic)
}
func (dst *PartOfSpeech) XXX_Merge(src proto.Message) {
xxx_messageInfo_PartOfSpeech.Merge(dst, src)
}
func (m *PartOfSpeech) XXX_Size() int {
return xxx_messageInfo_PartOfSpeech.Size(m)
}
func (m *PartOfSpeech) XXX_DiscardUnknown() {
xxx_messageInfo_PartOfSpeech.DiscardUnknown(m)
}
var xxx_messageInfo_PartOfSpeech proto.InternalMessageInfo
func (m *PartOfSpeech) GetTag() PartOfSpeech_Tag {
if m != nil {
return m.Tag
}
return PartOfSpeech_UNKNOWN
}
func (m *PartOfSpeech) GetAspect() PartOfSpeech_Aspect {
if m != nil {
return m.Aspect
}
return PartOfSpeech_ASPECT_UNKNOWN
}
func (m *PartOfSpeech) GetCase() PartOfSpeech_Case {
if m != nil {
return m.Case
}
return PartOfSpeech_CASE_UNKNOWN
}
func (m *PartOfSpeech) GetForm() PartOfSpeech_Form {
if m != nil {
return m.Form
}
return PartOfSpeech_FORM_UNKNOWN
}
func (m *PartOfSpeech) GetGender() PartOfSpeech_Gender {
if m != nil {
return m.Gender
}
return PartOfSpeech_GENDER_UNKNOWN
}
func (m *PartOfSpeech) GetMood() PartOfSpeech_Mood {
if m != nil {
return m.Mood
}
return PartOfSpeech_MOOD_UNKNOWN
}
func (m *PartOfSpeech) GetNumber() PartOfSpeech_Number {
if m != nil {
return m.Number
}
return PartOfSpeech_NUMBER_UNKNOWN
}
func (m *PartOfSpeech) GetPerson() PartOfSpeech_Person {
if m != nil {
return m.Person
}
return PartOfSpeech_PERSON_UNKNOWN
}
func (m *PartOfSpeech) GetProper() PartOfSpeech_Proper {
if m != nil {
return m.Proper
}
return PartOfSpeech_PROPER_UNKNOWN
}
func (m *PartOfSpeech) GetReciprocity() PartOfSpeech_Reciprocity {
if m != nil {
return m.Reciprocity
}
return PartOfSpeech_RECIPROCITY_UNKNOWN
}
func (m *PartOfSpeech) GetTense() PartOfSpeech_Tense {
if m != nil {
return m.Tense
}
return PartOfSpeech_TENSE_UNKNOWN
}
func (m *PartOfSpeech) GetVoice() PartOfSpeech_Voice {
if m != nil {
return m.Voice
}
return PartOfSpeech_VOICE_UNKNOWN
}
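// Usage sketch (not generated code): every grammatical dimension defaults to
// its *_UNKNOWN value when it does not apply to the analyzed language, so a
// caller only needs to branch on the dimensions it cares about.
func examplePartOfSpeechIsPluralNoun(p *PartOfSpeech) bool {
	return p.GetTag() == PartOfSpeech_NOUN && p.GetNumber() == PartOfSpeech_PLURAL
}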
// Represents dependency parse tree information for a token.
type DependencyEdge struct {
// Represents the head of this token in the dependency tree.
// This is the index of the token which has an arc going to this token.
// The index is the position of the token in the array of tokens returned
// by the API method. If this token is a root token, then the
// `head_token_index` is its own index.
HeadTokenIndex int32 `protobuf:"varint,1,opt,name=head_token_index,json=headTokenIndex,proto3" json:"head_token_index,omitempty"`
// The parse label for the token.
Label DependencyEdge_Label `protobuf:"varint,2,opt,name=label,proto3,enum=google.cloud.language.v1beta2.DependencyEdge_Label" json:"label,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *DependencyEdge) Reset() { *m = DependencyEdge{} }
func (m *DependencyEdge) String() string { return proto.CompactTextString(m) }
func (*DependencyEdge) ProtoMessage() {}
func (*DependencyEdge) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{6}
}
func (m *DependencyEdge) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_DependencyEdge.Unmarshal(m, b)
}
func (m *DependencyEdge) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_DependencyEdge.Marshal(b, m, deterministic)
}
func (dst *DependencyEdge) XXX_Merge(src proto.Message) {
xxx_messageInfo_DependencyEdge.Merge(dst, src)
}
func (m *DependencyEdge) XXX_Size() int {
return xxx_messageInfo_DependencyEdge.Size(m)
}
func (m *DependencyEdge) XXX_DiscardUnknown() {
xxx_messageInfo_DependencyEdge.DiscardUnknown(m)
}
var xxx_messageInfo_DependencyEdge proto.InternalMessageInfo
func (m *DependencyEdge) GetHeadTokenIndex() int32 {
if m != nil {
return m.HeadTokenIndex
}
return 0
}
func (m *DependencyEdge) GetLabel() DependencyEdge_Label {
if m != nil {
return m.Label
}
return DependencyEdge_UNKNOWN
}
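// Usage sketch (not generated code): a root token is its own head, so the
// syntactic root of a sentence can be found by comparing each token's
// HeadTokenIndex with its own position in the tokens slice returned by the API.
func exampleFindRootToken(tokens []*Token) *Token {
	for i, t := range tokens {
		if int(t.GetDependencyEdge().GetHeadTokenIndex()) == i {
			return t
		}
	}
	return nil
}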
// Represents a mention for an entity in the text. Currently, proper noun
// mentions are supported.
type EntityMention struct {
// The mention text.
Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
// The type of the entity mention.
Type EntityMention_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1beta2.EntityMention_Type" json:"type,omitempty"`
// For calls to [AnalyzeEntitySentiment][] or if
// [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] is set to
// true, this field will contain the sentiment expressed for this mention of
// the entity in the provided document.
Sentiment *Sentiment `protobuf:"bytes,3,opt,name=sentiment,proto3" json:"sentiment,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *EntityMention) Reset() { *m = EntityMention{} }
func (m *EntityMention) String() string { return proto.CompactTextString(m) }
func (*EntityMention) ProtoMessage() {}
func (*EntityMention) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{7}
}
func (m *EntityMention) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_EntityMention.Unmarshal(m, b)
}
func (m *EntityMention) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_EntityMention.Marshal(b, m, deterministic)
}
func (dst *EntityMention) XXX_Merge(src proto.Message) {
xxx_messageInfo_EntityMention.Merge(dst, src)
}
func (m *EntityMention) XXX_Size() int {
return xxx_messageInfo_EntityMention.Size(m)
}
func (m *EntityMention) XXX_DiscardUnknown() {
xxx_messageInfo_EntityMention.DiscardUnknown(m)
}
var xxx_messageInfo_EntityMention proto.InternalMessageInfo
func (m *EntityMention) GetText() *TextSpan {
if m != nil {
return m.Text
}
return nil
}
func (m *EntityMention) GetType() EntityMention_Type {
if m != nil {
return m.Type
}
return EntityMention_TYPE_UNKNOWN
}
func (m *EntityMention) GetSentiment() *Sentiment {
if m != nil {
return m.Sentiment
}
return nil
}
// Represents an output piece of text.
type TextSpan struct {
// The content of the output text.
Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"`
// The API calculates the beginning offset of the content in the original
// document according to the [EncodingType][google.cloud.language.v1beta2.EncodingType] specified in the API request.
BeginOffset int32 `protobuf:"varint,2,opt,name=begin_offset,json=beginOffset,proto3" json:"begin_offset,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *TextSpan) Reset() { *m = TextSpan{} }
func (m *TextSpan) String() string { return proto.CompactTextString(m) }
func (*TextSpan) ProtoMessage() {}
func (*TextSpan) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{8}
}
func (m *TextSpan) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_TextSpan.Unmarshal(m, b)
}
func (m *TextSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_TextSpan.Marshal(b, m, deterministic)
}
func (dst *TextSpan) XXX_Merge(src proto.Message) {
xxx_messageInfo_TextSpan.Merge(dst, src)
}
func (m *TextSpan) XXX_Size() int {
return xxx_messageInfo_TextSpan.Size(m)
}
func (m *TextSpan) XXX_DiscardUnknown() {
xxx_messageInfo_TextSpan.DiscardUnknown(m)
}
var xxx_messageInfo_TextSpan proto.InternalMessageInfo
func (m *TextSpan) GetContent() string {
if m != nil {
return m.Content
}
return ""
}
func (m *TextSpan) GetBeginOffset() int32 {
if m != nil {
return m.BeginOffset
}
return 0
}
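// Usage sketch (not generated code): BeginOffset is -1 when the request did
// not set an EncodingType; assuming the request used EncodingType_UTF8, it is
// a byte offset that can index the original Go string directly.
func exampleSliceFromSpan(original string, span *TextSpan) string {
	off := int(span.GetBeginOffset())
	if off < 0 || off > len(original) {
		return span.GetContent() // offsets unavailable; fall back to the span text
	}
	end := off + len(span.GetContent())
	if end > len(original) {
		end = len(original)
	}
	return original[off:end]
}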
// Represents a category returned from the text classifier.
type ClassificationCategory struct {
// The name of the category representing the document.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// The classifier's confidence score for the category. The number represents
// how certain the classifier is that this category represents the given text.
Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ClassificationCategory) Reset() { *m = ClassificationCategory{} }
func (m *ClassificationCategory) String() string { return proto.CompactTextString(m) }
func (*ClassificationCategory) ProtoMessage() {}
func (*ClassificationCategory) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{9}
}
func (m *ClassificationCategory) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ClassificationCategory.Unmarshal(m, b)
}
func (m *ClassificationCategory) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ClassificationCategory.Marshal(b, m, deterministic)
}
func (dst *ClassificationCategory) XXX_Merge(src proto.Message) {
xxx_messageInfo_ClassificationCategory.Merge(dst, src)
}
func (m *ClassificationCategory) XXX_Size() int {
return xxx_messageInfo_ClassificationCategory.Size(m)
}
func (m *ClassificationCategory) XXX_DiscardUnknown() {
xxx_messageInfo_ClassificationCategory.DiscardUnknown(m)
}
var xxx_messageInfo_ClassificationCategory proto.InternalMessageInfo
func (m *ClassificationCategory) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *ClassificationCategory) GetConfidence() float32 {
if m != nil {
return m.Confidence
}
return 0
}
// The sentiment analysis request message.
type AnalyzeSentimentRequest struct {
// Input document.
Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"`
// The encoding type used by the API to calculate sentence offsets for the
// sentence sentiment.
EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnalyzeSentimentRequest) Reset() { *m = AnalyzeSentimentRequest{} }
func (m *AnalyzeSentimentRequest) String() string { return proto.CompactTextString(m) }
func (*AnalyzeSentimentRequest) ProtoMessage() {}
func (*AnalyzeSentimentRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{10}
}
func (m *AnalyzeSentimentRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnalyzeSentimentRequest.Unmarshal(m, b)
}
func (m *AnalyzeSentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnalyzeSentimentRequest.Marshal(b, m, deterministic)
}
func (dst *AnalyzeSentimentRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnalyzeSentimentRequest.Merge(dst, src)
}
func (m *AnalyzeSentimentRequest) XXX_Size() int {
return xxx_messageInfo_AnalyzeSentimentRequest.Size(m)
}
func (m *AnalyzeSentimentRequest) XXX_DiscardUnknown() {
xxx_messageInfo_AnalyzeSentimentRequest.DiscardUnknown(m)
}
var xxx_messageInfo_AnalyzeSentimentRequest proto.InternalMessageInfo
func (m *AnalyzeSentimentRequest) GetDocument() *Document {
if m != nil {
return m.Document
}
return nil
}
func (m *AnalyzeSentimentRequest) GetEncodingType() EncodingType {
if m != nil {
return m.EncodingType
}
return EncodingType_NONE
}
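// Usage sketch (not generated code): a sentiment request pairs a Document with
// the EncodingType that offsets in the response should use. The resulting
// request would typically be passed to the service's AnalyzeSentiment method.
func exampleAnalyzeSentimentRequest(text string) *AnalyzeSentimentRequest {
	return &AnalyzeSentimentRequest{
		Document: &Document{
			Type:   Document_PLAIN_TEXT,
			Source: &Document_Content{Content: text},
		},
		EncodingType: EncodingType_UTF8,
	}
}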
// The sentiment analysis response message.
type AnalyzeSentimentResponse struct {
// The overall sentiment of the input document.
DocumentSentiment *Sentiment `protobuf:"bytes,1,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"`
// The language of the text, which will be the same as the language specified
// in the request or, if not specified, the automatically-detected language.
// See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"`
// The sentiment for all the sentences in the document.
Sentences []*Sentence `protobuf:"bytes,3,rep,name=sentences,proto3" json:"sentences,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnalyzeSentimentResponse) Reset() { *m = AnalyzeSentimentResponse{} }
func (m *AnalyzeSentimentResponse) String() string { return proto.CompactTextString(m) }
func (*AnalyzeSentimentResponse) ProtoMessage() {}
func (*AnalyzeSentimentResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{11}
}
func (m *AnalyzeSentimentResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnalyzeSentimentResponse.Unmarshal(m, b)
}
func (m *AnalyzeSentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnalyzeSentimentResponse.Marshal(b, m, deterministic)
}
func (dst *AnalyzeSentimentResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnalyzeSentimentResponse.Merge(dst, src)
}
func (m *AnalyzeSentimentResponse) XXX_Size() int {
return xxx_messageInfo_AnalyzeSentimentResponse.Size(m)
}
func (m *AnalyzeSentimentResponse) XXX_DiscardUnknown() {
xxx_messageInfo_AnalyzeSentimentResponse.DiscardUnknown(m)
}
var xxx_messageInfo_AnalyzeSentimentResponse proto.InternalMessageInfo
func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment {
if m != nil {
return m.DocumentSentiment
}
return nil
}
func (m *AnalyzeSentimentResponse) GetLanguage() string {
if m != nil {
return m.Language
}
return ""
}
func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence {
if m != nil {
return m.Sentences
}
return nil
}
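// Illustrative sketch, not part of the generated code: one way a caller might
// issue an AnalyzeSentiment RPC using the request and response messages above.
// The choice of UTF-8 offsets and the minimal error handling are assumptions
// for demonstration only.
func exampleAnalyzeSentiment(ctx context.Context, client LanguageServiceClient, doc *Document) error {
	resp, err := client.AnalyzeSentiment(ctx, &AnalyzeSentimentRequest{
		Document:     doc,
		EncodingType: EncodingType_UTF8, // sentence offsets reported as UTF-8 byte offsets
	})
	if err != nil {
		return err
	}
	// The response carries document-level sentiment plus one entry per sentence.
	_ = resp.GetDocumentSentiment()
	fmt.Printf("detected language %q, %d sentences analyzed\n", resp.GetLanguage(), len(resp.GetSentences()))
	return nil
}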
// The entity-level sentiment analysis request message.
type AnalyzeEntitySentimentRequest struct {
// Input document.
Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"`
// The encoding type used by the API to calculate offsets.
EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnalyzeEntitySentimentRequest) Reset() { *m = AnalyzeEntitySentimentRequest{} }
func (m *AnalyzeEntitySentimentRequest) String() string { return proto.CompactTextString(m) }
func (*AnalyzeEntitySentimentRequest) ProtoMessage() {}
func (*AnalyzeEntitySentimentRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{12}
}
func (m *AnalyzeEntitySentimentRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnalyzeEntitySentimentRequest.Unmarshal(m, b)
}
func (m *AnalyzeEntitySentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnalyzeEntitySentimentRequest.Marshal(b, m, deterministic)
}
func (dst *AnalyzeEntitySentimentRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnalyzeEntitySentimentRequest.Merge(dst, src)
}
func (m *AnalyzeEntitySentimentRequest) XXX_Size() int {
return xxx_messageInfo_AnalyzeEntitySentimentRequest.Size(m)
}
func (m *AnalyzeEntitySentimentRequest) XXX_DiscardUnknown() {
xxx_messageInfo_AnalyzeEntitySentimentRequest.DiscardUnknown(m)
}
var xxx_messageInfo_AnalyzeEntitySentimentRequest proto.InternalMessageInfo
func (m *AnalyzeEntitySentimentRequest) GetDocument() *Document {
if m != nil {
return m.Document
}
return nil
}
func (m *AnalyzeEntitySentimentRequest) GetEncodingType() EncodingType {
if m != nil {
return m.EncodingType
}
return EncodingType_NONE
}
// The entity-level sentiment analysis response message.
type AnalyzeEntitySentimentResponse struct {
// The recognized entities in the input document with associated sentiments.
Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"`
// The language of the text, which will be the same as the language specified
// in the request or, if not specified, the automatically-detected language.
// See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnalyzeEntitySentimentResponse) Reset() { *m = AnalyzeEntitySentimentResponse{} }
func (m *AnalyzeEntitySentimentResponse) String() string { return proto.CompactTextString(m) }
func (*AnalyzeEntitySentimentResponse) ProtoMessage() {}
func (*AnalyzeEntitySentimentResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{13}
}
func (m *AnalyzeEntitySentimentResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnalyzeEntitySentimentResponse.Unmarshal(m, b)
}
func (m *AnalyzeEntitySentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnalyzeEntitySentimentResponse.Marshal(b, m, deterministic)
}
func (dst *AnalyzeEntitySentimentResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnalyzeEntitySentimentResponse.Merge(dst, src)
}
func (m *AnalyzeEntitySentimentResponse) XXX_Size() int {
return xxx_messageInfo_AnalyzeEntitySentimentResponse.Size(m)
}
func (m *AnalyzeEntitySentimentResponse) XXX_DiscardUnknown() {
xxx_messageInfo_AnalyzeEntitySentimentResponse.DiscardUnknown(m)
}
var xxx_messageInfo_AnalyzeEntitySentimentResponse proto.InternalMessageInfo
func (m *AnalyzeEntitySentimentResponse) GetEntities() []*Entity {
if m != nil {
return m.Entities
}
return nil
}
func (m *AnalyzeEntitySentimentResponse) GetLanguage() string {
if m != nil {
return m.Language
}
return ""
}
// The entity analysis request message.
type AnalyzeEntitiesRequest struct {
// Input document.
Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"`
// The encoding type used by the API to calculate offsets.
EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnalyzeEntitiesRequest) Reset() { *m = AnalyzeEntitiesRequest{} }
func (m *AnalyzeEntitiesRequest) String() string { return proto.CompactTextString(m) }
func (*AnalyzeEntitiesRequest) ProtoMessage() {}
func (*AnalyzeEntitiesRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{14}
}
func (m *AnalyzeEntitiesRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnalyzeEntitiesRequest.Unmarshal(m, b)
}
func (m *AnalyzeEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnalyzeEntitiesRequest.Marshal(b, m, deterministic)
}
func (dst *AnalyzeEntitiesRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnalyzeEntitiesRequest.Merge(dst, src)
}
func (m *AnalyzeEntitiesRequest) XXX_Size() int {
return xxx_messageInfo_AnalyzeEntitiesRequest.Size(m)
}
func (m *AnalyzeEntitiesRequest) XXX_DiscardUnknown() {
xxx_messageInfo_AnalyzeEntitiesRequest.DiscardUnknown(m)
}
var xxx_messageInfo_AnalyzeEntitiesRequest proto.InternalMessageInfo
func (m *AnalyzeEntitiesRequest) GetDocument() *Document {
if m != nil {
return m.Document
}
return nil
}
func (m *AnalyzeEntitiesRequest) GetEncodingType() EncodingType {
if m != nil {
return m.EncodingType
}
return EncodingType_NONE
}
// The entity analysis response message.
type AnalyzeEntitiesResponse struct {
// The recognized entities in the input document.
Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"`
// The language of the text, which will be the same as the language specified
// in the request or, if not specified, the automatically-detected language.
// See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnalyzeEntitiesResponse) Reset() { *m = AnalyzeEntitiesResponse{} }
func (m *AnalyzeEntitiesResponse) String() string { return proto.CompactTextString(m) }
func (*AnalyzeEntitiesResponse) ProtoMessage() {}
func (*AnalyzeEntitiesResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{15}
}
func (m *AnalyzeEntitiesResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnalyzeEntitiesResponse.Unmarshal(m, b)
}
func (m *AnalyzeEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnalyzeEntitiesResponse.Marshal(b, m, deterministic)
}
func (dst *AnalyzeEntitiesResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnalyzeEntitiesResponse.Merge(dst, src)
}
func (m *AnalyzeEntitiesResponse) XXX_Size() int {
return xxx_messageInfo_AnalyzeEntitiesResponse.Size(m)
}
func (m *AnalyzeEntitiesResponse) XXX_DiscardUnknown() {
xxx_messageInfo_AnalyzeEntitiesResponse.DiscardUnknown(m)
}
var xxx_messageInfo_AnalyzeEntitiesResponse proto.InternalMessageInfo
func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity {
if m != nil {
return m.Entities
}
return nil
}
func (m *AnalyzeEntitiesResponse) GetLanguage() string {
if m != nil {
return m.Language
}
return ""
}
// The syntax analysis request message.
type AnalyzeSyntaxRequest struct {
// Input document.
Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"`
// The encoding type used by the API to calculate offsets.
EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnalyzeSyntaxRequest) Reset() { *m = AnalyzeSyntaxRequest{} }
func (m *AnalyzeSyntaxRequest) String() string { return proto.CompactTextString(m) }
func (*AnalyzeSyntaxRequest) ProtoMessage() {}
func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{16}
}
func (m *AnalyzeSyntaxRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnalyzeSyntaxRequest.Unmarshal(m, b)
}
func (m *AnalyzeSyntaxRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnalyzeSyntaxRequest.Marshal(b, m, deterministic)
}
func (dst *AnalyzeSyntaxRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnalyzeSyntaxRequest.Merge(dst, src)
}
func (m *AnalyzeSyntaxRequest) XXX_Size() int {
return xxx_messageInfo_AnalyzeSyntaxRequest.Size(m)
}
func (m *AnalyzeSyntaxRequest) XXX_DiscardUnknown() {
xxx_messageInfo_AnalyzeSyntaxRequest.DiscardUnknown(m)
}
var xxx_messageInfo_AnalyzeSyntaxRequest proto.InternalMessageInfo
func (m *AnalyzeSyntaxRequest) GetDocument() *Document {
if m != nil {
return m.Document
}
return nil
}
func (m *AnalyzeSyntaxRequest) GetEncodingType() EncodingType {
if m != nil {
return m.EncodingType
}
return EncodingType_NONE
}
// The syntax analysis response message.
type AnalyzeSyntaxResponse struct {
// Sentences in the input document.
Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"`
// Tokens, along with their syntactic information, in the input document.
Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"`
// The language of the text, which will be the same as the language specified
// in the request or, if not specified, the automatically-detected language.
// See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
Language string `protobuf:"bytes,3,opt,name=language,proto3" json:"language,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnalyzeSyntaxResponse) Reset() { *m = AnalyzeSyntaxResponse{} }
func (m *AnalyzeSyntaxResponse) String() string { return proto.CompactTextString(m) }
func (*AnalyzeSyntaxResponse) ProtoMessage() {}
func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{17}
}
func (m *AnalyzeSyntaxResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnalyzeSyntaxResponse.Unmarshal(m, b)
}
func (m *AnalyzeSyntaxResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnalyzeSyntaxResponse.Marshal(b, m, deterministic)
}
func (dst *AnalyzeSyntaxResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnalyzeSyntaxResponse.Merge(dst, src)
}
func (m *AnalyzeSyntaxResponse) XXX_Size() int {
return xxx_messageInfo_AnalyzeSyntaxResponse.Size(m)
}
func (m *AnalyzeSyntaxResponse) XXX_DiscardUnknown() {
xxx_messageInfo_AnalyzeSyntaxResponse.DiscardUnknown(m)
}
var xxx_messageInfo_AnalyzeSyntaxResponse proto.InternalMessageInfo
func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence {
if m != nil {
return m.Sentences
}
return nil
}
func (m *AnalyzeSyntaxResponse) GetTokens() []*Token {
if m != nil {
return m.Tokens
}
return nil
}
func (m *AnalyzeSyntaxResponse) GetLanguage() string {
if m != nil {
return m.Language
}
return ""
}
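// Illustrative sketch, not part of the generated code: a hypothetical helper
// that runs syntax analysis and reports how many sentences and tokens came
// back; the UTF-16 encoding choice is an assumption for demonstration.
func exampleAnalyzeSyntax(ctx context.Context, client LanguageServiceClient, doc *Document) error {
	resp, err := client.AnalyzeSyntax(ctx, &AnalyzeSyntaxRequest{
		Document:     doc,
		EncodingType: EncodingType_UTF16, // e.g. when offsets are consumed by a Java or JavaScript caller
	})
	if err != nil {
		return err
	}
	fmt.Printf("%d sentences, %d tokens\n", len(resp.GetSentences()), len(resp.GetTokens()))
	return nil
}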
// The document classification request message.
type ClassifyTextRequest struct {
// Input document.
Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ClassifyTextRequest) Reset() { *m = ClassifyTextRequest{} }
func (m *ClassifyTextRequest) String() string { return proto.CompactTextString(m) }
func (*ClassifyTextRequest) ProtoMessage() {}
func (*ClassifyTextRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{18}
}
func (m *ClassifyTextRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ClassifyTextRequest.Unmarshal(m, b)
}
func (m *ClassifyTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ClassifyTextRequest.Marshal(b, m, deterministic)
}
func (dst *ClassifyTextRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_ClassifyTextRequest.Merge(dst, src)
}
func (m *ClassifyTextRequest) XXX_Size() int {
return xxx_messageInfo_ClassifyTextRequest.Size(m)
}
func (m *ClassifyTextRequest) XXX_DiscardUnknown() {
xxx_messageInfo_ClassifyTextRequest.DiscardUnknown(m)
}
var xxx_messageInfo_ClassifyTextRequest proto.InternalMessageInfo
func (m *ClassifyTextRequest) GetDocument() *Document {
if m != nil {
return m.Document
}
return nil
}
// The document classification response message.
type ClassifyTextResponse struct {
// Categories representing the input document.
Categories []*ClassificationCategory `protobuf:"bytes,1,rep,name=categories,proto3" json:"categories,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ClassifyTextResponse) Reset() { *m = ClassifyTextResponse{} }
func (m *ClassifyTextResponse) String() string { return proto.CompactTextString(m) }
func (*ClassifyTextResponse) ProtoMessage() {}
func (*ClassifyTextResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{19}
}
func (m *ClassifyTextResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ClassifyTextResponse.Unmarshal(m, b)
}
func (m *ClassifyTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ClassifyTextResponse.Marshal(b, m, deterministic)
}
func (dst *ClassifyTextResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ClassifyTextResponse.Merge(dst, src)
}
func (m *ClassifyTextResponse) XXX_Size() int {
return xxx_messageInfo_ClassifyTextResponse.Size(m)
}
func (m *ClassifyTextResponse) XXX_DiscardUnknown() {
xxx_messageInfo_ClassifyTextResponse.DiscardUnknown(m)
}
var xxx_messageInfo_ClassifyTextResponse proto.InternalMessageInfo
func (m *ClassifyTextResponse) GetCategories() []*ClassificationCategory {
if m != nil {
return m.Categories
}
return nil
}
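// Illustrative sketch, not part of the generated code: ClassifyText takes only
// the document (no EncodingType, since no offsets are returned) and yields a
// list of ClassificationCategory values; this hypothetical helper simply hands
// the categories back to the caller.
func exampleClassifyText(ctx context.Context, client LanguageServiceClient, doc *Document) ([]*ClassificationCategory, error) {
	resp, err := client.ClassifyText(ctx, &ClassifyTextRequest{Document: doc})
	if err != nil {
		return nil, err
	}
	return resp.GetCategories(), nil
}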
// The request message for the text annotation API, which can perform multiple
// analysis types (sentiment, entities, and syntax) in one call.
type AnnotateTextRequest struct {
// Input document.
Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"`
// The enabled features.
Features *AnnotateTextRequest_Features `protobuf:"bytes,2,opt,name=features,proto3" json:"features,omitempty"`
// The encoding type used by the API to calculate offsets.
EncodingType EncodingType `protobuf:"varint,3,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnnotateTextRequest) Reset() { *m = AnnotateTextRequest{} }
func (m *AnnotateTextRequest) String() string { return proto.CompactTextString(m) }
func (*AnnotateTextRequest) ProtoMessage() {}
func (*AnnotateTextRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{20}
}
func (m *AnnotateTextRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnnotateTextRequest.Unmarshal(m, b)
}
func (m *AnnotateTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnnotateTextRequest.Marshal(b, m, deterministic)
}
func (dst *AnnotateTextRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnnotateTextRequest.Merge(dst, src)
}
func (m *AnnotateTextRequest) XXX_Size() int {
return xxx_messageInfo_AnnotateTextRequest.Size(m)
}
func (m *AnnotateTextRequest) XXX_DiscardUnknown() {
xxx_messageInfo_AnnotateTextRequest.DiscardUnknown(m)
}
var xxx_messageInfo_AnnotateTextRequest proto.InternalMessageInfo
func (m *AnnotateTextRequest) GetDocument() *Document {
if m != nil {
return m.Document
}
return nil
}
func (m *AnnotateTextRequest) GetFeatures() *AnnotateTextRequest_Features {
if m != nil {
return m.Features
}
return nil
}
func (m *AnnotateTextRequest) GetEncodingType() EncodingType {
if m != nil {
return m.EncodingType
}
return EncodingType_NONE
}
// All available features for sentiment, syntax, and semantic analysis.
// Setting each one to true will enable that specific analysis for the input.
type AnnotateTextRequest_Features struct {
// Extract syntax information.
ExtractSyntax bool `protobuf:"varint,1,opt,name=extract_syntax,json=extractSyntax,proto3" json:"extract_syntax,omitempty"`
// Extract entities.
ExtractEntities bool `protobuf:"varint,2,opt,name=extract_entities,json=extractEntities,proto3" json:"extract_entities,omitempty"`
// Extract document-level sentiment.
ExtractDocumentSentiment bool `protobuf:"varint,3,opt,name=extract_document_sentiment,json=extractDocumentSentiment,proto3" json:"extract_document_sentiment,omitempty"`
// Extract entities and their associated sentiment.
ExtractEntitySentiment bool `protobuf:"varint,4,opt,name=extract_entity_sentiment,json=extractEntitySentiment,proto3" json:"extract_entity_sentiment,omitempty"`
// Classify the full document into categories.
ClassifyText bool `protobuf:"varint,6,opt,name=classify_text,json=classifyText,proto3" json:"classify_text,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnnotateTextRequest_Features) Reset() { *m = AnnotateTextRequest_Features{} }
func (m *AnnotateTextRequest_Features) String() string { return proto.CompactTextString(m) }
func (*AnnotateTextRequest_Features) ProtoMessage() {}
func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{20, 0}
}
func (m *AnnotateTextRequest_Features) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnnotateTextRequest_Features.Unmarshal(m, b)
}
func (m *AnnotateTextRequest_Features) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnnotateTextRequest_Features.Marshal(b, m, deterministic)
}
func (dst *AnnotateTextRequest_Features) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnnotateTextRequest_Features.Merge(dst, src)
}
func (m *AnnotateTextRequest_Features) XXX_Size() int {
return xxx_messageInfo_AnnotateTextRequest_Features.Size(m)
}
func (m *AnnotateTextRequest_Features) XXX_DiscardUnknown() {
xxx_messageInfo_AnnotateTextRequest_Features.DiscardUnknown(m)
}
var xxx_messageInfo_AnnotateTextRequest_Features proto.InternalMessageInfo
func (m *AnnotateTextRequest_Features) GetExtractSyntax() bool {
if m != nil {
return m.ExtractSyntax
}
return false
}
func (m *AnnotateTextRequest_Features) GetExtractEntities() bool {
if m != nil {
return m.ExtractEntities
}
return false
}
func (m *AnnotateTextRequest_Features) GetExtractDocumentSentiment() bool {
if m != nil {
return m.ExtractDocumentSentiment
}
return false
}
func (m *AnnotateTextRequest_Features) GetExtractEntitySentiment() bool {
if m != nil {
return m.ExtractEntitySentiment
}
return false
}
func (m *AnnotateTextRequest_Features) GetClassifyText() bool {
if m != nil {
return m.ClassifyText
}
return false
}
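// Illustrative sketch, not part of the generated code: building an
// AnnotateTextRequest that enables every feature in a single call. Enabling all
// five features here is an assumption for demonstration; callers normally
// switch on only the analyses they need.
func exampleAnnotateAllFeatures(doc *Document) *AnnotateTextRequest {
	return &AnnotateTextRequest{
		Document: doc,
		Features: &AnnotateTextRequest_Features{
			ExtractSyntax:            true,
			ExtractEntities:          true,
			ExtractDocumentSentiment: true,
			ExtractEntitySentiment:   true,
			ClassifyText:             true,
		},
		EncodingType: EncodingType_UTF8,
	}
}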
// The text annotations response message.
type AnnotateTextResponse struct {
// Sentences in the input document. Populated if the user enables
// [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax].
Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"`
// Tokens, along with their syntactic information, in the input document.
// Populated if the user enables
// [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax].
Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"`
// Entities, along with their semantic information, in the input document.
// Populated if the user enables
// [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities].
Entities []*Entity `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"`
// The overall sentiment for the document. Populated if the user enables
// [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment].
DocumentSentiment *Sentiment `protobuf:"bytes,4,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"`
// The language of the text, which will be the same as the language specified
// in the request or, if not specified, the automatically-detected language.
// See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
Language string `protobuf:"bytes,5,opt,name=language,proto3" json:"language,omitempty"`
// Categories identified in the input document.
Categories []*ClassificationCategory `protobuf:"bytes,6,rep,name=categories,proto3" json:"categories,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AnnotateTextResponse) Reset() { *m = AnnotateTextResponse{} }
func (m *AnnotateTextResponse) String() string { return proto.CompactTextString(m) }
func (*AnnotateTextResponse) ProtoMessage() {}
func (*AnnotateTextResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_language_service_62b42b651f43d67b, []int{21}
}
func (m *AnnotateTextResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AnnotateTextResponse.Unmarshal(m, b)
}
func (m *AnnotateTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AnnotateTextResponse.Marshal(b, m, deterministic)
}
func (dst *AnnotateTextResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_AnnotateTextResponse.Merge(dst, src)
}
func (m *AnnotateTextResponse) XXX_Size() int {
return xxx_messageInfo_AnnotateTextResponse.Size(m)
}
func (m *AnnotateTextResponse) XXX_DiscardUnknown() {
xxx_messageInfo_AnnotateTextResponse.DiscardUnknown(m)
}
var xxx_messageInfo_AnnotateTextResponse proto.InternalMessageInfo
func (m *AnnotateTextResponse) GetSentences() []*Sentence {
if m != nil {
return m.Sentences
}
return nil
}
func (m *AnnotateTextResponse) GetTokens() []*Token {
if m != nil {
return m.Tokens
}
return nil
}
func (m *AnnotateTextResponse) GetEntities() []*Entity {
if m != nil {
return m.Entities
}
return nil
}
func (m *AnnotateTextResponse) GetDocumentSentiment() *Sentiment {
if m != nil {
return m.DocumentSentiment
}
return nil
}
func (m *AnnotateTextResponse) GetLanguage() string {
if m != nil {
return m.Language
}
return ""
}
func (m *AnnotateTextResponse) GetCategories() []*ClassificationCategory {
if m != nil {
return m.Categories
}
return nil
}
func init() {
proto.RegisterType((*Document)(nil), "google.cloud.language.v1beta2.Document")
proto.RegisterType((*Sentence)(nil), "google.cloud.language.v1beta2.Sentence")
proto.RegisterType((*Entity)(nil), "google.cloud.language.v1beta2.Entity")
proto.RegisterMapType((map[string]string)(nil), "google.cloud.language.v1beta2.Entity.MetadataEntry")
proto.RegisterType((*Token)(nil), "google.cloud.language.v1beta2.Token")
proto.RegisterType((*Sentiment)(nil), "google.cloud.language.v1beta2.Sentiment")
proto.RegisterType((*PartOfSpeech)(nil), "google.cloud.language.v1beta2.PartOfSpeech")
proto.RegisterType((*DependencyEdge)(nil), "google.cloud.language.v1beta2.DependencyEdge")
proto.RegisterType((*EntityMention)(nil), "google.cloud.language.v1beta2.EntityMention")
proto.RegisterType((*TextSpan)(nil), "google.cloud.language.v1beta2.TextSpan")
proto.RegisterType((*ClassificationCategory)(nil), "google.cloud.language.v1beta2.ClassificationCategory")
proto.RegisterType((*AnalyzeSentimentRequest)(nil), "google.cloud.language.v1beta2.AnalyzeSentimentRequest")
proto.RegisterType((*AnalyzeSentimentResponse)(nil), "google.cloud.language.v1beta2.AnalyzeSentimentResponse")
proto.RegisterType((*AnalyzeEntitySentimentRequest)(nil), "google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest")
proto.RegisterType((*AnalyzeEntitySentimentResponse)(nil), "google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse")
proto.RegisterType((*AnalyzeEntitiesRequest)(nil), "google.cloud.language.v1beta2.AnalyzeEntitiesRequest")
proto.RegisterType((*AnalyzeEntitiesResponse)(nil), "google.cloud.language.v1beta2.AnalyzeEntitiesResponse")
proto.RegisterType((*AnalyzeSyntaxRequest)(nil), "google.cloud.language.v1beta2.AnalyzeSyntaxRequest")
proto.RegisterType((*AnalyzeSyntaxResponse)(nil), "google.cloud.language.v1beta2.AnalyzeSyntaxResponse")
proto.RegisterType((*ClassifyTextRequest)(nil), "google.cloud.language.v1beta2.ClassifyTextRequest")
proto.RegisterType((*ClassifyTextResponse)(nil), "google.cloud.language.v1beta2.ClassifyTextResponse")
proto.RegisterType((*AnnotateTextRequest)(nil), "google.cloud.language.v1beta2.AnnotateTextRequest")
proto.RegisterType((*AnnotateTextRequest_Features)(nil), "google.cloud.language.v1beta2.AnnotateTextRequest.Features")
proto.RegisterType((*AnnotateTextResponse)(nil), "google.cloud.language.v1beta2.AnnotateTextResponse")
proto.RegisterEnum("google.cloud.language.v1beta2.EncodingType", EncodingType_name, EncodingType_value)
proto.RegisterEnum("google.cloud.language.v1beta2.Document_Type", Document_Type_name, Document_Type_value)
proto.RegisterEnum("google.cloud.language.v1beta2.Entity_Type", Entity_Type_name, Entity_Type_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Tag", PartOfSpeech_Tag_name, PartOfSpeech_Tag_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Aspect", PartOfSpeech_Aspect_name, PartOfSpeech_Aspect_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Case", PartOfSpeech_Case_name, PartOfSpeech_Case_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Form", PartOfSpeech_Form_name, PartOfSpeech_Form_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Gender", PartOfSpeech_Gender_name, PartOfSpeech_Gender_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Mood", PartOfSpeech_Mood_name, PartOfSpeech_Mood_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Number", PartOfSpeech_Number_name, PartOfSpeech_Number_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Person", PartOfSpeech_Person_name, PartOfSpeech_Person_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Proper", PartOfSpeech_Proper_name, PartOfSpeech_Proper_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Reciprocity", PartOfSpeech_Reciprocity_name, PartOfSpeech_Reciprocity_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Tense", PartOfSpeech_Tense_name, PartOfSpeech_Tense_value)
proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Voice", PartOfSpeech_Voice_name, PartOfSpeech_Voice_value)
proto.RegisterEnum("google.cloud.language.v1beta2.DependencyEdge_Label", DependencyEdge_Label_name, DependencyEdge_Label_value)
proto.RegisterEnum("google.cloud.language.v1beta2.EntityMention_Type", EntityMention_Type_name, EntityMention_Type_value)
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// LanguageServiceClient is the client API for LanguageService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type LanguageServiceClient interface {
// Analyzes the sentiment of the provided text.
AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error)
// Finds named entities (currently proper names and common nouns) in the text
// along with entity types, salience, mentions for each entity, and
// other properties.
AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error)
// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities], in the text and analyzes
// sentiment associated with each entity and its mentions.
AnalyzeEntitySentiment(ctx context.Context, in *AnalyzeEntitySentimentRequest, opts ...grpc.CallOption) (*AnalyzeEntitySentimentResponse, error)
// Analyzes the syntax of the text and provides sentence boundaries and
// tokenization along with part of speech tags, dependency trees, and other
// properties.
AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error)
// Classifies a document into categories.
ClassifyText(ctx context.Context, in *ClassifyTextRequest, opts ...grpc.CallOption) (*ClassifyTextResponse, error)
// A convenience method that provides all syntax, sentiment, entity, and
// classification features in one call.
AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error)
}
type languageServiceClient struct {
cc *grpc.ClientConn
}
func NewLanguageServiceClient(cc *grpc.ClientConn) LanguageServiceClient {
return &languageServiceClient{cc}
}
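// Illustrative sketch, not part of the generated code: a caller constructs the
// client from an existing *grpc.ClientConn and invokes one of the RPCs above.
// Dialing the service endpoint and attaching credentials are the caller's
// responsibility and are assumed to have happened elsewhere.
func exampleAnnotateOverConn(ctx context.Context, conn *grpc.ClientConn, req *AnnotateTextRequest) (*AnnotateTextResponse, error) {
	client := NewLanguageServiceClient(conn)
	return client.AnnotateText(ctx, req)
}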
func (c *languageServiceClient) AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) {
out := new(AnalyzeSentimentResponse)
err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *languageServiceClient) AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) {
out := new(AnalyzeEntitiesResponse)
err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *languageServiceClient) AnalyzeEntitySentiment(ctx context.Context, in *AnalyzeEntitySentimentRequest, opts ...grpc.CallOption) (*AnalyzeEntitySentimentResponse, error) {
out := new(AnalyzeEntitySentimentResponse)
err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *languageServiceClient) AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) {
out := new(AnalyzeSyntaxResponse)
err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *languageServiceClient) ClassifyText(ctx context.Context, in *ClassifyTextRequest, opts ...grpc.CallOption) (*ClassifyTextResponse, error) {
out := new(ClassifyTextResponse)
err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/ClassifyText", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *languageServiceClient) AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) {
out := new(AnnotateTextResponse)
err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnnotateText", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// LanguageServiceServer is the server API for LanguageService service.
type LanguageServiceServer interface {
// Analyzes the sentiment of the provided text.
AnalyzeSentiment(context.Context, *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error)
// Finds named entities (currently proper names and common nouns) in the text
// along with entity types, salience, mentions for each entity, and
// other properties.
AnalyzeEntities(context.Context, *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error)
// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities], in the text and analyzes
// sentiment associated with each entity and its mentions.
AnalyzeEntitySentiment(context.Context, *AnalyzeEntitySentimentRequest) (*AnalyzeEntitySentimentResponse, error)
// Analyzes the syntax of the text and provides sentence boundaries and
// tokenization along with part of speech tags, dependency trees, and other
// properties.
AnalyzeSyntax(context.Context, *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error)
// Classifies a document into categories.
ClassifyText(context.Context, *ClassifyTextRequest) (*ClassifyTextResponse, error)
// A convenience method that provides all syntax, sentiment, entity, and
// classification features in one call.
AnnotateText(context.Context, *AnnotateTextRequest) (*AnnotateTextResponse, error)
}
func RegisterLanguageServiceServer(s *grpc.Server, srv LanguageServiceServer) {
s.RegisterService(&_LanguageService_serviceDesc, srv)
}
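// Illustrative sketch, not part of the generated code: a hypothetical
// LanguageServiceServer implementation registered on a *grpc.Server. The stub
// bodies below are placeholders; a real implementation would perform the
// analyses itself or proxy them to another backend.
type exampleLanguageServer struct{}

func (exampleLanguageServer) AnalyzeSentiment(ctx context.Context, req *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error) {
	return nil, fmt.Errorf("AnalyzeSentiment not implemented")
}
func (exampleLanguageServer) AnalyzeEntities(ctx context.Context, req *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error) {
	return nil, fmt.Errorf("AnalyzeEntities not implemented")
}
func (exampleLanguageServer) AnalyzeEntitySentiment(ctx context.Context, req *AnalyzeEntitySentimentRequest) (*AnalyzeEntitySentimentResponse, error) {
	return nil, fmt.Errorf("AnalyzeEntitySentiment not implemented")
}
func (exampleLanguageServer) AnalyzeSyntax(ctx context.Context, req *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error) {
	return nil, fmt.Errorf("AnalyzeSyntax not implemented")
}
func (exampleLanguageServer) ClassifyText(ctx context.Context, req *ClassifyTextRequest) (*ClassifyTextResponse, error) {
	return nil, fmt.Errorf("ClassifyText not implemented")
}
func (exampleLanguageServer) AnnotateText(ctx context.Context, req *AnnotateTextRequest) (*AnnotateTextResponse, error) {
	return nil, fmt.Errorf("AnnotateText not implemented")
}

// exampleRegister wires the stub implementation into a gRPC server.
func exampleRegister(s *grpc.Server) {
	RegisterLanguageServiceServer(s, exampleLanguageServer{})
}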
func _LanguageService_AnalyzeSentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AnalyzeSentimentRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, req.(*AnalyzeSentimentRequest))
}
return interceptor(ctx, in, info, handler)
}
func _LanguageService_AnalyzeEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AnalyzeEntitiesRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(LanguageServiceServer).AnalyzeEntities(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(LanguageServiceServer).AnalyzeEntities(ctx, req.(*AnalyzeEntitiesRequest))
}
return interceptor(ctx, in, info, handler)
}
func _LanguageService_AnalyzeEntitySentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AnalyzeEntitySentimentRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(LanguageServiceServer).AnalyzeEntitySentiment(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(LanguageServiceServer).AnalyzeEntitySentiment(ctx, req.(*AnalyzeEntitySentimentRequest))
}
return interceptor(ctx, in, info, handler)
}
func _LanguageService_AnalyzeSyntax_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AnalyzeSyntaxRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, req.(*AnalyzeSyntaxRequest))
}
return interceptor(ctx, in, info, handler)
}
func _LanguageService_ClassifyText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ClassifyTextRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(LanguageServiceServer).ClassifyText(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.language.v1beta2.LanguageService/ClassifyText",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(LanguageServiceServer).ClassifyText(ctx, req.(*ClassifyTextRequest))
}
return interceptor(ctx, in, info, handler)
}
func _LanguageService_AnnotateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AnnotateTextRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(LanguageServiceServer).AnnotateText(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnnotateText",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(LanguageServiceServer).AnnotateText(ctx, req.(*AnnotateTextRequest))
}
return interceptor(ctx, in, info, handler)
}
var _LanguageService_serviceDesc = grpc.ServiceDesc{
ServiceName: "google.cloud.language.v1beta2.LanguageService",
HandlerType: (*LanguageServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "AnalyzeSentiment",
Handler: _LanguageService_AnalyzeSentiment_Handler,
},
{
MethodName: "AnalyzeEntities",
Handler: _LanguageService_AnalyzeEntities_Handler,
},
{
MethodName: "AnalyzeEntitySentiment",
Handler: _LanguageService_AnalyzeEntitySentiment_Handler,
},
{
MethodName: "AnalyzeSyntax",
Handler: _LanguageService_AnalyzeSyntax_Handler,
},
{
MethodName: "ClassifyText",
Handler: _LanguageService_ClassifyText_Handler,
},
{
MethodName: "AnnotateText",
Handler: _LanguageService_AnnotateText_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "google/cloud/language/v1beta2/language_service.proto",
}
func init() {
proto.RegisterFile("google/cloud/language/v1beta2/language_service.proto", fileDescriptor_language_service_62b42b651f43d67b)
}
var fileDescriptor_language_service_62b42b651f43d67b = []byte{
// 3019 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x3a, 0x4b, 0x73, 0xdb, 0xc6,
0xfd, 0x06, 0x5f, 0xa2, 0x96, 0x92, 0xbc, 0x86, 0x1d, 0x9b, 0x7f, 0xfd, 0xf3, 0x70, 0xe0, 0xb8,
0x56, 0xec, 0x44, 0x8a, 0x25, 0xc7, 0x71, 0x6d, 0xe7, 0x01, 0x01, 0x4b, 0x0a, 0x32, 0x09, 0xc0,
0x0b, 0x80, 0x92, 0x7d, 0xe1, 0xc0, 0x24, 0xc4, 0x70, 0x22, 0x02, 0x2c, 0x01, 0x79, 0xac, 0x5e,
0x32, 0xcd, 0x4c, 0x8f, 0x99, 0x1e, 0xf2, 0x11, 0x7a, 0xe8, 0xb4, 0x33, 0x9d, 0xb4, 0xd3, 0x99,
0x4e, 0x7b, 0xe8, 0x27, 0xe8, 0xb1, 0x33, 0xfd, 0x04, 0xfd, 0x00, 0x3d, 0xb6, 0xb7, 0xce, 0x6f,
0x77, 0x41, 0x82, 0xb2, 0x62, 0x89, 0x8e, 0xa7, 0x93, 0xde, 0x76, 0x7f, 0xf8, 0xbd, 0x9f, 0xbb,
0x4b, 0xa2, 0x5b, 0xbd, 0x28, 0xea, 0xed, 0x07, 0x6b, 0x9d, 0xfd, 0xe8, 0xa0, 0xbb, 0xb6, 0xef,
0x87, 0xbd, 0x03, 0xbf, 0x17, 0xac, 0x3d, 0xbd, 0xf9, 0x24, 0x48, 0xfc, 0xf5, 0x31, 0xa0, 0x1d,
0x07, 0xa3, 0xa7, 0xfd, 0x4e, 0xb0, 0x3a, 0x1c, 0x45, 0x49, 0x24, 0xbf, 0xc1, 0xa9, 0x56, 0x19,
0xd5, 0x6a, 0x8a, 0xb4, 0x2a, 0xa8, 0x96, 0x5f, 0x17, 0x4c, 0xfd, 0x61, 0x7f, 0xcd, 0x0f, 0xc3,
0x28, 0xf1, 0x93, 0x7e, 0x14, 0xc6, 0x9c, 0x78, 0xf9, 0x8a, 0xf8, 0xba, 0x1f, 0x85, 0xbd, 0xd1,
0x41, 0x18, 0xf6, 0xc3, 0xde, 0x5a, 0x34, 0x0c, 0x46, 0x53, 0x48, 0x6f, 0x09, 0x24, 0xb6, 0x7b,
0x72, 0xb0, 0xb7, 0x96, 0xf4, 0x07, 0x41, 0x9c, 0xf8, 0x83, 0xa1, 0x40, 0xb8, 0x24, 0x10, 0x46,
0xc3, 0xce, 0x5a, 0x9c, 0xf8, 0xc9, 0x81, 0xa0, 0x54, 0xfe, 0x29, 0xa1, 0xb2, 0x1e, 0x75, 0x0e,
0x06, 0x41, 0x98, 0xc8, 0x9f, 0xa1, 0x42, 0x72, 0x38, 0x0c, 0xaa, 0xd2, 0x65, 0x69, 0x65, 0x69,
0xfd, 0xbd, 0xd5, 0x17, 0xea, 0xbd, 0x9a, 0x92, 0xad, 0xba, 0x87, 0xc3, 0x80, 0x32, 0x4a, 0x79,
0x19, 0xcd, 0x75, 0xa2, 0x30, 0x09, 0xc2, 0xa4, 0x9a, 0xbb, 0x2c, 0xad, 0xcc, 0x6f, 0x9d, 0xa1,
0x29, 0x40, 0x5e, 0x41, 0x67, 0x7b, 0x9d, 0xb8, 0x2d, 0xb6, 0xed, 0x83, 0x51, 0xbf, 0x9a, 0x17,
0x38, 0x8b, 0xbd, 0x4e, 0xac, 0x71, 0xb8, 0x37, 0xea, 0xcb, 0xcb, 0xa8, 0x9c, 0x4a, 0xab, 0x16,
0x00, 0x85, 0x8e, 0xf7, 0xca, 0x6d, 0x54, 0x00, 0x79, 0xf2, 0x05, 0x84, 0xdd, 0x47, 0x36, 0x69,
0x7b, 0xa6, 0x63, 0x13, 0xcd, 0xa8, 0x19, 0x44, 0xc7, 0x67, 0xe4, 0x25, 0x84, 0xec, 0x86, 0x6a,
0x98, 0x6d, 0x97, 0xec, 0xba, 0x58, 0x92, 0xcb, 0xa8, 0xb0, 0xe5, 0x36, 0x1b, 0x38, 0xb7, 0x59,
0x46, 0xa5, 0x38, 0x3a, 0x18, 0x75, 0x02, 0xe5, 0x17, 0x12, 0x2a, 0x3b, 0x01, 0x08, 0xeb, 0x04,
0xf2, 0x3d, 0x54, 0x48, 0x82, 0x67, 0x09, 0x33, 0xb9, 0xb2, 0x7e, 0xed, 0x04, 0x93, 0xdd, 0xe0,
0x59, 0xe2, 0x0c, 0xfd, 0x90, 0x32, 0x22, 0xb9, 0x86, 0xe6, 0xe3, 0x20, 0x04, 0x5f, 0x0b, 0x7b,
0x2b, 0xeb, 0x2b, 0x27, 0x70, 0x70, 0x52, 0x7c, 0x3a, 0x21, 0x55, 0xbe, 0x29, 0xa0, 0x12, 0x09,
0x93, 0x7e, 0x72, 0x28, 0xcb, 0xa8, 0x10, 0xfa, 0x03, 0x1e, 0x82, 0x79, 0xca, 0xd6, 0xf2, 0x27,
0x22, 0x2c, 0x39, 0x16, 0x96, 0xeb, 0x27, 0x48, 0xe0, 0x8c, 0xb2, 0x41, 0xb1, 0x50, 0x79, 0x10,
0x24, 0x7e, 0xd7, 0x4f, 0xfc, 0x6a, 0xfe, 0x72, 0x7e, 0xa5, 0xb2, 0xbe, 0x71, 0x3a, 0x1e, 0x4d,
0x41, 0x45, 0xc2, 0x64, 0x74, 0x48, 0xc7, 0x4c, 0x20, 0x3e, 0xb1, 0xbf, 0xdf, 0x07, 0x07, 0xb2,
0xf8, 0xe4, 0xe8, 0x78, 0x2f, 0x6f, 0x81, 0xb0, 0x90, 0x25, 0x67, 0xb5, 0xc8, 0x84, 0xbd, 0x77,
0x2a, 0x61, 0x4d, 0x4e, 0x44, 0xc7, 0xd4, 0xd3, 0xde, 0x2d, 0xbd, 0xb4, 0x77, 0x97, 0xef, 0xa1,
0xc5, 0x29, 0x43, 0x64, 0x8c, 0xf2, 0x5f, 0x04, 0x87, 0xc2, 0xc5, 0xb0, 0x94, 0x2f, 0xa0, 0xe2,
0x53, 0x7f, 0xff, 0x80, 0xbb, 0x78, 0x9e, 0xf2, 0xcd, 0xdd, 0xdc, 0x1d, 0x49, 0x39, 0x14, 0xe9,
0x56, 0x41, 0x73, 0x9e, 0xf9, 0xc0, 0xb4, 0x76, 0x4c, 0x7c, 0x46, 0x46, 0xa8, 0x64, 0x13, 0xea,
0x58, 0x26, 0x96, 0xe4, 0x05, 0x54, 0x6e, 0x58, 0x9a, 0xea, 0x1a, 0x96, 0x89, 0x73, 0x32, 0x46,
0x0b, 0x16, 0xad, 0xab, 0xa6, 0xf1, 0x98, 0x43, 0xf2, 0xf2, 0x3c, 0x2a, 0x92, 0x16, 0x31, 0x5d,
0x5c, 0x90, 0xcf, 0xa2, 0xca, 0x8e, 0x45, 0x1f, 0xb4, 0xad, 0x5a, 0x5b, 0xa5, 0x2e, 0x2e, 0xca,
0xe7, 0xd0, 0xa2, 0x66, 0x99, 0x8e, 0xd7, 0x24, 0xb4, 0x5d, 0xb7, 0x2c, 0x1d, 0x97, 0x00, 0xdd,
0x72, 0xb7, 0x08, 0xc5, 0x73, 0xca, 0xcf, 0x73, 0xa8, 0xe8, 0x46, 0x5f, 0x04, 0xe1, 0xf7, 0x4b,
0xd2, 0x87, 0x68, 0x69, 0xe8, 0x8f, 0x92, 0x76, 0xb4, 0xd7, 0x8e, 0x87, 0x41, 0xd0, 0xf9, 0x5c,
0x64, 0xea, 0x8d, 0x13, 0xd8, 0xd8, 0xfe, 0x28, 0xb1, 0xf6, 0x1c, 0x46, 0x42, 0x17, 0x86, 0x99,
0x9d, 0xdc, 0x42, 0x67, 0xbb, 0xc1, 0x30, 0x08, 0xbb, 0x41, 0xd8, 0x39, 0x6c, 0x07, 0xdd, 0x5e,
0xc0, 0x2a, 0xb9, 0xb2, 0xfe, 0xfe, 0x49, 0x2d, 0x63, 0x4c, 0x45, 0xba, 0xbd, 0x80, 0x2e, 0x75,
0xa7, 0xf6, 0x10, 0x86, 0xfd, 0x60, 0x30, 0xf0, 0x45, 0xd1, 0xf3, 0x8d, 0xf2, 0x29, 0x9a, 0x1f,
0xc7, 0x55, 0x7e, 0x1d, 0xcd, 0x0f, 0xfc, 0x5e, 0xd8, 0x4f, 0x0e, 0xba, 0x3c, 0x5a, 0x39, 0x3a,
0x01, 0x00, 0x83, 0xb8, 0x13, 0x8d, 0xb8, 0x3a, 0x39, 0xca, 0x37, 0xca, 0x9f, 0xcf, 0xa1, 0x85,
0xac, 0x35, 0xb2, 0x8a, 0xf2, 0x89, 0xdf, 0x13, 0x6d, 0x6e, 0x6d, 0x06, 0x3f, 0xac, 0xba, 0x7e,
0x8f, 0x02, 0xad, 0xbc, 0x8d, 0x4a, 0x7e, 0x3c, 0x0c, 0x3a, 0x89, 0xa8, 0xca, 0xf5, 0x59, 0xb8,
0xa8, 0x8c, 0x92, 0x0a, 0x0e, 0xb2, 0x8e, 0x0a, 0x1d, 0x3f, 0xe6, 0x4a, 0x2f, 0xad, 0x7f, 0x30,
0x0b, 0x27, 0xcd, 0x8f, 0x03, 0xca, 0xa8, 0x81, 0xcb, 0x5e, 0x34, 0x1a, 0x30, 0xdf, 0xcd, 0xc8,
0xa5, 0x16, 0x8d, 0x06, 0x94, 0x51, 0x83, 0x5d, 0x3d, 0x08, 0xc9, 0xa8, 0x5a, 0x9c, 0xdd, 0xae,
0x3a, 0xa3, 0xa4, 0x82, 0x03, 0x68, 0x34, 0x88, 0xa2, 0x2e, 0xab, 0xdd, 0x19, 0x35, 0x6a, 0x46,
0x51, 0x97, 0x32, 0x6a, 0xd0, 0x28, 0x3c, 0x18, 0x3c, 0x09, 0x46, 0xd5, 0xb9, 0xd9, 0x35, 0x32,
0x19, 0x25, 0x15, 0x1c, 0x80, 0xd7, 0x30, 0x18, 0xc5, 0x51, 0x58, 0x2d, 0xcf, 0xce, 0xcb, 0x66,
0x94, 0x54, 0x70, 0x60, 0xbc, 0x46, 0x30, 0x89, 0xab, 0xf3, 0x2f, 0xc1, 0x8b, 0x51, 0x52, 0xc1,
0x41, 0x7e, 0x84, 0x2a, 0xa3, 0xa0, 0xd3, 0x1f, 0x8e, 0xa2, 0x4e, 0x3f, 0x39, 0xac, 0x22, 0xc6,
0xf0, 0xa3, 0x59, 0x18, 0xd2, 0x09, 0x39, 0xcd, 0xf2, 0x92, 0xeb, 0xa8, 0x98, 0x04, 0x61, 0x1c,
0x54, 0x2b, 0x8c, 0xe9, 0xcd, 0x99, 0xb2, 0x1d, 0x08, 0x29, 0xa7, 0x07, 0x46, 0x4f, 0xa3, 0x7e,
0x27, 0xa8, 0x2e, 0xcc, 0xce, 0xa8, 0x05, 0x84, 0x94, 0xd3, 0x2b, 0x5f, 0x4b, 0x28, 0xef, 0xfa,
0xbd, 0xe9, 0x96, 0x3a, 0x87, 0xf2, 0xaa, 0xbe, 0x8d, 0x25, 0xbe, 0xb0, 0x71, 0x8e, 0x2f, 0x5a,
0x38, 0x0f, 0x33, 0x5c, 0xb3, 0xcc, 0x6d, 0x5c, 0x00, 0x90, 0x4e, 0xa0, 0x71, 0x96, 0x51, 0xc1,
0xb4, 0x3c, 0x13, 0x97, 0x00, 0x64, 0x7a, 0x4d, 0x3c, 0x07, 0x20, 0x9b, 0x5a, 0x26, 0x2e, 0x03,
0xc8, 0xa6, 0x2e, 0x9e, 0x87, 0x5e, 0x6a, 0x7b, 0xa6, 0xe6, 0x62, 0x04, 0x5f, 0x5b, 0x84, 0x6e,
0xe2, 0x8a, 0x5c, 0x44, 0xd2, 0x2e, 0x5e, 0x80, 0x6f, 0x6a, 0xad, 0x66, 0xec, 0xe2, 0x45, 0xc5,
0x42, 0x25, 0x5e, 0x90, 0xb2, 0x8c, 0x96, 0x54, 0x38, 0x4d, 0xb8, 0xed, 0x89, 0x62, 0x70, 0xa2,
0x20, 0xb4, 0x46, 0x34, 0xd7, 0x68, 0x11, 0x2c, 0x41, 0x87, 0x37, 0x9a, 0x19, 0x48, 0x0e, 0xda,
0xba, 0x4d, 0xad, 0x3a, 0x25, 0x8e, 0x03, 0x80, 0xbc, 0xf2, 0x2f, 0x09, 0x15, 0xa0, 0x30, 0x01,
0x57, 0x53, 0x1d, 0x32, 0xcd, 0x4d, 0xd5, 0x34, 0xcf, 0x51, 0x05, 0xb7, 0x45, 0x34, 0xaf, 0xea,
0xa0, 0x99, 0xa1, 0x36, 0x70, 0x8e, 0x0f, 0x84, 0xa6, 0xdd, 0x20, 0x4d, 0x62, 0x32, 0x8c, 0x3c,
0xcc, 0x1a, 0x9d, 0x63, 0x17, 0x60, 0xd6, 0xd4, 0x89, 0x69, 0xb0, 0x5d, 0x91, 0x69, 0x62, 0x3a,
0x2e, 0xf5, 0x00, 0x59, 0x6d, 0xe0, 0xd2, 0x64, 0x16, 0xb5, 0x08, 0x9e, 0x03, 0x59, 0xa6, 0xd5,
0x34, 0x4c, 0xbe, 0x2f, 0x83, 0xbf, 0xad, 0xcd, 0x86, 0xf1, 0xd0, 0x23, 0x78, 0x1e, 0x04, 0xdb,
0x2a, 0x75, 0x39, 0x2f, 0x04, 0x82, 0x6d, 0x4a, 0x6c, 0xcb, 0x31, 0x60, 0x6c, 0xa9, 0x0d, 0x5c,
0x01, 0x67, 0x50, 0x52, 0x6b, 0x90, 0x5d, 0xa3, 0x45, 0xda, 0x60, 0x06, 0x5e, 0x00, 0x34, 0x4a,
0x1a, 0x8c, 0x21, 0x07, 0x2d, 0x82, 0xcc, 0x56, 0x2a, 0x73, 0x49, 0xf9, 0x56, 0x42, 0x05, 0xe8,
0x26, 0xa0, 0x5c, 0xcd, 0xa2, 0xcd, 0x8c, 0xe9, 0x0b, 0xa8, 0xac, 0xea, 0xa0, 0x90, 0xda, 0x10,
0x86, 0x7b, 0xbb, 0x46, 0xc3, 0x50, 0xe9, 0x23, 0x9c, 0x03, 0x61, 0x19, 0xc3, 0x1f, 0x13, 0x8a,
0xf3, 0x8c, 0x85, 0x61, 0xaa, 0x8d, 0x36, 0x31, 0x75, 0xc3, 0xac, 0xe3, 0x02, 0xf8, 0xa2, 0x4e,
0xa8, 0x67, 0xea, 0xb8, 0x08, 0x6b, 0x4a, 0xd4, 0x86, 0xe1, 0x70, 0xbb, 0x0d, 0x2a, 0x76, 0x73,
0x10, 0x5a, 0x67, 0xcb, 0xa2, 0x2e, 0x2e, 0x43, 0xd8, 0x1b, 0x96, 0x59, 0xe7, 0xb9, 0x60, 0x51,
0x9d, 0x50, 0x8c, 0x00, 0x5b, 0x1c, 0x19, 0x35, 0x5c, 0x51, 0x08, 0x2a, 0xf1, 0xb6, 0x05, 0x3a,
0xd4, 0x89, 0xa9, 0x13, 0x3a, 0xad, 0x74, 0x8d, 0x34, 0x0d, 0xd3, 0x30, 0x45, 0xb4, 0x9a, 0xaa,
0xa3, 0x79, 0x0d, 0xd8, 0xe6, 0x40, 0x05, 0x93, 0x78, 0x2e, 0x28, 0xab, 0x7c, 0x89, 0x0a, 0xd0,
0xb3, 0x40, 0xe9, 0xa6, 0x65, 0xe9, 0x19, 0x16, 0x17, 0x10, 0xd6, 0x2c, 0x53, 0x17, 0x8e, 0x6d,
0xc3, 0x57, 0x2c, 0x41, 0x70, 0x58, 0x1a, 0xa9, 0x22, 0x89, 0x60, 0x6f, 0xea, 0x86, 0x70, 0x64,
0x1e, 0x3c, 0x6d, 0x98, 0x2e, 0xa1, 0xd4, 0xaa, 0xa7, 0xd1, 0xaf, 0xa0, 0xb9, 0x6d, 0x8f, 0xe7,
0x58, 0x11, 0x92, 0xce, 0xf1, 0x36, 0xb7, 0x21, 0xbd, 0x01, 0x50, 0x52, 0x3e, 0x43, 0x25, 0xde,
0xec, 0xc0, 0x0e, 0xd3, 0x6b, 0x6e, 0x1e, 0xb5, 0xc3, 0x31, 0xcc, 0xba, 0xd7, 0x50, 0x29, 0x96,
0xd8, 0xf9, 0xa5, 0xe1, 0x51, 0x96, 0x72, 0x65, 0x54, 0xd0, 0x3d, 0xb5, 0x81, 0xf3, 0x8a, 0x8b,
0x4a, 0xbc, 0xc5, 0x01, 0x07, 0x7e, 0xbe, 0xc9, 0x70, 0x98, 0x47, 0xc5, 0x9a, 0x41, 0x1d, 0x97,
0x93, 0x3b, 0x04, 0x6c, 0xc2, 0x39, 0x00, 0xbb, 0x5b, 0x06, 0xd5, 0x71, 0x1e, 0x0c, 0x9d, 0x24,
0x8c, 0x38, 0x1f, 0x15, 0x94, 0x3b, 0xa8, 0xc4, 0x9b, 0x1d, 0xe3, 0x4a, 0x2d, 0x7b, 0x4a, 0x2f,
0xd0, 0x84, 0xc1, 0xb8, 0x4b, 0x4c, 0xcb, 0x6d, 0x8b, 0x7d, 0x4e, 0xd9, 0x46, 0x95, 0x4c, 0x57,
0x93, 0x2f, 0xa1, 0xf3, 0x94, 0x68, 0x86, 0x4d, 0x2d, 0xcd, 0x70, 0x1f, 0x4d, 0xd7, 0x54, 0xfa,
0x81, 0xa5, 0x16, 0xd8, 0x6f, 0x99, 0xed, 0x0c, 0x2c, 0xa7, 0xc4, 0xa8, 0xc8, 0x9a, 0x19, 0xf8,
0xd5, 0x25, 0xe6, 0x54, 0x4d, 0xbe, 0x86, 0xce, 0x65, 0x03, 0xc4, 0x3e, 0x73, 0x2b, 0x6b, 0x9e,
0xeb, 0x51, 0xc2, 0x9d, 0x64, 0xab, 0x8e, 0x8b, 0xf3, 0x10, 0x04, 0x9b, 0x12, 0x87, 0x1f, 0xe8,
0x16, 0xd1, 0xfc, 0xb8, 0x17, 0xe0, 0x22, 0xbf, 0x7c, 0x78, 0xe9, 0xbe, 0xa4, 0x6c, 0xa2, 0x22,
0x6b, 0x7c, 0x20, 0xb4, 0x65, 0x19, 0x1a, 0x99, 0x36, 0x5c, 0xd5, 0x26, 0x4d, 0x40, 0x53, 0xd3,
0x9e, 0x90, 0x63, 0x22, 0xd4, 0xb4, 0x97, 0xfc, 0xbe, 0x8c, 0x96, 0xa6, 0x4f, 0x4d, 0xf2, 0x0a,
0xc2, 0x9f, 0x07, 0x7e, 0xb7, 0x9d, 0xc0, 0xd9, 0xb0, 0xdd, 0x0f, 0xbb, 0xc1, 0x33, 0x76, 0x94,
0x29, 0xd2, 0x25, 0x80, 0xb3, 0x23, 0xa3, 0x01, 0x50, 0xd9, 0x40, 0xc5, 0x7d, 0xff, 0x49, 0xb0,
0x2f, 0xce, 0x28, 0x1b, 0x33, 0x9d, 0xce, 0x56, 0x1b, 0x40, 0x4a, 0x39, 0x07, 0xe5, 0xd7, 0x73,
0xa8, 0xc8, 0x00, 0xcf, 0x9d, 0x84, 0xd5, 0xcd, 0x4d, 0x4a, 0x5a, 0x58, 0x62, 0x2d, 0x15, 0x8a,
0x98, 0x67, 0x85, 0xaa, 0xb7, 0xb4, 0x06, 0xef, 0x5f, 0xaa, 0xde, 0x6a, 0x5a, 0x3a, 0x2e, 0x80,
0x1b, 0x55, 0x58, 0x15, 0x19, 0x82, 0x6d, 0x5b, 0x50, 0xbc, 0x00, 0x74, 0x5d, 0x8a, 0xe7, 0x58,
0xc7, 0xf7, 0x76, 0x79, 0xa7, 0x52, 0xbd, 0x5d, 0x70, 0x02, 0x9e, 0x97, 0x4b, 0x28, 0xa7, 0x69,
0x18, 0x01, 0x89, 0xc6, 0xd8, 0x57, 0xc6, 0x13, 0x81, 0xb5, 0x71, 0x0d, 0xea, 0x00, 0x2f, 0x32,
0x2f, 0xc2, 0x92, 0x91, 0x2d, 0xf1, 0x59, 0x61, 0xe3, 0xb3, 0xe9, 0xd0, 0xc0, 0x80, 0xa0, 0x1b,
0x8e, 0x66, 0x79, 0xd4, 0x21, 0xf8, 0x1c, 0x4b, 0x7c, 0x6b, 0x73, 0x1b, 0xcb, 0xb0, 0x22, 0xbb,
0x76, 0x03, 0x9f, 0x67, 0x0d, 0xd6, 0x22, 0xce, 0x8e, 0xe1, 0x6e, 0xe1, 0x0b, 0x00, 0x37, 0x00,
0xe3, 0x35, 0x58, 0x35, 0x55, 0xfa, 0x00, 0x5f, 0x04, 0x6e, 0xcd, 0x1d, 0x82, 0x2f, 0xf1, 0x45,
0x0b, 0x57, 0xd9, 0x04, 0x22, 0x75, 0xfc, 0x7f, 0xa0, 0xa8, 0x69, 0xe2, 0x65, 0x60, 0x62, 0xda,
0xc2, 0xe6, 0xff, 0x07, 0x0d, 0x4d, 0xa6, 0xe1, 0xeb, 0xa0, 0x80, 0x39, 0xd6, 0xf0, 0x8d, 0x74,
0x74, 0xbd, 0xc9, 0xfa, 0x08, 0x2b, 0x58, 0xfc, 0x16, 0x8c, 0x27, 0x1b, 0x5f, 0x16, 0xed, 0x59,
0x75, 0xd5, 0x5d, 0xc3, 0xc1, 0x6f, 0xf3, 0x94, 0xa0, 0x2e, 0x70, 0x54, 0xd8, 0x58, 0x63, 0x8e,
0xb8, 0xc2, 0xf2, 0x12, 0x34, 0x7c, 0x87, 0xaf, 0x1c, 0x07, 0x5f, 0x65, 0xb8, 0x96, 0xe3, 0x82,
0x4e, 0x3f, 0x12, 0xe9, 0xca, 0xb0, 0xaf, 0x8d, 0x37, 0xe6, 0x36, 0x5e, 0xe1, 0x95, 0x47, 0xc0,
0x33, 0xef, 0xf2, 0xd9, 0x49, 0x6a, 0xf8, 0xba, 0x58, 0xd9, 0xf8, 0x06, 0x93, 0x42, 0x2d, 0xb3,
0x81, 0xdf, 0x4b, 0x07, 0xea, 0xfb, 0x60, 0xa1, 0xed, 0xe0, 0x55, 0xb0, 0xf0, 0xa1, 0xa7, 0x9a,
0x4c, 0x9f, 0x35, 0xc0, 0xa4, 0x1a, 0x2c, 0x3f, 0x80, 0x0f, 0x6c, 0x49, 0x49, 0x03, 0xdf, 0x64,
0x1f, 0x74, 0x6a, 0xd9, 0x78, 0x1d, 0x58, 0x80, 0x80, 0x0d, 0xd0, 0x81, 0x92, 0xa6, 0xa9, 0x9a,
0x2e, 0xbe, 0xc5, 0x2b, 0x17, 0xec, 0x34, 0x75, 0xaf, 0x89, 0x3f, 0x04, 0xe9, 0xd4, 0xb2, 0x5c,
0x7c, 0x1b, 0x56, 0x0e, 0x38, 0xe7, 0x23, 0xb6, 0xf2, 0x6a, 0x35, 0x7c, 0x07, 0x56, 0x4c, 0xe2,
0x8f, 0x59, 0xd3, 0xb1, 0x6c, 0x43, 0xc3, 0x77, 0xd9, 0x60, 0x07, 0xe0, 0xbd, 0xa9, 0x41, 0x74,
0x1f, 0x50, 0x76, 0x99, 0xd9, 0x1f, 0xb3, 0x76, 0xe5, 0xb1, 0x59, 0xff, 0x09, 0xa3, 0x34, 0xdc,
0x06, 0xc1, 0x9f, 0xf2, 0x79, 0xd4, 0xb2, 0xb7, 0x80, 0xfa, 0x33, 0x91, 0x72, 0x50, 0x86, 0x58,
0x65, 0xd9, 0xe9, 0xed, 0xb6, 0x5a, 0x78, 0x13, 0x96, 0x3a, 0x93, 0xaa, 0x01, 0x4a, 0xcd, 0xa2,
0xc4, 0xa8, 0x9b, 0x58, 0x07, 0x57, 0x3c, 0xd8, 0xc1, 0x84, 0x4d, 0x18, 0xc3, 0x71, 0x71, 0x8d,
0x9f, 0x49, 0x9a, 0x1a, 0xae, 0xb3, 0x04, 0xb0, 0x9a, 0x3c, 0x2f, 0xb7, 0x60, 0x22, 0xa4, 0x3b,
0x16, 0x78, 0x83, 0x61, 0x7a, 0x4d, 0x0d, 0x6f, 0x83, 0x5b, 0x34, 0xcb, 0xc6, 0x0f, 0xc0, 0x13,
0xba, 0xe1, 0xb0, 0xe1, 0x4d, 0x74, 0xdc, 0x60, 0xa5, 0xe0, 0xd8, 0xb8, 0x09, 0xb8, 0x75, 0x10,
0x6f, 0xb2, 0x15, 0xc4, 0xda, 0x02, 0x83, 0x0c, 0xb3, 0x06, 0x50, 0x9b, 0xa5, 0x21, 0x71, 0xf0,
0x43, 0x96, 0x67, 0xcc, 0x60, 0xaa, 0x7c, 0x9d, 0x43, 0x8b, 0x53, 0x97, 0xea, 0xef, 0x77, 0x81,
0x24, 0x53, 0xcf, 0x0f, 0x37, 0x67, 0xb9, 0xcd, 0x67, 0x5f, 0x21, 0xa6, 0xae, 0xf3, 0xf9, 0x97,
0x7f, 0x2c, 0xf9, 0x40, 0xdc, 0xc8, 0x31, 0x5a, 0x10, 0x0f, 0x40, 0xc7, 0x0d, 0x13, 0x84, 0x4a,
0x9a, 0xd5, 0x6c, 0xc2, 0xa5, 0x5c, 0xa9, 0xa3, 0x72, 0x6a, 0x92, 0x5c, 0x9d, 0x3c, 0x50, 0xf1,
0xfb, 0xff, 0xf8, 0x79, 0xea, 0x6d, 0xb4, 0xf0, 0x24, 0xe8, 0xf5, 0xc3, 0x76, 0xb4, 0xb7, 0x17,
0x07, 0xfc, 0x5e, 0x57, 0xa4, 0x15, 0x06, 0xb3, 0x18, 0x48, 0x69, 0xa0, 0x8b, 0xda, 0xbe, 0x1f,
0xc7, 0xfd, 0xbd, 0x7e, 0x87, 0xbd, 0xbf, 0x69, 0x7e, 0x12, 0xf4, 0xa2, 0xd1, 0xf1, 0xcf, 0x36,
0x6f, 0x22, 0xd4, 0x89, 0xc2, 0xbd, 0x7e, 0x97, 0xbd, 0x93, 0xf0, 0xbb, 0x6a, 0x06, 0xa2, 0xfc,
0x4e, 0x42, 0x97, 0xd4, 0xd0, 0xdf, 0x3f, 0xfc, 0x69, 0x30, 0x31, 0x34, 0xf8, 0xc9, 0x41, 0x10,
0x27, 0xb2, 0x86, 0xca, 0x5d, 0xf1, 0xbc, 0x76, 0xca, 0xa0, 0xa5, 0xaf, 0x71, 0x74, 0x4c, 0x28,
0xdb, 0x68, 0x31, 0x08, 0x3b, 0x51, 0xb7, 0x1f, 0xf6, 0xda, 0x99, 0x08, 0xde, 0x38, 0x31, 0x82,
0x9c, 0x86, 0xc5, 0x6e, 0x21, 0xc8, 0xec, 0x94, 0xbf, 0x4b, 0xa8, 0xfa, 0xbc, 0xca, 0xf1, 0x30,
0x82, 0xd1, 0xba, 0x83, 0xe4, 0x54, 0x74, 0x7b, 0x12, 0x69, 0x69, 0xc6, 0x48, 0x9f, 0x4b, 0x79,
0x4c, 0xee, 0xfc, 0xd9, 0xe7, 0xc0, 0xdc, 0xf4, 0x73, 0xa0, 0x4c, 0x78, 0x56, 0x81, 0x43, 0x63,
0xf1, 0xb8, 0x75, 0xed, 0x14, 0xb2, 0x00, 0x9f, 0x4e, 0x28, 0x95, 0x3f, 0x4a, 0xe8, 0x0d, 0x61,
0x18, 0x4f, 0xe0, 0xff, 0x95, 0x88, 0x7c, 0x89, 0xde, 0xfc, 0x2e, 0xbd, 0x45, 0x58, 0x54, 0x54,
0x06, 0x58, 0xd2, 0x0f, 0xe2, 0xaa, 0xc4, 0x1c, 0x74, 0xf5, 0x54, 0x25, 0x4c, 0xc7, 0x64, 0x2f,
0x0a, 0x00, 0x9c, 0xf8, 0x2f, 0x66, 0x35, 0xe8, 0x07, 0xf1, 0x0f, 0xdc, 0x65, 0xcf, 0xc6, 0x65,
0x37, 0x51, 0xf8, 0xbf, 0xe3, 0xab, 0xdf, 0x4a, 0xe8, 0x42, 0x5a, 0x3e, 0x87, 0x61, 0xe2, 0x3f,
0xfb, 0x81, 0x7b, 0xea, 0x4f, 0x12, 0x7a, 0xed, 0x88, 0xbe, 0xc2, 0x51, 0x53, 0x65, 0x27, 0xbd,
0x6c, 0xd9, 0xc9, 0xf7, 0x51, 0x89, 0x9d, 0x62, 0xe3, 0x6a, 0x8e, 0xf1, 0x78, 0xe7, 0xa4, 0xc9,
0x04, 0xc8, 0x54, 0xd0, 0x4c, 0xb9, 0x3a, 0x7f, 0xc4, 0xd5, 0x8f, 0xd1, 0x79, 0xd1, 0xaa, 0x0f,
0xa1, 0xf7, 0xbf, 0x4a, 0x47, 0x2b, 0x03, 0x74, 0x61, 0x9a, 0xb7, 0x70, 0x8a, 0x87, 0x50, 0x87,
0x0f, 0x84, 0x49, 0xfe, 0x7c, 0x78, 0x02, 0xfb, 0xe3, 0xe7, 0x09, 0xcd, 0x30, 0x52, 0x7e, 0x56,
0x40, 0xe7, 0x55, 0xfe, 0xbb, 0x50, 0xf0, 0xaa, 0x6d, 0x91, 0x77, 0x50, 0x79, 0x2f, 0xf0, 0x93,
0x83, 0x51, 0x10, 0x8b, 0x77, 0xe1, 0x7b, 0x27, 0x30, 0x39, 0x46, 0x95, 0xd5, 0x9a, 0x60, 0x41,
0xc7, 0xcc, 0x9e, 0xcf, 0xc6, 0xfc, 0xf7, 0xcc, 0xc6, 0xe5, 0x7f, 0x4b, 0xa8, 0x9c, 0x0a, 0x92,
0xaf, 0xa2, 0xa5, 0xe0, 0x59, 0x32, 0xf2, 0x3b, 0x49, 0x3b, 0x66, 0xa9, 0xc9, 0x5c, 0x50, 0xa6,
0x8b, 0x02, 0xca, 0xf3, 0x55, 0x7e, 0x17, 0xe1, 0x14, 0x6d, 0x5c, 0xd8, 0x39, 0x86, 0x78, 0x56,
0xc0, 0xd3, 0x1e, 0x20, 0xdf, 0x47, 0xcb, 0x29, 0xea, 0x31, 0x63, 0x2c, 0xcf, 0x88, 0xaa, 0x02,
0x43, 0x7f, 0x6e, 0x46, 0xdd, 0x41, 0xd5, 0x29, 0x41, 0x87, 0x19, 0xda, 0x02, 0xa3, 0xbd, 0x98,
0x15, 0x38, 0xe9, 0xd3, 0xf2, 0x15, 0xb4, 0xd8, 0x11, 0xd9, 0xd4, 0x66, 0x87, 0xb4, 0x12, 0x43,
0x5f, 0xe8, 0x64, 0x52, 0x4c, 0xf9, 0x4d, 0x1e, 0x3a, 0x47, 0xd6, 0xf1, 0x3f, 0xa4, 0x42, 0xcc,
0xb6, 0xcd, 0xfc, 0xcb, 0xb5, 0xcd, 0xe3, 0x0f, 0x0f, 0x85, 0x57, 0x7b, 0x78, 0x28, 0x1e, 0x39,
0x3c, 0x4c, 0x17, 0x6c, 0xe9, 0x15, 0x15, 0xec, 0xf5, 0x3b, 0x68, 0x21, 0x9b, 0xc6, 0xfc, 0x66,
0x60, 0x12, 0x7c, 0x06, 0x56, 0x9e, 0x5b, 0xbb, 0xc3, 0x2f, 0xcb, 0x9e, 0x5b, 0xbb, 0x79, 0x9b,
0x5f, 0x96, 0x3d, 0xb7, 0xb6, 0xb1, 0x8e, 0xf3, 0xeb, 0x7f, 0x29, 0xa3, 0xb3, 0x0d, 0x21, 0xd1,
0xe1, 0xbf, 0x21, 0xcb, 0x7f, 0x90, 0x10, 0x3e, 0x7a, 0xe6, 0x92, 0x6f, 0x9f, 0x58, 0xa4, 0xc7,
0x9e, 0x2b, 0x97, 0x3f, 0x9a, 0x99, 0x8e, 0xe7, 0x99, 0xb2, 0xfa, 0xd5, 0xdf, 0xfe, 0xf1, 0x4d,
0x6e, 0x45, 0xb9, 0x32, 0xfe, 0xb1, 0x3b, 0x75, 0x75, 0x7c, 0xd7, 0x3f, 0x42, 0x74, 0x57, 0xba,
0x2e, 0x7f, 0x2b, 0xa1, 0xb3, 0x47, 0xa6, 0xac, 0xfc, 0xe1, 0xe9, 0x84, 0x1f, 0x39, 0x46, 0x2c,
0xdf, 0x9e, 0x95, 0x4c, 0xa8, 0xfc, 0x3e, 0x53, 0xf9, 0x9a, 0xa2, 0x7c, 0xb7, 0xca, 0x29, 0x0d,
0x68, 0xfc, 0xd7, 0x23, 0x07, 0x99, 0x4c, 0x89, 0xde, 0x9f, 0x41, 0x83, 0xe7, 0x4e, 0x8e, 0xcb,
0x1f, 0xbf, 0x24, 0xb5, 0x30, 0xe3, 0x16, 0x33, 0x63, 0x55, 0x79, 0xf7, 0x04, 0x33, 0x0e, 0xa7,
0xfc, 0xff, 0x2b, 0x09, 0x2d, 0x4e, 0x8d, 0x6e, 0x79, 0xe3, 0x94, 0xa1, 0xcf, 0x1e, 0x4c, 0x96,
0x6f, 0xcd, 0x46, 0x24, 0x54, 0xbe, 0xc1, 0x54, 0xbe, 0xaa, 0x5c, 0x7e, 0x41, 0xb2, 0x30, 0x0a,
0xd0, 0xf4, 0x97, 0x12, 0x5a, 0xc8, 0x8e, 0x53, 0x79, 0xfd, 0x74, 0x15, 0x98, 0x9d, 0xeb, 0xcb,
0x1b, 0x33, 0xd1, 0x08, 0x35, 0xaf, 0x33, 0x35, 0xdf, 0x51, 0xde, 0x3a, 0x46, 0xcd, 0x6c, 0xf7,
0x4d, 0xb5, 0xcc, 0x36, 0xe0, 0x13, 0xb5, 0x3c, 0x66, 0x4c, 0x2e, 0x6f, 0xcc, 0x44, 0x73, 0x0a,
0x2d, 0xfd, 0x0c, 0xc1, 0x5d, 0xe9, 0xfa, 0xe6, 0x57, 0x12, 0x7a, 0xbb, 0x13, 0x0d, 0x5e, 0x2c,
0x66, 0xf3, 0xc2, 0x91, 0x16, 0x63, 0x8f, 0xa2, 0x24, 0xb2, 0xa5, 0xc7, 0x44, 0x90, 0xf5, 0x22,
0x20, 0x59, 0x8d, 0x46, 0xbd, 0xb5, 0x5e, 0x10, 0xb2, 0xff, 0x89, 0xac, 0xf1, 0x4f, 0xfe, 0xb0,
0x1f, 0x7f, 0xc7, 0x9f, 0x5f, 0xee, 0xa5, 0x80, 0x27, 0x25, 0x46, 0xb1, 0xf1, 0x9f, 0x00, 0x00,
0x00, 0xff, 0xff, 0xcc, 0x93, 0x36, 0x44, 0x2d, 0x23, 0x00, 0x00,
}