24
24
import argparse
25
25
26
26
from google .cloud import language
27
+ import six
27
28
28
29
29
30
def sentiment_text (text ):
30
31
"""Detects sentiment in the text."""
31
32
language_client = language .Client ()
32
33
34
+ if isinstance (text , six .binary_type ):
35
+ text = text .decode ('utf-8' )
36
+
33
37
# Instantiates a plain text document.
34
38
document = language_client .document_from_text (text )
35
39
@@ -60,6 +64,9 @@ def entities_text(text):
60
64
"""Detects entities in the text."""
61
65
language_client = language .Client ()
62
66
67
+ if isinstance (text , six .binary_type ):
68
+ text = text .decode ('utf-8' )
69
+
63
70
# Instantiates a plain text document.
64
71
document = language_client .document_from_text (text )
65
72
@@ -69,11 +76,11 @@ def entities_text(text):
69
76
70
77
for entity in entities :
71
78
print ('=' * 20 )
72
- print ('{:<16}: {}' .format ('name' , entity .name ))
73
- print ('{:<16}: {}' .format ('type' , entity .entity_type ))
74
- print ('{:<16}: {}' .format ('metadata' , entity .metadata ))
75
- print ('{:<16}: {}' .format ('salience' , entity .salience ))
76
- print ('{:<16}: {}' .format ('wikipedia_url' ,
79
+ print (u '{:<16}: {}' .format ('name' , entity .name ))
80
+ print (u '{:<16}: {}' .format ('type' , entity .entity_type ))
81
+ print (u '{:<16}: {}' .format ('metadata' , entity .metadata ))
82
+ print (u '{:<16}: {}' .format ('salience' , entity .salience ))
83
+ print (u '{:<16}: {}' .format ('wikipedia_url' ,
77
84
entity .metadata .get ('wikipedia_url' , '-' )))
78
85
79
86
@@ -90,18 +97,21 @@ def entities_file(gcs_uri):
90
97
91
98
for entity in entities :
92
99
print ('=' * 20 )
93
- print ('{:<16}: {}' .format ('name' , entity .name ))
94
- print ('{:<16}: {}' .format ('type' , entity .entity_type ))
95
- print ('{:<16}: {}' .format ('metadata' , entity .metadata ))
96
- print ('{:<16}: {}' .format ('salience' , entity .salience ))
97
- print ('{:<16}: {}' .format ('wikipedia_url' ,
100
+ print (u '{:<16}: {}' .format ('name' , entity .name ))
101
+ print (u '{:<16}: {}' .format ('type' , entity .entity_type ))
102
+ print (u '{:<16}: {}' .format ('metadata' , entity .metadata ))
103
+ print (u '{:<16}: {}' .format ('salience' , entity .salience ))
104
+ print (u '{:<16}: {}' .format ('wikipedia_url' ,
98
105
entity .metadata .get ('wikipedia_url' , '-' )))
99
106
100
107
101
108
def syntax_text(text):
    """Detects syntax in the text.

    Prints one line per token in the form ``<part_of_speech>: <text>``.

    Args:
        text: The text to analyze. May be ``str`` or bytes
            (``six.binary_type``); bytes are decoded as UTF-8 before
            analysis.
    """
    language_client = language.Client()

    # The Document factory expects unicode text; transparently decode
    # byte strings (Python 2 ``str`` / Python 3 ``bytes``) as UTF-8.
    if isinstance(text, six.binary_type):
        text = text.decode('utf-8')

    # Instantiates a plain text document.
    document = language_client.document_from_text(text)

    # NOTE(review): two original lines are hidden here by a diff hunk
    # boundary (old lines 108-109); presumed blank/comment lines since
    # the statement flow is contiguous -- verify against the full file.
    tokens = document.analyze_syntax().tokens

    for token in tokens:
        # u'' format string keeps the output unicode-safe on Python 2.
        print(u'{}: {}'.format(token.part_of_speech, token.text_content))
def syntax_file (gcs_uri ):
@@ -125,7 +135,7 @@ def syntax_file(gcs_uri):
125
135
tokens = document .analyze_syntax ().tokens
126
136
127
137
for token in tokens :
128
- print ('{}: {}' .format (token .part_of_speech , token .text_content ))
138
+ print (u '{}: {}' .format (token .part_of_speech , token .text_content ))
129
139
130
140
131
141
if __name__ == '__main__' :
0 commit comments