Multilingual Classification

Make multilingual predictions


The Clarifai API supports many languages in addition to English. When making a predict API request, you can pass in the language in which you would like the concepts to be returned.

When you create a new Application, you must specify a default language, which will be the language of the returned concepts if no language is specified in the predict request.
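
If you create applications programmatically, the default language can be set at creation time. The snippet below is a minimal sketch of this, assuming the PostApps endpoint and the App resource's default_language field; the app ID is a placeholder.

# Your PAT (Personal Access Token) can be found in the Account's Security section
PAT = 'YOUR_PAT_HERE'
USER_ID = 'YOUR_USER_ID_HERE'

from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2

channel = ClarifaiChannel.get_grpc_channel()
stub = service_pb2_grpc.V2Stub(channel)

metadata = (('authorization', 'Key ' + PAT),)

post_apps_response = stub.PostApps(
    service_pb2.PostAppsRequest(
        user_app_id=resources_pb2.UserAppIDSet(user_id=USER_ID),
        apps=[
            resources_pb2.App(
                id="my-multilingual-app",  # Placeholder app ID
                default_language="zh"  # Concepts will be returned in Chinese unless overridden per request
            )
        ]
    ),
    metadata=metadata
)

if post_apps_response.status.code != status_code_pb2.SUCCESS:
    print(post_apps_response.status)
    raise Exception("Post apps failed, status: " + post_apps_response.status.description)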

Predict By Specific Language

You can predict concepts in a language other than the Application's default by explicitly passing in the desired language.

Below is an example of how you would predict concepts in Chinese using Clarifai's general-image-recognition model.

info

The initialization code used in the following examples is outlined in detail on the client installation page.

#########################################################################################################
# In this section, we set the user authentication, user and app ID, model details, URL of the image
# we want as an input, and prediction language. Change these strings to run your own example.
#########################################################################################################

# Your PAT (Personal Access Token) can be found in the Account's Security section
PAT = 'YOUR_PAT_HERE'
# Specify the correct user_id/app_id pairings
# Since you're making inferences outside your app's scope
USER_ID = 'clarifai'
APP_ID = 'main'
# Change these to whatever you want to process
MODEL_ID = 'general-image-recognition'
IMAGE_URL = 'https://samples.clarifai.com/metro-north.jpg'
PREDICT_LANGUAGE = "zh" # Chinese

############################################################################
# YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
############################################################################

from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2

channel = ClarifaiChannel.get_grpc_channel()
stub = service_pb2_grpc.V2Stub(channel)

metadata = (('authorization', 'Key ' + PAT),)

userDataObject = resources_pb2.UserAppIDSet(user_id=USER_ID, app_id=APP_ID)

post_model_outputs_response = stub.PostModelOutputs(
    service_pb2.PostModelOutputsRequest(
        user_app_id=userDataObject,  # The userDataObject is created in the overview and is required when using a PAT
        model_id=MODEL_ID,
        inputs=[
            resources_pb2.Input(
                data=resources_pb2.Data(
                    image=resources_pb2.Image(
                        url=IMAGE_URL
                    )
                )
            )
        ],
        model=resources_pb2.Model(
            output_info=resources_pb2.OutputInfo(
                output_config=resources_pb2.OutputConfig(
                    language=PREDICT_LANGUAGE
                )
            )
        )
    ),
    metadata=metadata
)

if post_model_outputs_response.status.code != status_code_pb2.SUCCESS:
    print(post_model_outputs_response.status)
    raise Exception("Post model outputs failed, status: " + post_model_outputs_response.status.description)

# Since we have one input, one output will exist here.
output = post_model_outputs_response.outputs[0]

print("Predicted concepts:")
for concept in output.data.concepts:
print("\t%s %.2f" % (concept.name, concept.value))

# Uncomment this line to print the raw output
#print(output)
Text Output Example
Predicted concepts:
铁路列车 1.00
铁路 1.00
地铁 1.00
站 1.00
火车 1.00
运输系统 1.00
旅游 0.99
通勤 0.98
平台 0.98
光 0.97
铁路车站 0.97
模煳 0.97
城市 0.96
马路 0.96
城市的 0.96
交通 0.96
街道 0.95
公共 0.93
有轨电车(工业) 0.93
商业 0.93
Raw Output Example
id: "1b2bda0911ed4a58a70198f495aaf8bc"
status {
code: SUCCESS
description: "Ok"
}
created_at {
seconds: 1701799513
nanos: 885844210
}
model {
id: "general-image-recognition"
name: "Image Recognition"
created_at {
seconds: 1457543499
nanos: 608845000
}
app_id: "main"
model_version {
id: "aa7f35c01e0642fda5cf400f543e7c40"
created_at {
seconds: 1520370624
nanos: 454834000
}
status {
code: MODEL_TRAINED
description: "Model is trained and ready"
}
visibility {
gettable: PUBLIC
}
app_id: "main"
user_id: "clarifai"
metadata {
}
}
user_id: "clarifai"
model_type_id: "visual-classifier"
visibility {
gettable: PUBLIC
}
modified_at {
seconds: 1694180313
nanos: 148401000
}
workflow_recommended {
}
}
input {
id: "d89a929f60584d5a84854ec60f2243ab"
data {
image {
url: "https://samples.clarifai.com/metro-north.jpg"
}
}
}
data {
concepts {
id: "ai_HLmqFqBf"
name: "\351\223\201\350\267\257\345\210\227\350\275\246"
value: 0.9996053576469421
app_id: "main"
}
concepts {
id: "ai_fvlBqXZR"
name: "\351\223\201\350\267\257"
value: 0.9992986917495728
app_id: "main"
}
concepts {
id: "ai_SHNDcmJ3"
name: "\345\234\260\351\223\201"
value: 0.9982585310935974
app_id: "main"
}
concepts {
id: "ai_6kTjGfF6"
name: "\347\253\231"
value: 0.9980133771896362
app_id: "main"
}
concepts {
id: "ai_RRXLczch"
name: "\347\201\253\350\275\246"
value: 0.9972604513168335
app_id: "main"
}
concepts {
id: "ai_Xxjc3MhT"
name: "\350\277\220\350\276\223\347\263\273\347\273\237"
value: 0.9969792366027832
app_id: "main"
}
concepts {
id: "ai_VRmbGVWh"
name: "\346\227\205\346\270\270"
value: 0.9889689683914185
app_id: "main"
}
concepts {
id: "ai_jlb9q33b"
name: "\351\200\232\345\213\244"
value: 0.9809139370918274
app_id: "main"
}
concepts {
id: "ai_2gkfMDsM"
name: "\345\271\263\345\217\260"
value: 0.9806650876998901
app_id: "main"
}
concepts {
id: "ai_n9vjC1jB"
name: "\345\205\211"
value: 0.9741945266723633
app_id: "main"
}
concepts {
id: "ai_sQQj52KZ"
name: "\351\223\201\350\267\257\350\275\246\347\253\231"
value: 0.9688410758972168
app_id: "main"
}
concepts {
id: "ai_l4WckcJN"
name: "\346\250\241\347\205\263"
value: 0.9673133492469788
app_id: "main"
}
concepts {
id: "ai_WBQfVV0p"
name: "\345\237\216\345\270\202"
value: 0.9615091681480408
app_id: "main"
}
concepts {
id: "ai_TZ3C79C6"
name: "\351\251\254\350\267\257"
value: 0.9613693356513977
app_id: "main"
}
concepts {
id: "ai_CpFBRWzD"
name: "\345\237\216\345\270\202\347\232\204"
value: 0.960391640663147
app_id: "main"
}
concepts {
id: "ai_tr0MBp64"
name: "\344\272\244\351\200\232"
value: 0.9599775075912476
app_id: "main"
}
concepts {
id: "ai_GjVpxXrs"
name: "\350\241\227\351\201\223"
value: 0.9475197196006775
app_id: "main"
}
concepts {
id: "ai_mcSHVRfS"
name: "\345\205\254\345\205\261"
value: 0.934360921382904
app_id: "main"
}
concepts {
id: "ai_J6d1kV8t"
name: "\346\234\211\350\275\250\347\224\265\350\275\246\357\274\210\345\267\245\344\270\232\357\274\211"
value: 0.9320586323738098
app_id: "main"
}
concepts {
id: "ai_6lhccv44"
name: "\345\225\206\344\270\232"
value: 0.9294787645339966
app_id: "main"
}
}
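
Note that the concept id values in the raw output (for example, ai_HLmqFqBf) are language-independent; only the name is localized. As a small illustrative sketch (not part of the original example), you could map IDs to their localized names directly from the response above:

# Map language-independent concept IDs to the localized names returned above
localized_names = {concept.id: concept.name for concept in output.data.concepts}
print(localized_names["ai_HLmqFqBf"])  # Prints the Chinese name for this concept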

Search Concepts in Languages

You can search for concepts in other languages even if the default language of your application is English. When you add inputs to your application, concepts are predicted for every language.

Below is an example of how you would search for '人', which is Simplified Chinese for 'people'.

##########################################################################################
# In this section, we set the user authentication, app ID, and concept name and language.
# Change these strings to run your own example.
##########################################################################################

USER_ID = 'YOUR_USER_ID_HERE'
# Your PAT (Personal Access Token) can be found in the Account's Security section
PAT = 'YOUR_PAT_HERE'
APP_ID = 'YOUR_APP_ID_HERE'
# Change these to whatever you want to process
CONCEPT_NAME = '人'
CONCEPT_LANGUAGE = "zh" # Chinese

############################################################################
# YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
############################################################################

from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2

channel = ClarifaiChannel.get_grpc_channel()
stub = service_pb2_grpc.V2Stub(channel)

metadata = (('authorization', 'Key ' + PAT),)

userDataObject = resources_pb2.UserAppIDSet(user_id=USER_ID, app_id=APP_ID)

post_concepts_searches_response = stub.PostConceptsSearches(
    service_pb2.PostConceptsSearchesRequest(
        user_app_id=userDataObject,  # The userDataObject is created in the overview and is required when using a PAT
        concept_query=resources_pb2.ConceptQuery(
            name=CONCEPT_NAME,
            language=CONCEPT_LANGUAGE
        )
    ),
    metadata=metadata
)

if post_concepts_searches_response.status.code != status_code_pb2.SUCCESS:
    print(post_concepts_searches_response.status)
    raise Exception("Post concepts searches failed, status: " + post_concepts_searches_response.status.description)

print("Found concepts:")
for concept in post_concepts_searches_response.concepts:
print("\t%s %.2f" % (concept.name, concept.value))

# Uncomment this line to print the raw output
#print(post_concepts_searches_response)
Text Output Example
Found concepts:
人 1.00
人 1.00
Raw Output Example
status {
code: SUCCESS
description: "Ok"
req_id: "ca65f42148166781ce557b825945ec60"
}
concepts {
id: "ai_ZKJ48TFz"
name: "\344\272\272"
value: 1.0
created_at {
seconds: 1458214981
nanos: 223962000
}
language: "zh"
app_id: "main"
visibility {
gettable: PUBLIC
}
user_id: "clarifai"
}
concepts {
id: "ai_l8TKp2h5"
name: "\344\272\272"
value: 1.0
created_at {
seconds: 1458214981
nanos: 223962000
}
language: "zh"
app_id: "main"
visibility {
gettable: PUBLIC
}
user_id: "clarifai"
}