Workflow Predict
Make predictions with your workflows
The Workflow Predict API allows you to make predictions using one or more models, whether they are Clarifai's pre-built models or custom creations — all in a single API call.
The maximum number of inputs that can be processed at once with any given workflow is 32.
After you're set up, you can initiate predictions under a specific workflow by utilizing the POST /v2/workflows/WORKFLOW_ID_HERE/results
endpoint, where WORKFLOW_ID_HERE
corresponds to the unique ID you assigned to your workflow.
When crafting the request body, its layout remains consistent with the usual approach for making a prediction call. The response body will include a results
object, with each sub-object representing a response from the models, maintaining the same order as specified in the workflow you configured.
You can also use the Workflow Builder in the Clarifai Portal to build your workflows and see the results of their predictions on a given input.
The initialization code used in the following example is outlined in detail on the client installation page.
If you want to make a predict call with an external workflow that is outside the scope of your app, you need to use a PAT while specifying the app_id
and the user_id
associated with the workflow you want to use.
Images
Let's illustrate how you would get predictions from image inputs using Clarifai's Face-Sentiment workflow. The workflow combines these three models:
- A visual detector model that detects bounding box regions in an image;
- An image cropper model that extracts the specific region of interest from an image;
- A visual classifier model that classifies an image into a set of concepts.
Note that the base64
output representation of the image in bytes is already in binary format. It is not encoded, so you do not need to decode it for further downstream tasks.
- Python
- JavaScript (REST)
- NodeJS
- Java
- PHP
- cURL
##############################################################################
# In this section, we set the user authentication, app ID, workflow ID, and
# image URL. Change these strings to run your own example.
##############################################################################
# Your PAT (Personal Access Token) can be found in the Account's Security section
PAT = "YOUR_PAT_HERE"
# user_id/app_id pair that owns the workflow (Clarifai's public "main" app here)
USER_ID = "clarifai"
APP_ID = "main"
# Change these to make your own predictions
WORKFLOW_ID = "Face-Sentiment"
IMAGE_URL = "https://samples.clarifai.com/celebrity.jpeg"
# Or, to use a local image file, assign the location variable
# IMAGE_FILE_LOCATION = "YOUR_IMAGE_FILE_LOCATION_HERE"
##########################################################################
# YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
##########################################################################
from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2
# Open a gRPC channel to the Clarifai API and create the V2 service stub
channel = ClarifaiChannel.get_grpc_channel()
stub = service_pb2_grpc.V2Stub(channel)
# Authorization metadata is sent with every Clarifai endpoint call
metadata = (("authorization", "Key " + PAT),)
userDataObject = resources_pb2.UserAppIDSet(
    user_id=USER_ID, app_id=APP_ID
)  # The userDataObject is required when using a PAT
# To use a local image file, uncomment the following lines
# with open(IMAGE_FILE_LOCATION, "rb") as f:
#     file_bytes = f.read()
# Run every model in the workflow over the input(s) in a single API call
post_workflow_results_response = stub.PostWorkflowResults(
    service_pb2.PostWorkflowResultsRequest(
        user_app_id=userDataObject,
        workflow_id=WORKFLOW_ID,
        inputs=[
            resources_pb2.Input(
                data=resources_pb2.Data(
                    image=resources_pb2.Image(
                        url=IMAGE_URL
                        # base64=file_bytes
                    )
                )
            )
        ],
    ),
    metadata=metadata,
)
# Fail fast if the API reports anything other than SUCCESS
if post_workflow_results_response.status.code != status_code_pb2.SUCCESS:
    print(post_workflow_results_response.status)
    raise Exception(
        "Post workflow results failed, status: "
        + post_workflow_results_response.status.description
    )
# We'll get one WorkflowResult for each input we used above. Because of one input, we have here one WorkflowResult
results = post_workflow_results_response.results[0]
# Each model we have in the workflow will produce one output,
# in the same order as the models are configured in the workflow.
for output in results.outputs:
    model = output.model
    print("Predicted concepts for the model `%s`" % model.id)
    for concept in output.data.regions:
        for item in concept.data.concepts:
            print("\t%s %.2f" % (item.name, item.value))
# Uncomment this line to print the raw output
# print(results)
<!--index.html file-->
<script>
////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, app ID, workflow ID, and
// image URL. Change these strings to run your own example.
///////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
const PAT = "YOUR_PAT_HERE";
// user_id/app_id pair that owns the workflow (Clarifai's public "main" app here)
const USER_ID = "clarifai";
const APP_ID = "main";
// Change these to make your own predictions
const WORKFLOW_ID = "Face-Sentiment";
const IMAGE_URL = "https://samples.clarifai.com/celebrity.jpeg";
///////////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
///////////////////////////////////////////////////////////////////////////////////
// JSON request body: the user_app_id scopes the call, and each entry of
// "inputs" carries one image to run through the workflow
const raw = JSON.stringify({
  "user_app_id": {
    "user_id": USER_ID,
    "app_id": APP_ID
  },
  "inputs": [
    {
      "data": {
        "image": {
          "url": IMAGE_URL
        }
      }
    }
  ]
});
const requestOptions = {
  method: 'POST',
  headers: {
    'Accept': 'application/json',
    'Authorization': 'Key ' + PAT
  },
  body: raw
};
// POST to the Workflow Results endpoint and print the raw JSON response
fetch(`https://api.clarifai.com/v2/workflows/${WORKFLOW_ID}/results`, requestOptions)
  .then(response => response.text())
  .then(result => console.log(result))
  .catch(error => console.log('error', error));
</script>
//index.js file
///////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, app ID, workflow ID, and
// image URL. Change these strings to run your own example.
///////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
const PAT = "YOUR_PAT_HERE";
// Specify the correct user_id/app_id pairings
// Since you're making inferences outside your app's scope
const USER_ID = "clarifai";
const APP_ID = "main";
// Change these to make your own predictions
const WORKFLOW_ID = "Face-Sentiment";
const IMAGE_URL = "https://samples.clarifai.com/celebrity.jpeg";
// Or, to use a local text file, assign the location variable
// const IMAGE_FILE_LOCATION = "YOUR_IMAGE_FILE_LOCATION_HERE";
/////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
/////////////////////////////////////////////////////////////////////////////
const { ClarifaiStub, grpc } = require("clarifai-nodejs-grpc");
const stub = ClarifaiStub.grpc();
// This will be used by every Clarifai endpoint call
const metadata = new grpc.Metadata();
metadata.set("authorization", "Key " + PAT);
// To use a local text file, uncomment the following lines
// const fs = require("fs");
// const imageBytes = fs.readFileSync(IMAGE_FILE_LOCATION);
stub.PostWorkflowResults({
user_app_id: {
"user_id": USER_ID,
"app_id": APP_ID,
},
workflow_id: WORKFLOW_ID,
inputs: [{
data: {
image: {
url: IMAGE_URL,
// base64: imageBytes
}
}
}],
},
metadata,
(err, response) => {
if (err) {
throw new Error(err);
}
if (response.status.code !== 10000) {
throw new Error(
"Post workflow results failed, status: " + response.status.description
);
}
// We'll get one WorkflowResult for each input we used above. Because of one input, we have here
// one WorkflowResult
const results = response.results[0];
// Each model we have in the workflow will produce one output.
for (const output of results.outputs) {
const model = output.model;
console.log(`Predicted concepts for the model '${model.id}'`);
for (const concept of output.data.regions) {
for (const item of concept.data.concepts) {
console.log(`\t${item.name} ${item.value.toFixed(2)}`);
}
}
}
// Uncomment this line to print the raw output
// console.log(results);
}
);
package com.clarifai.example;

import com.clarifai.channel.ClarifaiChannel;
import com.clarifai.credentials.ClarifaiCallCredentials;
import com.clarifai.grpc.api.*;
import com.clarifai.grpc.api.status.StatusCode;
import com.google.protobuf.ByteString;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class ClarifaiExample {
    ///////////////////////////////////////////////////////////////////////////////////
    // In this section, we set the user authentication, app ID, workflow ID, and
    // image URL. Change these strings to run your own example.
    ///////////////////////////////////////////////////////////////////////////////////
    // Your PAT (Personal Access Token) can be found in the portal under Authentication
    static final String PAT = "YOUR_PAT_HERE";
    // user_id/app_id pair that owns the workflow (Clarifai's public "main" app here)
    static final String USER_ID = "clarifai";
    static final String APP_ID = "main";
    // Change these to make your own predictions
    static final String WORKFLOW_ID = "Face-Sentiment";
    static final String IMAGE_URL = "https://samples.clarifai.com/celebrity.jpeg";
    // Or, to use a local image file, assign the location variable
    // static final String IMAGE_FILE_LOCATION = "YOUR_IMAGE_FILE_LOCATION_HERE";
    ///////////////////////////////////////////////////////////////////////////////////
    // YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
    ///////////////////////////////////////////////////////////////////////////////////
    public static void main(String[] args) throws IOException {
        // Open a gRPC channel to the Clarifai API; the PAT is attached to
        // every call through the call credentials
        V2Grpc.V2BlockingStub stub = V2Grpc.newBlockingStub(ClarifaiChannel.INSTANCE.getGrpcChannel())
            .withCallCredentials(new ClarifaiCallCredentials(PAT));
        // Run every model in the workflow over the input(s) in a single API call
        PostWorkflowResultsResponse postWorkflowResultsResponse = stub.postWorkflowResults(
            PostWorkflowResultsRequest.newBuilder()
                .setUserAppId(UserAppIDSet.newBuilder().setUserId(USER_ID).setAppId(APP_ID))
                .setWorkflowId(WORKFLOW_ID)
                .addInputs(
                    Input.newBuilder().setData(
                        Data.newBuilder().setImage(
                            Image.newBuilder().setUrl(IMAGE_URL)
                            // To use a local image file, uncomment the following lines
                            //Image.newBuilder().setBase64(ByteString.copyFrom(Files.readAllBytes(
                            //    new File(IMAGE_FILE_LOCATION).toPath()
                            //)))
                        )
                    )
                )
                .build()
        );
        // Fail fast if the API reports anything other than SUCCESS
        if (postWorkflowResultsResponse.getStatus().getCode() != StatusCode.SUCCESS) {
            throw new RuntimeException("Post workflow results failed, status: " + postWorkflowResultsResponse.getStatus());
        }
        // We'll get one WorkflowResult for each input we used above. Because of one input, we have here
        // one WorkflowResult
        WorkflowResult results = postWorkflowResultsResponse.getResults(0);
        // Each model we have in the workflow will produce its output,
        // in the same order as the models are configured in the workflow
        for (Output output : results.getOutputsList()) {
            Model model = output.getModel();
            System.out.println("Predicted concepts for the model '" + model.getId() + "'");
            for (Region concept : output.getData().getRegionsList()) {
                for (Concept item : concept.getData().getConceptsList()) {
                    System.out.printf("\t%s %.2f%n", item.getName(), item.getValue());
                }
            }
        }
        // Uncomment this line to print the raw output
        // System.out.println(results);
    }
}
<?php
require __DIR__ . "/vendor/autoload.php";
/////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, app ID, workflow ID, and
// image URL. Change these strings to run your own example.
/////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
$PAT = "YOUR_PAT_HERE";
// user_id/app_id pair that owns the workflow (Clarifai's public "main" app here)
$USER_ID = "clarifai";
$APP_ID = "main";
// Change these to make your own predictions
$WORKFLOW_ID = "Face-Sentiment";
$IMAGE_URL = "https://samples.clarifai.com/celebrity.jpeg";
// Or, to use a local image file, assign the location variable
// $IMAGE_BYTES_STRING = "YOUR_IMAGE_FILE_LOCATION_HERE";
///////////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
///////////////////////////////////////////////////////////////////////////////////
use Clarifai\ClarifaiClient;
use Clarifai\Api\Data;
use Clarifai\Api\Image;
use Clarifai\Api\Input;
use Clarifai\Api\PostWorkflowResultsRequest;
use Clarifai\Api\Status\StatusCode;
use Clarifai\Api\UserAppIDSet;
// Open a gRPC client channel to the Clarifai API
$client = ClarifaiClient::grpc();
// Authorization metadata is sent with every Clarifai endpoint call
$metadata = ["Authorization" => ["Key " . $PAT]];
$userDataObject = new UserAppIDSet([
    "user_id" => $USER_ID,
    "app_id" => $APP_ID,
]);
// To use a local image file, uncomment the following lines
// $imageData = file_get_contents($IMAGE_BYTES_STRING);
// Let's make a RPC call to the Clarifai platform. It uses the opened gRPC client channel to communicate a
// request and then wait for the response
[$response, $status] = $client
    ->PostWorkflowResults(
        // The request object carries the request along with the request status and other metadata related to the request itself
        new PostWorkflowResultsRequest([
            "user_app_id" => $userDataObject,
            "workflow_id" => $WORKFLOW_ID,
            "inputs" => [
                new Input([
                    // The Input object wraps the Data object in order to meet the API specification
                    "data" => new Data([
                        // The Data object is constructed around the Image object. It offers a container that has additional image independent
                        // metadata. In this particular use case, no other metadata is needed to be specified
                        "image" => new Image([
                            // In the Clarifai platform, an image is defined by a special Image object
                            "url" => $IMAGE_URL,
                            // "base64" => $imageData,
                        ]),
                    ]),
                ]),
            ],
        ]),
        $metadata
    )
    ->wait();
// A response is returned and the first thing we do is check the status of it
// A successful response will have a status code of 0; otherwise, there is some error
if ($status->code !== 0) {
    throw new Exception("Error: {$status->details}");
}
// In addition to the RPC response status, there is a Clarifai API status that reports if the operation was a success or failure
// (not just that the communication was successful)
if ($response->getStatus()->getCode() != StatusCode::SUCCESS) {
    throw new Exception(
        "Failure response: " .
        $response->getStatus()->getDescription() .
        " " .
        $response->getStatus()->getDetails()
    );
}
// We'll get one WorkflowResult for each input we used above. Because of one input, we have here one WorkflowResult
$results = $response->getResults()[0];
// Each model we have in the workflow will produce one output,
// in the same order as the models are configured in the workflow
foreach ($results->getOutputs() as $output) {
    $model = $output->getModel();
    echo "Predicted concepts for the model '{$model->getId()}'" . "\n";
    foreach ($output->getData()->getRegions() as $concept) {
        foreach ($concept->getData()->getConcepts() as $item) {
            echo "\t{$item->getName()} {$item->getValue()}" . "\n";
        }
    }
}
// Uncomment this line to print the raw output
// print_r($results);
?>
# Call the Workflow Results endpoint directly over REST.
# The user_id/app_id pair that owns the workflow is part of the URL path,
# and the PAT goes in the authorization header.
curl -X POST "https://api.clarifai.com/v2/users/clarifai/apps/main/workflows/Face-Sentiment/results" \
-H "authorization: Key YOUR_PAT_HERE" \
-H "content-type: application/json" \
-d '{
"inputs": [
{
"data": {
"image": {
"url": "https://samples.clarifai.com/celebrity.jpeg"
}
}
}
]
}'
Text Output Example
Predicted concepts for the model `face-detection`
BINARY_POSITIVE 1.00
Predicted concepts for the model `margin-110-image-crop`
Predicted concepts for the model `face-sentiment-recognition`
happiness 1.00
disgust 0.00
fear 0.00
sadness-contempt 0.00
surprise 0.00
anger 0.00
neutral 0.00
Raw Output Example
status {
code: SUCCESS
description: "Ok"
}
input {
id: "5865e5d55a164beebc6f7a5682269cb4"
data {
image {
url: "https://samples.clarifai.com/celebrity.jpeg"
}
}
}
outputs {
id: "01e76acba82d4453a1d3c10b1777066e"
status {
code: SUCCESS
description: "Ok"
}
created_at {
seconds: 1700656970
nanos: 361613613
}
model {
id: "face-detection"
name: "Face"
created_at {
seconds: 1606323024
nanos: 453038000
}
modified_at {
seconds: 1665509418
nanos: 21257000
}
app_id: "main"
model_version {
id: "6dc7e46bc9124c5c8824be4822abe105"
created_at {
seconds: 1614879626
nanos: 81729000
}
status {
code: MODEL_TRAINED
description: "Model is trained and ready"
}
visibility {
gettable: PUBLIC
}
app_id: "main"
user_id: "clarifai"
metadata {
}
}
user_id: "clarifai"
model_type_id: "visual-detector"
visibility {
gettable: PUBLIC
}
workflow_recommended {
}
}
data {
regions {
id: "32b383f26ce26a4ff16642447f9317b8"
region_info {
bounding_box {
top_row: 0.151694223
left_col: 0.285768479
bottom_row: 0.614028037
right_col: 0.762517869
}
}
data {
concepts {
id: "ai_b1b1b1b1"
name: "BINARY_POSITIVE"
value: 0.999997377
app_id: "main"
}
}
value: 0.999997377
}
}
}
outputs {
id: "a521f59aeaf94d06901da76ab01fd0e0"
status {
code: SUCCESS
description: "Ok"
}
created_at {
seconds: 1700656970
nanos: 361621134
}
model {
id: "margin-110-image-crop"
name: "margin-110"
created_at {
seconds: 1590505298
nanos: 387731000
}
modified_at {
seconds: 1634716390
nanos: 69050000
}
app_id: "main"
model_version {
id: "b9987421b40a46649566826ef9325303"
created_at {
seconds: 1590505298
nanos: 387731000
}
status {
code: MODEL_TRAINED
description: "Model is trained and ready"
}
visibility {
gettable: PUBLIC
}
app_id: "main"
user_id: "clarifai"
metadata {
}
}
display_name: "margin-110-image-crop"
user_id: "clarifai"
model_type_id: "image-crop"
visibility {
gettable: PUBLIC
}
workflow_recommended {
}
}
data {
regions {
id: "6b5ea07bdaec9a8e48ff9424bf1f03b7"
region_info {
bounding_box {
top_row: 0.12857753
left_col: 0.261931
bottom_row: 0.637144744
right_col: 0.786355317
}
}
data {
image {
base64: "\377\330\377\340\000\020JFIF\000\001\001\000\000\001\000\001\000\000\377\333\000C\000\010\006\006\007\006\005\010\007\007\007\t\t\010\n\014\024\r\014\013\013\014\031\022\023\017\024\035\032\037\036\035\032\034\034 $.\' \",#\034\034(7),01444\037\'9=82<.342\377\333\000C\001\t\t\t\014\013\014\030\r\r\0302!\034!22222222222222222222222222222222222222222222222222\377\300\000\021\010\000\373\000\267\003\001\"\000\002\021\001\003\021\001\377\304\000\037\000\000\001\005\001\001\001\001\001\001\000\000\000\000\000\000\000\000\001\002\003\004\005\006\007\010\t\n\013\377\304\000\265\020\000\002\001\003\003\002\004\003\005\005\004\004\000\000\001}\001\002\003\000\004\021\005\022!1A\006\023Qa\007\"q\0242\201\221\241\010#B\261\301\025R\321\360$3br\202\t\n\026\027\030\031\032%&\'()*456789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz\203\204\205\206\207\210\211\212\222\223\224\225\226\227\230\231\232\242\243\244\245\246\247\250\251\252\262\263\264\265\266\267\270\271\272\302\303\304\305\306\307\310\311\312\322\323\324\325\326\327\330\331\332\341\342\343\344\345\346\347\350\351\352\361\362\363\364\365\366\367\370\371\372\377\304\000\037\001\000\003\001\001\001\001\001\001\001\001\001\000\000\000\000\000\000\001\002\003\004\005\006\007\010\t\n\013\377\304\000\265\021\000\002\001\002\004\004\003\004\007\005\004\004\000\001\002w\000\001\002\003\021\004\005!1\006\022AQ\007aq\023\"2\201\010\024B\221\241\261\301\t#3R\360\025br\321\n\026$4\341%\361\027\030\031\032&\'()*56789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz\202\203\204\205\206\207\210\211\212\222\223\224\225\226\227\230\231\232\242\243\244\245\246\247\250\251\252\262\263\264\265\266\267\270\271\272\302\303\304\305\306\307\310\311\312\322\323\324\325\326\327\330\331\332\342\343\344\345\346\347\350\351\352\362\363\364\365\366\367\370\371\372\377\332\000\014\003\001\000\002\021\003\021\000?\000\267\343\235r\342\037\t\333E\013\235\363I\2169\030Q\237\347\\\265\215\203jSGi\031;\374\244s\307^I\374\263Z^(\267i\264\235.$\004\262\242\310A\350s\237\360\2
44\360\254e\365\213F3}\236\342%\302\223\321\306\343\3764\001\351\332T\253\035\252Z_),\000\344\216\234W-\343\035\021\217\225%\244\212\033\177,8;{\217\344kw\\\274\362\356\360\315\203\264d\017\245ajz\221\273\323\221\320\375\322:\367\355@\036s\250xZ\005i\245\211\260\n\347\216\233\272g\333>\225\227\006\200\310\025\267\t\037~\000\000\355\353\324\237\351]\253\022f6\345\306\030\202\247\361\310\255\333-\r\036\305rw\004l\341G\271\377\000\032\000\363m?\303\327S\3523\302\301v\344\202q\216\343\374kN\037\006;\224\215\260VF\034\036\307\034\327\253\330\370~<\264\221\242\251f\335\300\357\232\261.\206\360\344\214\026 \250\366\316y\240\016\"\307B\032\177\237:\"\220cs\273\035\277\310\247Ec$\020:\020\024\2220\007\030\256\272{IJ\371J\277\"\205\014{\340\036\237\245U\276fX\302\210\301y\216\336?\204\347\257\363\240\014\030\356e\265\272DV\312\217\223\036\265\'\372F\307|1\031\013\237\344+F\332\311d\236Id\371QO\310H\357\334\377\000:\275\025\210r&e;\006B(\356Oz\000\347\247\232D\266\\\256\013\026\177\256zT6o<\022<\216>|\371\216\007n0\005uR\351RN\342F^\177\204zS\323\303\341U\211o\235\310$\216\302\2009i/\356\r\350\220\234mR\t=NNO\341\305Nu\2276R+g\356\005\006\266\245\360\366\340W\037/Oz\241q\243,\205c\013\373\265\345\273~\024\001\315\245\334\205\240NA\003%\207\247\245hG\256I#\3033\266\317(\025\367=\305O>\230\322\254\202%\332\25200?J\306\271\323\347\267u.\273Q\017\034\346\200:\353\rN;\253\313\270\311\300h9\317J\3263\304\342&\016\016r\016\017\260\2578\266\2756\333\211\353\214u\353\351Zv\032\226\350\330<\234\242\234z\002h\003\244\273\324\0323\264\267\312\t\340\037j~\241j\232\305\206\340\271\223\030$\236\325\312_j\r$\214K\3416\200=\316k_N\325\231-d\001\263\200[\035\370\240\016CP\265kk\247\211\307*v\321Z\272\233\303\177p\262\2467\225\303\003\353E\000u\257\341\370u\037\rXN\337,\221B\252\304}3\237\314\326\236\211\242i\255hM\324)\347!\311n\230\367\025\223\341\335Y\227G\212)C\035\243\033\010\350*\245\376\245${\326#\264\'\336\301\352(\003/\304\372\203C\2513#\356D\316\016y=\205gAr\346\312x\316?\325\356\307\2
47z\212X^\372\340H\352|\265\301,\303\001\217`*\255\373\0338\2340\313\310v\214~\202\200\036\203\315\273\261d\\\310X\222\276\243\322\275kD\264\217\354\352\0356\260\025\346>\036\322\346\325<Ug\032\037\335Y\307\346JG|\364\025\354\321F\021@\034z\320\004\211\032F\273Tq\232qU#\007\030\244\343<\322|\304\216(\001\277g\214\251\033G=j\205\336\233\033+2\24785\250\240\016i~\\r3@\0306Z:\340\263\216;\003\332\265\026\326%P6\214\212\263\200\017#\360\246\266Nx\240\010X/L\016;Ty\300\341A\'\323\245Y\362\301\031\301\246\371aE\000SeG\0075Vh\"+\200:v\305h\262\214\236*\273 \310\376T\001\2155\266\341\205L`\366\025\205\253X\374\247 \202Eu\323\241*~`3\336\262.\255\325\211\016\t\343\363\240\017>\273\260*\300\356\3713\220j \202+g;\260O\312:\364\356k\256\222\3364gP\204\203\323w\364\254k\210\222&`\352\014x\310R?\255\000b\223\373\264g\371\202\345\261\370\325h\365S\000wn\025br\336\331\007\217\326\254\336L\233\260\321\252\2560\025{\366\305s\372\204M\345\262\026t\36306u\300\240\013\376\036\327\343.^\3458\031\000\217\322\212\311\264\267`\256\233F\305\352\336\2474P\007\277\315e\021\214In\361\355\003<\014\223X3\350\357p\314\315\014\222\023\317M\240Um\007Y\221\003yw,\253\350Fq\3765\253<wW\270\377\000\211\204\254\247\000\202\330^~\224\001\314jWv\372_\311\2729$\350\261\203\234\037\177LV\034v\363\334\317&\243x\017\244Jz\022z`Wc\250h\232.\222\206\346\376\345e\003\222\250r\307\330zVW\205\235\274W\2565\371\203\311\322l\330\210#\376\373z\232\000\355<!\242\r#IS(\037j\233\347\224\217~\337\205t\200\214\340\032\256\216U@\343\350*T9 
c\221@\023\237\347M\030\335\326\202s\212\\\216\224\000\345\037\205(\004\365\241A\3174\376\270\240\010\302\363\311\240\376\224\363\223\326\230\001\351\332\200\024\023A\300\240\374\275)\t\343\332\200\030\300c\007\255U\220\000j\314\215\305T\231\266\343\034\212\000\206^\325R]\205J\262\3765;\261\301\006\253JKpzP\006\035\364,\23709\003\277z\240cYa\"E\030c\234\343\245l\314NJ\2209\252\022\241\213\250\001\033\217\245\000sZ\215\202\014\200\210\007P\007J\345u\013;\250\013\375\235\304\201\316Hn\253]\305\3623\206\034nO\302\271K\331\266F\305\230\206\355\333\360\240\014(%h\243dh\360\304\374\300\234QD\227K.\010\3032\365\337E\000oCt-\333z\034\251\3523Zp\353~J\022\315\2663\334\265r3j0\305\302\262\273\001\323\251\025\231s\252\\J\305Ps\330\365\305\000t\272\275\343k\027\221XB\371y\231Q\025[$\347\2515\354\332.\231o\243i\026\266P(\013\032\000}\317rk\310\276\030i?i\325\337Q\234\026\021ga=Kz\327\262+\226l\364\240\013\3611\351\351V\324n\301\025\235\021\310\034\326\204O\362\216E\000J\026\225z\364\244\335\307\024\354c\221@\013\234\323\263\232`<\360)w{P\002\367\034Q\317^\242\202h\317\313@\r9\357HN3\315) \212\211\330\347\212\000l\234\367\346\251J\333z\232\264\315\200j\235\307Pq@\025d`:\032\205\232\245\224t8\346\241 
\234\373\320\0059\2241\306MF\310\n\225\353\237Z\236^\270\3075\024\2146\373\032\000\311\274\2044{\270\035\363\374\253\216\324\204n\\\224\034\251\312\221\320\327es4e\nd\025c\264z\203\\\027\210&{y\231\362IS\363/\257\257\351@\034\325\355\273\t\314\260\253\025=\207Ph\246=\314\236yh\231\260\334\2145\024\000\267\260\375\214\006p7\036p:/\370\232\312\336\323L\002\214\0268\000Qqy-\323\226\221\263\316y\251\264\225\337\252\332\217\372h(\003\334\274\'g\016\223\241\304\230U\302\006v?\255A7\304M\"\033\267\201\030\220\274o\317\004\373W5\250__j\302=\023M\334A\000\\88\037\356\347\371\325\330\276\027D\360|\316\212\344`\220I\240\r1\361KJV+\363px\000WI\243\370\327L\324\231B\316\253\273\200\254y5\347\223|4\236\021\225\270\214c\257\313\367\253<\370v\357O\271R\244\200\2479\006\200=\372;\204\221@G\006\245\317C\232\363\037\013\353\363DV\031\3130^2s^\203oy\034\3439\340\320\005\360h,\000\357P\211\007c\305<\236\347\232\000p;\273\364\2449\307\007\245B\322\0055\237\250\352\253gn\354\010\335\320s@\027\345\270H\207\316\352\276\344\342\262o|I\247\331g\316\270E\300\317Z\363m{_\276\232\340\204i\030\366\332+\232\223G\325\365I\204\263\006,\307\030c\322\200=B\357\342>\215\006vLd#\262\214\326L\337\023\364\361!-\033l\355\201\315qi\360\373Q.\t%\201\351\216\200\326\245\207\3039\311\3374\215\274t\'\374(\003\252\265\361\356\215r\312\257r\020\236\306\272\010\256\241\236!,,\035\010\340\203^e}\340\023\n\223\261\213\003\367\223\255g\332j\272\267\206.>m\363Z\217\225\220\366\240\017T\270\234\003\3375FK\236\240`Vu\236\265o\252\331\245\315\273eXr;\203U.\3570\335(\000\325.\214E\231c\335\225\344\017\342\025\304\370\222\364I\n4l_\003\031\376\360\367\367\025\257{~\300\375\354\257S\354k\216\326.\243\226]\3216\033\270\316CP\006ls\274O\224\3069\371H\310\037\235\025\017\037\375j(\002\032\273\246\022\267\321\262\214\260\316\337\256*\216kOB]\372\274\003\031\031\311\372b\200=\223\302\332|\032}\212\310\374\312\377\00037\326\272t\272\007\201^r\372\371\203dQ\236\247\004\3243\370\345-\031QX7\030f\'\200\177\n\000\365 
\206\3410z\223Yz\226\215&\326oQ\315y\342\374C\215o\221\336y\314D\r\376_\312\007\323\275Y\377\000\204\372\322\346\351\201\270\274\2113\362\026\223\250\307\241\343\326\200.Ne\261\270\310\007\000\372WC\242k\245\230)=;\023X\021_C\254ea\270[\206\003$\343k/\324w\250|\251-&\334\231\340\363\305\000z\315\235\302\314\231\004sW\310!s\236\265\315\370js4+\237\316\272Y~X\375q@\031\327s\210\262A\351\\\236\2538\234\341\316@<\001[Z\224\340\006$\376\025\306\335\\\031f(9\240\002\025\217\314\312\307\320\365\002\266-\"g#\344\343\351Y\261\315\r\244`\277\007\323\031\346\2337\211\'\264\031\021C\002u\335p\370\375\005\000v\020)E\031^\235\252S0\0305\300\311\343k\225\212I~\323dc\214)%Q\261\317\276j\025\361\344>c+\354r:\264\017\237\320\320\007\241<\210\337\210\357Xz\266\231k{\023#D\231n\370\254\253/\024[_\251he\014\243\257\265^\376\322\216U\371\\\037\306\200<\372\346\326_\njd\306\316\326\222\237\233\330\325\313\213\360\321\356V\030<\203Z\372\3641_\332\272\267$\202\001\035Eq\0214\220\333\233yO*p>\224\001WV\272|\374\2140G\"\260\035\211bOS[\2271\006L\267^\325\227<\014\024\222=\350\002\256h\244\242\200\021\3431\271Rs\203[\036\031\266\226\343Qo$\002\352\204\200{\326k\220\343\236\243\371Wc\360\336\317\355\032\274\255\214\205Q@\030\372\325\304\221H\312\021\342byF\0042\236\377\000\205c[[Kw:\306\210\356I\347h\316\005{\256\271\243G\251D\366\361X\2031\340J\313\362\250\365\367\247h^\031]\n\315a\206$bG\317&\316X\320\007\221\352>\026\274Ia6v\256\321\310\240\000:\206\367\253#\300^\"\220+\0356vR\200)\310\340\373\347\265{\2641o\330\246\331>NG\313\322\264\225.\'M\245B\363\351@\036Q\'\201\256t\333{y\364\206\232;\270\243\006A\267\211\030\016\303\353]u\235\215\305\377\000\207\342\270\276\265\362/TbU\365\367\256\276;R\000\334rEA\250\355\216\335\300<\343\237z\000\316\360\314%\006\010\357[\367\317\262,\017\312\251hv\306(A\365\251\2657\371v\320\007+\252\273\2630N\265\026\217\240\033\200\322\271\307\275Z\232\"\362\217C[\272>\321j` 
\014\346\200<\307\304~$\264\323\356Z\316\3126\236\355N\001U\310\007\353Qi\337\017\365\rv\321\365\rN\354\215\310\305`L\023\234w&\2756m\006\331\030\225\265\2079\316\340\2035\037\331!R\240\240M\277\335\342\200>b!\343\027\021\3102W\013\206r\n\220};\324R,\221\025\334@\334\241\206\030\036?\n\367\253\337\006\350W\006`\360\306\246^X\205\347>\271\256~\177\207\332Kp\234v8\034\375E\000yu\246\253sf\341\342|0\353\236\343\320\372\327M\242j\315\3441yX\314\355\300=\251u_\207w\360>\3759^\342>\352F\030\177\215Y\321|7u\001\204\311l\353:\311\226\363T\200\007\265\000t\221\244\262\333\006*@\"\261.\264\360\362\226a\217J\364[]9V\313.0q\323\025\313\353H\250\330\000~\024\001\305\337\300\253\036\007P;U\033\273\177*\325$P\013\021\223\315j\335:\253\355+\222GAP\334\306\222\330\034G\363\001\307l\320\007\"\337x\343\326\212V]\254G\241\242\200-\317lcp\303\247Z\364/\205\360\005\202\346~\357&\321\370\n\345\257\341X,\313u%\000\317\340k\264\370s\031]\0266\035Y\330\376\264\001\352\326I\033\"\356\\\326\232B\244}\321\212\315\323\301\"\266c\031Z\000\214B\243\242\216)J\201V6\344}*\'\024\001\004\207\007\247\025\203\251\312e\224F\243\2775\265t\352\220\261$\014\016\365\207`\246\362\351\244# 
\036(\003r\301Dv\312=\253;R\220\031\017?\205l\010\202F@\364\256gTvF>\335(\002\263\221\346\n\323\323\376\367N\225\315\303xL\300\036y\256\237Le\220\016}\350\003]\016\341\206\024\311-\243q\310\372\324\301G\245!\\s\322\2003\246\322\255\344\004\355\025\007\366T\013\320sZ\215\320\217J\201\370$\320\005Qf\240\0001\365\025\024\220\242\222@\031\251\236}\247\031\342\250\334\335\016I4\001R\376\\!\031\256\023\\\224e\210\346\272}F\360mc\234`W\237\353w\237+\022\324\001\211wq\275\213\203\323\201\212#\235\232\035\256\273\243+\216:\203Y\023^\035\344g\275Y\264\272\362\340U\007\234\367\035\215\000d\\\200\263\270\034\340\365\365\242\226g\01736\321\317j(\003\245\361S\254@F\270\034\355\037\205v\377\000\017v\215\026\330\036\371?\255y\217\210u\024\3245F1g\312O\225k\321|\007(\032]\262\347\267\365\240\017]\261\n\020`\365\255T\340qXZl\243n3\316+\\J\252\271\310\034P\005\226uQ\232\315\272\324b\213?0\252\367\332\200\211\033\006\261\326\316]E\332G,\023\327\326\200\027R\325\267\302\334\341O\025\241\240<-\000!\201\317J\362?\031x\212\347A\277\223L(X\201\271\030\367\006\261t\177\210\02762r\314\253\237\273\234\212\000\372VFA\021\344t\2567[\275\267B\353\271Kt\034\327\233]|S\236H\331\025\233$W%?\214\257f\234\310\006\356{\232\000\365\253u\337.\345\025\245m\177\366\031\201f\342\274\237O\361\375\305\263\001u\006\007\250\251\365?\034\244\361\376\340\222O@(\003\336\255\265(\247EepA\025m$\017\320\365\257*\360,\272\245\376\223\366\211I\306\357\222\272\3305)\355\247\362\347R\255\333=\350\003\253\000\021\322\252\316\241G\2756\332\365fQ\315\023>T\232\000\312\271b\271\037\344V\035\355\307\004V\305\353\360Oz\346\257\245\000\034\342\2002\365+\203\214g\250\346\270\035n\34030\311\300\342\272\235Fs\222Mp\272\264\305\230\364\240\014\263\206#\236\264\371\'\371\021A\035\016\177:\257\223\270\232N\364\000\346b\3074U\324\261\337\246\265\321b\010`\000\366\242\2003s\236k\321\374\027u\215:\334\017\341%O\347^l+\263\360d\245\240\226<\375\307\007\363\377\000\365P\007\263\351\227\300\250\311\347\025\2515\360Hr\307\363\256KJ\'+\357\3
55[\363\333\264\266\307\035\372\032\000\316I\344\325u/%[\367hr\344\036\202\272\370\212E\010\2165\300\003\025\205\244\330\303\246\332\223\307\230\347s1\352i\367\376\"\323\264\250\032k\313\230\341A\375\343\311\372P\006W\213\374\re\342\242\222\311#Cr\203\013*\214\361\350}\253\313\265\317\205\332\276\225\033On\313w\010\376\340\303~U\334\\\374Z\322\225\312[@\362\217\357\023\2675=\277\304\313I\206^\317\344\030\316$\024\001\343\013\246\310\271\016\245[\241\004r*h4\271$\177.$gs\306\024W\264\333\352\036\016\327\245\222Y\255B\314\243s\356^\337\205[\267\324\274#\246e\255\243Tb:\210\215\000y}\217\303-f\370y\222\005\204\036@s\315ni\337\010\245\022\253^^\250\214\036V1\311\374\353\273\377\000\204\317CB\000\271\333\236r\312kB\327]\323\257\0245\275\334NO`\324\001kK\323\340\323l\243\265\201\002\307\030\332\242\233\252\332\307sl@\0370\345H\353\232\177\332\224\216\032\221\245\004u\3104\001\211\246\337\024c\013\237\231N+`\335\202\235A\025\316k6\257mp\267p)\3018p)\022\361\232.N\r\000]\275\272R\017\"\271\235J}\303\nFMK}t\352\016\017\342k\032y\031\311$\376T\001\233\250\022P\201\311\355\\\215\362\206\007 
dWYp\254\3753\317j\300\324!\332\033\201\232\000\347[\216)\326\320\233\211\2261\336\222p\025\310\024\266\263<\022\356NX\214P\007k\243h\277\333Z\315\266\211\027\372\250\3432\316\376\234p?2(\257@\360\026\223\026\221\2424\316\237\351\267{d\225\317Q\334/\345E\000|\376+\245\360e\307\225\252I\021<H\237\310\3277\232\275\244\334\375\223T\267\233\260p\017\320\361@\036\353\244\220\0311\322\273\033xL\221\006\035\205q:K\215\250A\353\214W\240iN\257\006\334\320\0075\254\255\3540H\360F\317\267\'\002\276~\361\006\257w\253jr\275\303\266\325b\252\204\375\332\372\314\304\204\036\0075\341\337\025<\024\260j\203T\323\321Q\'\037\274\214\014\r\343\277\343@\036qa\241jZ\225\264\327\026\226\317$P\214\273\016\325\320\332|;\361d\206\333\313\262u\027+\2712\334c\336\272\277\205\336#\322\364\233;\273MJx\241/\367\226R\007\267z\367\013;\273;\264\215\255\246\212@\027\215\214\016\007\341@\0374\334x_\304\332\r\303%\314\027QH\352q\345|\312\376\325V\347\303\236/\020\255\324\232}\312\240\030\017\267\025\365-\305\274S\264~b\253ml\214\323o\0266\265ulm\307~\224\001\362E\325\226\275m\t\236\342\033\225\210pX\216*\240\237R\211D\352fE^7\250 
\017\306\276\252\327\364\353I\364\031\255\345\2161\031\\\034\201^{\343\213]#M\360\352i\266\220\242\3119X\325\207\\\236\246\200<\313K\361\366\271\247:\206\2717\021\016\251\'\247\326\275\007E\370\211c\250\354\216F0Jx*\347\277\265y\336\263\243Z\330\334\332[\2271\202\205\234\236\244V~\213\241j\032\356\244\266\332l.\355\273\357\364\010=I\240\017\240\215\3547P|\254\034\021\326\251Kdb^\001\372\325\275\037\303?\3316\021D\363\031\245\n\003\261=O\255^\272\211v\355\316q@\034u\364d\251\025\232-\t\346\272;\270\027q\307j\242\361\355R1@\030\023\306\0279\256oU\306\327\302\223\306q\351]E\351\332H5\305\353W\030,\007Lc\212\000\347\347?\274<\346\257\370v\320_k\326\220\2666\264\203v};\326[\034\234\326\327\205\031\223^\205\301\373\240\237\322\200=\252\363S\216\336U\265\214\025M\240\223\236\364W%\254\335\307\r\314/+\223\373\275\334t\311\242\200<\216\224\034RQ@\036\271\340\335T^\351\321\2536dN\010\372W\247\350\327\253\201\333\327\025\363\237\205\365f\323\265\025R\330\216N\277Z\366}\022\377\000s#\003\303s@\036\220\256\031{r+\220\361\325\217\333\264Y\325rdE.\200z\212\350\255&\337\030\365\252\272\242\357\211\201\031\355@\0372H\366WW\031\2342\311\234\0208\255\255;L\236/\336\350\272\235\345\264\354061\371\275\270\253\3762\360d\242\362K\35557\0079x\307\\\372\212\347t\275GS\321\256A\362\244F^\205\201\030\240\016\204j\177\020t\373\230\330jwnS;|\307\3349\366j\237P\327\274q\251\351\362[_j\350\220H0\352\252\001#\352\005^\203\306\0272K\r\304\3067eS\234\340\326F\275\343\031o\025\221\2216\360\000E\003\247\320P\006n\241\252\370\210X\233\013\255vy-\3602\214\304\344u\353\326\2625\035F\366\365\340\373f\242\363\030T\010\311\352\005K\366\035GW`-\355&r{\355\300\307\326\272\215\017\341\314\222:\313\2521\307\374\363S\374\315\000d\350\332E\357\212\365$\226R\356\027\001\344\220q\201\330W\271xoA\264\321\254\226+h\202\223\367\237\034\261\252ZF\223oa\002G\014A\025G\000\016\225\321\300\307!E\000X\333\234\344t\254\333\250\302\263\036+Wn\330\311j\310\276\225y\307z\000\307\225\003\312y\342\251^ 
U8\0305x\261\031=\253/Q\272TS\316O\265\000r\372\274\253\031c\221\221^u\251]\031\245+\236\225\324x\233P*\215\317,{W\022\314Y\211=\350\001\246\265\264\t\326\336\361\244l\234\251Q\217z\3115n\300\220de\'r\000\343\352\r\000tZ\315\324\227VV\227\n\331\300(\331\365\242\253\3463\031\266\220\200\214w\251\3169\357E\000r\335h\242\2279\000zP\002\202A\004\032\364\357\005\352\222\313\014Ipp\330\312\347\270\365\2570\357Z\232V\2555\206\241\025\301b@\300#=\250\003\351\215.\340<K\223Z7\021\231a#\326\270\215\003WK\210cpr\010\007\212\354\255\256<\305\344\346\2009\313\313&26\006k\"\343D\023\022\036\020A\366\256\342[q$\231\355O\212\321\030\014\257\343@\036n\336\020\261\221\376{Q\317\267Z\277k\341K\030@\333g\027\035\366\327\241\213\010\217%\006}i>\304\2128\035z\320\007/\026\233\024*\004q\205\307\240\253\021\332\205#\217\306\266\315\230\335\234S\205\242\343$P\006zC\264\021\216\225n\004\331\320sS:\004\007\212\256\323\004\3474\000\373\231\210C\315a\\\266\362w\034{\232\236\366\354\355!M`\335\337\252d\356\372P\003\357.B+\021\315q\332\346\256\261+\020y\366\246\353:\350\215\030\207\300\035\253\317\265-VK\331[\007\3444\001\026\243z\327\227,\354x\317\025N\223\245\'S@\001\253\232`f\273\001:\3438\317Z\246MZ\323]\322\365\nu\351@\032m\021\273s\000\307\313\363\017\306\212\275\004\r\r\364\222\205\371\037\246}h\240\016B\224\243\005\016G\004\340SsZ\006h\344\320\026-\243\315\206\340\266{\225e\377\000\021\372\320\003\277\263\310\265\016A\004&\366\374z\017\313\237\306\251\306\233\330\214\200\000\'&\264\236\355M\274\212:\030\366\203\370\017\360\254\326p|\302\000\344\342\200;\217\004kf<\332H\374\251\371F{W\255i\272\232\262\257<\032\371\306\326yl\256c\231r\247\257=\305z6\221\342%1\241\337\236\231\240\017d\216\355_\201Z08\003\232\363\213=}p\016\377\000\326\267m\365\356\203w\353@\035\242\277Jv29\256Z=pn\004\2605|k\211\264|\302\2005\330\016\375\2151\346D\007\245bM\256 
\'\347\030\254\373\215uUX\226\037\215\000l\\\336\204\r\310\305b]j+\264\220\337\255s\227\376!BH\337\\\346\241\342%\215Nd\343\0353@\0355\376\250\250\255\363r}\372\327\025\254x\201c\334<\317\326\260\265\037\022\274\233\2226\3115\316K$\263\266\347\311\240\0137\332\203\335\310r\307mT\003\214\346\236\210;\216i\316\000\007\024\001\r\024\231\245\024\000\204\325\213\031\204\027q\310T\020\017qU\315[\261\212Gf\"=\351\2140\316(\003\253k\241k:\272\223\345\3122\271\301\000\372QX\261\316m\024Gp\331\210\363\033u\307\261\242\2009\354\322\251 \020\016\001\340\321\212\231,\356d\031H$a\354\246\200\"9S\212\003e\271\351\232\234[J\3213\224#a\332s\353H\266\2638\312!a\3543@\014\232f\232M\314z\000\007\260\253\026\327OnA\004\200j\365\247\206\257n`i\014l\230\344\002:\325\033\253I-d1\310\2440\365\240\016\202\327Y\221\025K7\312z\232\334\265\327N\321\206 W\007\003\272\020\001\343\371\326\305\263\0020T\202{\251\240\016\3115\351U\211/\322\244\036\"v\377\000\226\2035\310\0377\202\254\307\352*2.2:\343\324\320\007b\376 \220\003\227\254\273\317\021q\203&\177\032\347\245Y\316>r\rW6N\355\363\261\317\326\200-]\353\354\344\204\344\326\\\255ux\3376pj\362X\242\002B\375j\302\306\241@\003\247S@\031\261X\004\033\237\222(h\325[\247\036\225\240\370\031\007\223Te<\344\363\217z\000\211\276^\265ZF\353\374\252G\221\216y5]\271&\200\033J))h\000\247\3073\307\235\214@\356=i\206\222\2004\"\324\314h\020\304\254\203\370I\310\242\263\350\240\017y\321\376\033\350\332n\014\361y\362c\253t\256\217\376\021\335?f\324\201\000\372U\370\376\351\2531\216\007\322\200<\263\304\336\010\363\'qk\204I>\366\007\353\365\251t\237\r\332\331\230\266\303\227\214`q^\207x\212I\310\025V\030\243\005\210A\232\000\307M8ua\222{b\270\317\032\370dKm\366\270\023\014\234\266\007Q^\240\31288\355Yz\252+\332\312\030\002\n\320\007\201G\010\316;\326\235\254d\036\237\235Gv\252\232\224\310\243\n\034\360*\344\003\030\305\000ZX\307\007oZR\213\323\212U<\023A\352h\002\002\243\323\212\0361\216;\366\024\366\357H\334\n\000\210\306v\001\330\324\022\355^\230\253c\221\315U\
237\225\003\265\000R\235\200\310\007\267Z\250\303 zU\211G\312\247\275Vj\000\202C\200x\252\335\352i\017\312j\032\000\005\024\n;P\002\036\264R\236\264P\001E\'\255\024\001\377\331"
image_info {
width: 183
height: 251
format: "JPEG"
color_mode: "UnknownColorMode"
}
}
}
}
}
}
outputs {
id: "7ab40bb98cad4133b490d7183095d9b5"
status {
code: SUCCESS
description: "Ok"
}
created_at {
seconds: 1700656970
nanos: 361626613
}
model {
id: "face-sentiment-recognition"
name: "face-sentiment"
created_at {
seconds: 1620837542
nanos: 718331000
}
modified_at {
seconds: 1652994708
nanos: 222496000
}
app_id: "main"
model_version {
id: "a5d7776f0c064a41b48c3ce039049f65"
created_at {
seconds: 1620837542
nanos: 812738000
}
status {
code: MODEL_TRAINED
description: "Model is trained and ready"
}
visibility {
gettable: PUBLIC
}
app_id: "main"
user_id: "clarifai"
metadata {
}
}
user_id: "clarifai"
model_type_id: "visual-classifier"
visibility {
gettable: PUBLIC
}
workflow_recommended {
}
}
data {
regions {
id: "6b5ea07bdaec9a8e48ff9424bf1f03b7"
region_info {
bounding_box {
top_row: 0.12857753
left_col: 0.261931
bottom_row: 0.637144744
right_col: 0.786355317
}
}
data {
concepts {
id: "ai_CrBPDCM6"
name: "happiness"
value: 0.999999821
app_id: "main"
}
concepts {
id: "ai_5fbLSP06"
name: "disgust"
value: 3.61372668e-006
app_id: "main"
}
concepts {
id: "ai_8PZvz0N1"
name: "fear"
value: 1.39605234e-007
app_id: "main"
}
concepts {
id: "ai_KdS5fmgb"
name: "sadness-contempt"
value: 4.07719938e-008
app_id: "main"
}
concepts {
id: "ai_59ZvTKz7"
name: "surprise"
value: 1.09932232e-008
app_id: "main"
}
concepts {
id: "ai_96KLdq72"
name: "anger"
value: 6.72241196e-009
app_id: "main"
}
concepts {
id: "ai_MqGSWdbN"
name: "neutral"
value: 4.21860102e-009
app_id: "main"
}
}
}
}
}
Videos
When you input a video into the Workflow Predict API, the response includes a list of predicted concepts for each frame of the video. By default, the video is processed at 1 frame per second (FPS), but this rate can be customized in the predict request. This means you’ll receive a set of concepts for every second (1000 milliseconds) of your video.
To adjust the FPS rate, use the sample_ms
parameter in your predict request. The sample_ms
value specifies the time interval (in milliseconds) between frames selected for inference, determining how frequently frames are processed.
The valid range for sample_ms
is between 100 and 60,000 milliseconds.
FPS is calculated as: FPS = 1000 / sample_ms
For example, if sample_ms
is set to 1000, the FPS rate will be 1 (the default value).
The Workflow Predict API has size and duration limitations for video inputs:
- Videos uploaded via URL can be up to 100 MB in size or 10 minutes in length.
- Videos sent as byte data are limited to 10 MB in size.
If your video exceeds these limits, you can refer to this tutorial on splitting large videos into smaller segments for processing. Exceeding these limits may cause the process to time out and result in an error response.
- Python
- JavaScript (REST)
- NodeJS
- Java
- PHP
- cURL
######################################################################################################
# In this section, we set the user authentication, user and app ID, workflow ID, video input,
# and sample_ms. Change these strings to run your own example.
######################################################################################################
# Your PAT (Personal Access Token) can be found in the Account's Security section
PAT = "YOUR_PAT_HERE"
USER_ID = "YOUR_USER_ID_HERE"
APP_ID = "YOUR_APP_ID_HERE"
# Change these to make your own predictions
WORKFLOW_ID = "YOUR_WORKFLOW_ID_HERE"
VIDEO_URL = "https://samples.clarifai.com/beer.mp4"
# Or, to use a local video file, assign the location variable
# VIDEO_FILE_LOCATION = "YOUR_VIDEO_FILE_LOCATION_HERE"
# Change this to configure the FPS rate (If it's not configured, it defaults to 1 FPS)
# Valid sample_ms range is 100-60000; FPS = 1000 / sample_ms
SAMPLE_MS = 500
############################################################################
# YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
############################################################################
from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2

channel = ClarifaiChannel.get_grpc_channel()
stub = service_pb2_grpc.V2Stub(channel)
# This metadata is sent with every Clarifai endpoint call
metadata = (("authorization", "Key " + PAT),)
userDataObject = resources_pb2.UserAppIDSet(user_id=USER_ID, app_id=APP_ID)

# To use a local video file, uncomment the following lines
# with open(VIDEO_FILE_LOCATION, "rb") as f:
#     file_bytes = f.read()

post_workflow_results_response = stub.PostWorkflowResults(
    service_pb2.PostWorkflowResultsRequest(
        user_app_id=userDataObject,
        workflow_id=WORKFLOW_ID,
        inputs=[
            resources_pb2.Input(
                data=resources_pb2.Data(
                    video=resources_pb2.Video(
                        url=VIDEO_URL,
                        # base64=file_bytes
                    )
                )
            )
        ],
        output_config=resources_pb2.OutputConfig(
            sample_ms=SAMPLE_MS
        )
    ),
    metadata=metadata,
)
if post_workflow_results_response.status.code != status_code_pb2.SUCCESS:
    print(post_workflow_results_response.status)
    raise Exception(
        "Post workflow results failed, status: "
        + post_workflow_results_response.status.description
    )
# We'll get one WorkflowResult for each input we used above. Because of one input, we have here one WorkflowResult
results = post_workflow_results_response.results[0]
# Print the raw output
print(results)
<!--index.html file-->
<script>
////////////////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, video input,
// and sample_ms. Change these strings to run your own example.
///////////////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
const PAT = "YOUR_PAT_HERE";
const USER_ID = "YOUR_USER_ID_HERE";
const APP_ID = "YOUR_APP_ID_HERE";
// Change these to make your own predictions
const WORKFLOW_ID = "YOUR_WORKFLOW_ID_HERE";
const VIDEO_URL = "https://samples.clarifai.com/beer.mp4";
// Change this to configure the FPS rate (If it's not configured, it defaults to 1 FPS)
const SAMPLE_MS = 500;
///////////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
///////////////////////////////////////////////////////////////////////////////////
const raw = JSON.stringify({
"user_app_id": {
"user_id": USER_ID,
"app_id": APP_ID
},
"inputs": [
{
"data": {
"video": {
"url": VIDEO_URL
}
}
}
],
"output_config": {
"sample_ms": SAMPLE_MS
}
});
const requestOptions = {
method: "POST",
headers: {
"Accept": "application/json",
"Authorization": "Key " + PAT
},
body: raw
};
fetch(`https://api.clarifai.com/v2/workflows/${WORKFLOW_ID}/results`, requestOptions)
.then(response => response.text())
.then(result => console.log(result))
.catch(error => console.log("error", error));
</script>
//index.js file
//////////////////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, video input,
// and sample_ms. Change these strings to run your own example.
/////////////////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
const PAT = "YOUR_PAT_HERE";
const USER_ID = "YOUR_USER_ID_HERE";
const APP_ID = "YOUR_APP_ID_HERE";
// Change these to make your own predictions
const WORKFLOW_ID = "YOUR_WORKFLOW_ID_HERE";
const VIDEO_URL = "https://samples.clarifai.com/beer.mp4";
// Or, to use a local video file, assign the location variable
// const VIDEO_FILE_LOCATION = "YOUR_VIDEO_FILE_LOCATION_HERE"
// Change this to configure the FPS rate (If it's not configured, it defaults to 1 FPS)
const SAMPLE_MS = 500;
/////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
/////////////////////////////////////////////////////////////////////////////
const { ClarifaiStub, grpc } = require("clarifai-nodejs-grpc");
const stub = ClarifaiStub.grpc();
// This will be used by every Clarifai endpoint call
const metadata = new grpc.Metadata();
metadata.set("authorization", "Key " + PAT);
// To use a local video file, uncomment the following lines
// const fs = require("fs");
// const videoBytes = fs.readFileSync(VIDEO_FILE_LOCATION);
stub.PostWorkflowResults(
  {
    user_app_id: {
      "user_id": USER_ID,
      "app_id": APP_ID,
    },
    workflow_id: WORKFLOW_ID,
    inputs: [{
      data: {
        video: {
          url: VIDEO_URL,
          // base64: videoBytes
        }
      }
    }],
    output_config: {
      sample_ms: SAMPLE_MS
    }
  },
  metadata,
  (err, response) => {
    if (err) {
      throw new Error(err);
    }
    // 10000 is the Clarifai SUCCESS status code
    if (response.status.code !== 10000) {
      throw new Error(
        "Post workflow results failed, status: " + response.status.description
      );
    }
    // We'll get one WorkflowResult for each input we used above. Because of one input, we have here
    // one WorkflowResult
    const results = response.results[0];
    // Print the raw output
    console.log(results);
  }
);
package com.clarifai.example;

import com.clarifai.channel.ClarifaiChannel;
import com.clarifai.credentials.ClarifaiCallCredentials;
import com.clarifai.grpc.api.*;
import com.clarifai.grpc.api.status.StatusCode;
import com.google.protobuf.ByteString;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class ClarifaiExample {
    /////////////////////////////////////////////////////////////////////////////////////////////////////
    // In this section, we set the user authentication, user and app ID, workflow ID, video input,
    // and sample_ms. Change these strings to run your own example.
    /////////////////////////////////////////////////////////////////////////////////////////////////////
    //Your PAT (Personal Access Token) can be found in the portal under Authentication
    static final String PAT = "YOUR_PAT_HERE";
    static final String USER_ID = "YOUR_USER_ID_HERE";
    static final String APP_ID = "YOUR_APP_ID_HERE";
    // Change these to make your own predictions
    static final String WORKFLOW_ID = "YOUR_WORKFLOW_ID_HERE";
    static final String VIDEO_URL = "https://samples.clarifai.com/beer.mp4";
    // Or, to use a local video file, assign the location variable
    // static final String VIDEO_FILE_LOCATION = "YOUR_VIDEO_FILE_LOCATION_HERE";
    // Change this to configure the FPS rate (If it's not configured, it defaults to 1 FPS)
    static final int SAMPLE_MS = 500;
    ///////////////////////////////////////////////////////////////////////////////////
    // YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
    ///////////////////////////////////////////////////////////////////////////////////
    public static void main(String[] args) throws IOException {
        V2Grpc.V2BlockingStub stub = V2Grpc.newBlockingStub(ClarifaiChannel.INSTANCE.getGrpcChannel())
            .withCallCredentials(new ClarifaiCallCredentials(PAT));
        PostWorkflowResultsResponse postWorkflowResultsResponse = stub.postWorkflowResults(
            PostWorkflowResultsRequest.newBuilder()
                .setUserAppId(UserAppIDSet.newBuilder().setUserId(USER_ID).setAppId(APP_ID))
                .setWorkflowId(WORKFLOW_ID)
                .addInputs(
                    Input.newBuilder().setData(
                        Data.newBuilder().setVideo(
                            Video.newBuilder().setUrl(VIDEO_URL)
                            // To use a local video file instead, replace the line above with:
                            // Video.newBuilder().setBase64(ByteString.copyFrom(Files.readAllBytes(
                            //     new File(VIDEO_FILE_LOCATION).toPath()
                            // )))
                        )
                    )
                )
                .setOutputConfig(OutputConfig.newBuilder()
                    .setSampleMs(SAMPLE_MS)
                )
                .build()
        );
        if (postWorkflowResultsResponse.getStatus().getCode() != StatusCode.SUCCESS) {
            throw new RuntimeException("Post workflow results failed, status: " + postWorkflowResultsResponse.getStatus());
        }
        // We'll get one WorkflowResult for each input we used above. Because of one input, we have here
        // one WorkflowResult
        WorkflowResult results = postWorkflowResultsResponse.getResults(0);
        // Print the raw output
        System.out.println(results);
    }
}
<?php
require __DIR__ . "/vendor/autoload.php";
/////////////////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, video input,
// and sample_ms. Change these strings to run your own example.
/////////////////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
$PAT = "YOUR_PAT_HERE";
$USER_ID = "YOUR_USER_ID_HERE";
$APP_ID = "YOUR_APP_ID_HERE";
// Change these to make your own predictions
$WORKFLOW_ID = "YOUR_WORKFLOW_ID_HERE";
$VIDEO_URL = "https://samples.clarifai.com/beer.mp4";
# Or, to use a local video file, assign the location variable
# $VIDEO_FILE_LOCATION = "YOUR_VIDEO_FILE_LOCATION_HERE";
# Change this to configure the FPS rate (If it's not configured, it defaults to 1 FPS)
$SAMPLE_MS = 500;
///////////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
///////////////////////////////////////////////////////////////////////////////////
use Clarifai\ClarifaiClient;
use Clarifai\Api\Data;
use Clarifai\Api\Video;
use Clarifai\Api\Input;
use Clarifai\Api\PostWorkflowResultsRequest;
use Clarifai\Api\Status\StatusCode;
use Clarifai\Api\UserAppIDSet;
use Clarifai\Api\OutputConfig;

$client = ClarifaiClient::grpc();
$metadata = ["Authorization" => ["Key " . $PAT]];
$userDataObject = new UserAppIDSet([
    "user_id" => $USER_ID,
    "app_id" => $APP_ID,
]);
// To use a local video file, uncomment the following line
// $videoData = file_get_contents($VIDEO_FILE_LOCATION);

// Let's make a RPC call to the Clarifai platform. It uses the opened gRPC client channel to communicate a
// request and then wait for the response
[$response, $status] = $client
    ->PostWorkflowResults(
        // The request object carries the request along with the request status and other metadata related to the request itself
        new PostWorkflowResultsRequest([
            "user_app_id" => $userDataObject,
            "workflow_id" => $WORKFLOW_ID,
            "inputs" => [
                new Input([
                    // The Input object wraps the Data object in order to meet the API specification
                    "data" => new Data([
                        // The Data object is constructed around the Video object. It offers a container that has additional independent
                        // metadata. In this particular use case, no other metadata is needed to be specified
                        "video" => new Video([
                            // In the Clarifai platform, a Video is defined by a special Video object
                            "url" => $VIDEO_URL
                            // "base64" => $videoData
                        ]),
                    ]),
                ]),
            ],
            "output_config" => new OutputConfig([
                "sample_ms" => $SAMPLE_MS
            ])
        ]),
        $metadata
    )
    ->wait();
// A response is returned and the first thing we do is check the status of it
// A successful response will have a status code of 0; otherwise, there is some error
if ($status->code !== 0) {
    throw new Exception("Error: {$status->details}");
}
// In addition to the RPC response status, there is a Clarifai API status that reports if the operation was a success or failure
// (not just that the communication was successful)
if ($response->getStatus()->getCode() != StatusCode::SUCCESS) {
    throw new Exception(
        "Failure response: " .
        $response->getStatus()->getDescription() .
        " " .
        $response->getStatus()->getDetails()
    );
}
// We'll get one WorkflowResult for each input we used above. Because of one input, we have here one WorkflowResult
$results = $response->getResults()[0];
// Print the raw output
print_r($results);
?>
# POST a video URL to the workflow's results endpoint, sampling a frame every 500 ms (2 FPS)
curl --request POST \
  --url "https://api.clarifai.com/v2/users/YOUR_USER_ID_HERE/apps/YOUR_APP_ID_HERE/workflows/YOUR_WORKFLOW_ID_HERE/results" \
  --header "authorization: Key YOUR_PAT_HERE" \
  --header "content-type: application/json" \
  --data '{
    "inputs": [
      {"data": {"video": {"url": "https://samples.clarifai.com/beer.mp4"}}}
    ],
    "output_config": {"sample_ms": 500}
  }'
Text
Let's illustrate how you would produce embeddings and clusters from text inputs using Clarifai's Language-Understanding text workflow.
- Python
- JavaScript (REST)
- NodeJS
- Java
- PHP
- cURL
######################################################################################################
# In this section, we set the user authentication, user and app ID, workflow ID, and the text
# we want as an input. Change these strings to run your own example.
######################################################################################################
# Your PAT (Personal Access Token) can be found in the Account's Security section
PAT = "YOUR_PAT_HERE"
USER_ID = "clarifai"
APP_ID = "main"
# Change these to make your own predictions
WORKFLOW_ID = "Language-Understanding"
RAW_TEXT = "This is a test text for testing"
# To use a hosted text file, assign the URL variable
# TEXT_FILE_URL = "https://samples.clarifai.com/negative_sentence_12.txt"
# Or, to use a local text file, assign the location variable
# TEXT_FILE_LOCATION = "YOUR_TEXT_FILE_LOCATION_HERE"
############################################################################
# YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
############################################################################
from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2

channel = ClarifaiChannel.get_grpc_channel()
stub = service_pb2_grpc.V2Stub(channel)
# Sent with every Clarifai endpoint call
metadata = (("authorization", "Key " + PAT),)
userDataObject = resources_pb2.UserAppIDSet(user_id=USER_ID, app_id=APP_ID)

# To use a local text file, uncomment the following lines
# with open(TEXT_FILE_LOCATION, "rb") as f:
#     file_bytes = f.read()

# Build the request object separately so the RPC call itself stays short
request = service_pb2.PostWorkflowResultsRequest(
    user_app_id=userDataObject,
    workflow_id=WORKFLOW_ID,
    inputs=[
        resources_pb2.Input(
            data=resources_pb2.Data(
                text=resources_pb2.Text(
                    raw=RAW_TEXT
                    # url=TEXT_FILE_URL
                    # raw=file_bytes
                )
            )
        )
    ],
)
post_workflow_results_response = stub.PostWorkflowResults(request, metadata=metadata)

if post_workflow_results_response.status.code != status_code_pb2.SUCCESS:
    print(post_workflow_results_response.status)
    raise Exception(
        "Post workflow results failed, status: "
        + post_workflow_results_response.status.description
    )

# One WorkflowResult is returned per input; we sent a single input
results = post_workflow_results_response.results[0]
# Each model in the workflow produces one output
for output in results.outputs:
    print("Predicted concepts for the model `%s`" % output.model.id)
    print(output.data)
# Uncomment this line to print the raw output
# print(results)
<!--index.html file-->
<script>
////////////////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, and the text
// we want as an input. Change these strings to run your own example.
///////////////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
const PAT = "YOUR_PAT_HERE";
const USER_ID = "clarifai";
const APP_ID = "main";
// Change these to make your own predictions
const WORKFLOW_ID = "Language-Understanding";
const RAW_TEXT = "This is a test text for testing";
// To use a hosted text file, assign the URL variable
// const TEXT_FILE_URL = "https://samples.clarifai.com/negative_sentence_12.txt";
///////////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
///////////////////////////////////////////////////////////////////////////////////
const raw = JSON.stringify({
"user_app_id": {
"user_id": USER_ID,
"app_id": APP_ID
},
"inputs": [
{
"data": {
"text": {
"raw": RAW_TEXT
// "url": TEXT_FILE_URL
}
}
}
]
});
const requestOptions = {
method: "POST",
headers: {
"Accept": "application/json",
"Authorization": "Key " + PAT
},
body: raw
};
fetch(`https://api.clarifai.com/v2/workflows/${WORKFLOW_ID}/results`, requestOptions)
.then(response => response.text())
.then(result => console.log(result))
.catch(error => console.log("error", error));
</script>
//index.js file
//////////////////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, and the text
// we want as an input. Change these strings to run your own example.
/////////////////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
const PAT = "YOUR_PAT_HERE";
const USER_ID = "clarifai";
const APP_ID = "main";
// Change these to make your own predictions
const WORKFLOW_ID = "Language-Understanding";
const RAW_TEXT = "This is a test text for testing";
// To use a hosted text file, assign the URL variable
// const TEXT_FILE_URL = "https://samples.clarifai.com/negative_sentence_12.txt"
// Or, to use a local text file, assign the location variable
// const TEXT_FILE_LOCATION = "YOUR_TEXT_FILE_LOCATION_HERE"
/////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
/////////////////////////////////////////////////////////////////////////////
const { ClarifaiStub, grpc } = require("clarifai-nodejs-grpc");
const stub = ClarifaiStub.grpc();
// This will be used by every Clarifai endpoint call
const metadata = new grpc.Metadata();
metadata.set("authorization", "Key " + PAT);
// To use a local text file, uncomment the following lines
// const fs = require("fs");
// const fileBytes = fs.readFileSync(TEXT_FILE_LOCATION);
stub.PostWorkflowResults(
  {
    user_app_id: {
      "user_id": USER_ID,
      "app_id": APP_ID,
    },
    workflow_id: WORKFLOW_ID,
    inputs: [{
      data: {
        text: {
          raw: RAW_TEXT
          // url: TEXT_FILE_URL,
          // raw: fileBytes
        }
      }
    }],
  },
  metadata,
  (err, response) => {
    if (err) {
      throw new Error(err);
    }
    // 10000 is the Clarifai SUCCESS status code
    if (response.status.code !== 10000) {
      throw new Error(
        "Post workflow results failed, status: " + response.status.description
      );
    }
    // We'll get one WorkflowResult for each input we used above. Because of one input, we have here
    // one WorkflowResult
    const results = response.results[0];
    // Each model we have in the workflow will produce one output.
    for (const output of results.outputs) {
      const model = output.model;
      console.log(`Predicted concepts for the model '${model.id}'`);
      console.log(output.data);
    }
    // Uncomment this line to print the raw output
    // console.log(results);
  }
);
package com.clarifai.example;

import com.clarifai.channel.ClarifaiChannel;
import com.clarifai.credentials.ClarifaiCallCredentials;
import com.clarifai.grpc.api.*;
import com.clarifai.grpc.api.status.StatusCode;
import com.google.protobuf.ByteString;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class ClarifaiExample {
    /////////////////////////////////////////////////////////////////////////////////////////////////////
    // In this section, we set the user authentication, user and app ID, workflow ID, and the text
    // we want as an input. Change these strings to run your own example.
    /////////////////////////////////////////////////////////////////////////////////////////////////////
    //Your PAT (Personal Access Token) can be found in the portal under Authentication
    static final String PAT = "YOUR_PAT_HERE";
    static final String USER_ID = "clarifai";
    static final String APP_ID = "main";
    // Change these to make your own predictions
    static final String WORKFLOW_ID = "Language-Understanding";
    static final String RAW_TEXT = "This is a test text for testing";
    // To use a hosted text file, assign the URL variable
    // static final String TEXT_FILE_URL = "https://samples.clarifai.com/negative_sentence_12.txt";
    // Or, to use a local text file, assign the location variable
    // static final String TEXT_FILE_LOCATION = "YOUR_TEXT_FILE_LOCATION_HERE";
    ///////////////////////////////////////////////////////////////////////////////////
    // YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
    ///////////////////////////////////////////////////////////////////////////////////
    public static void main(String[] args) throws IOException {
        V2Grpc.V2BlockingStub stub = V2Grpc.newBlockingStub(ClarifaiChannel.INSTANCE.getGrpcChannel())
            .withCallCredentials(new ClarifaiCallCredentials(PAT));
        // Build the request up front so the RPC call below stays readable
        PostWorkflowResultsRequest request = PostWorkflowResultsRequest.newBuilder()
            .setUserAppId(UserAppIDSet.newBuilder().setUserId(USER_ID).setAppId(APP_ID))
            .setWorkflowId(WORKFLOW_ID)
            .addInputs(
                Input.newBuilder().setData(
                    Data.newBuilder().setText(
                        Text.newBuilder().setRaw(RAW_TEXT)
                        // For a hosted file: Text.newBuilder().setUrl(TEXT_FILE_URL)
                        // For a local file:
                        // Text.newBuilder().setRawBytes(ByteString.copyFrom(Files.readAllBytes(
                        //     new File(TEXT_FILE_LOCATION).toPath()
                        // )))
                    )
                )
            )
            .build();
        PostWorkflowResultsResponse postWorkflowResultsResponse = stub.postWorkflowResults(request);
        if (postWorkflowResultsResponse.getStatus().getCode() != StatusCode.SUCCESS) {
            throw new RuntimeException("Post workflow results failed, status: " + postWorkflowResultsResponse.getStatus());
        }
        // We'll get one WorkflowResult for each input we used above. Because of one input, we have here
        // one WorkflowResult
        WorkflowResult results = postWorkflowResultsResponse.getResults(0);
        // Each model we have in the workflow will produce its output
        for (Output output : results.getOutputsList()) {
            System.out.println("Predicted concepts for the model '" + output.getModel().getId() + "'");
            System.out.println(output.getData());
        }
        // Uncomment this line to print the raw output
        // System.out.println(results);
    }
}
<?php
require __DIR__ . "/vendor/autoload.php";
/////////////////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, and the text
// we want as an input. Change these strings to run your own example.
/////////////////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
$PAT = "YOUR_PAT_HERE";
$USER_ID = "clarifai";
$APP_ID = "main";
// Change these to make your own predictions
$WORKFLOW_ID = "Language-Understanding";
$RAW_TEXT = "This is a test text for testing";
// To use a hosted text file, assign the URL variable
// $TEXT_FILE_URL = "https://samples.clarifai.com/negative_sentence_12.txt";
// Or, to use a local text file, assign the location variable
// $TEXT_FILE_LOCATION = "YOUR_TEXT_FILE_LOCATION_HERE";
///////////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
///////////////////////////////////////////////////////////////////////////////////
use Clarifai\ClarifaiClient;
use Clarifai\Api\Data;
use Clarifai\Api\Text;
use Clarifai\Api\Input;
use Clarifai\Api\PostWorkflowResultsRequest;
use Clarifai\Api\Status\StatusCode;
use Clarifai\Api\UserAppIDSet;
$client = ClarifaiClient::grpc();
// The PAT is passed as gRPC call metadata on every request
$metadata = ["Authorization" => ["Key " . $PAT]];
$userDataObject = new UserAppIDSet([
"user_id" => $USER_ID,
"app_id" => $APP_ID,
]);
// To use a local text file, uncomment the following lines
// $textData = file_get_contents($TEXT_FILE_LOCATION);
// Let's make a RPC call to the Clarifai platform. It uses the opened gRPC client channel to communicate a
// request and then wait for the response
[$response, $status] = $client
->PostWorkflowResults(
// The request object carries the user/app context, the workflow ID, and the inputs for the call
new PostWorkflowResultsRequest([
"user_app_id" => $userDataObject,
"workflow_id" => $WORKFLOW_ID,
"inputs" => [
new Input([
// The Input object wraps the Data object in order to meet the API specification
"data" => new Data([
// The Data object is constructed around the Text object. It offers a container that has additional
// input-independent metadata. In this particular use case, no other metadata is needed to be specified
"text" => new Text([
// In the Clarifai platform, a text is defined by a special Text object
"raw" => $RAW_TEXT
// "url" => $TEXT_FILE_URL
// "raw" => $textData
]),
]),
]),
],
]),
$metadata
)
->wait();
// A response is returned and the first thing we do is check the status of it
// A successful response will have a status code of 0; otherwise, there is some error
if ($status->code !== 0) {
throw new Exception("Error: {$status->details}");
}
// In addition to the RPC response status, there is a Clarifai API status that reports if the operation was a success or failure
// (not just that the communication was successful)
if ($response->getStatus()->getCode() != StatusCode::SUCCESS) {
throw new Exception(
"Failure response: " .
$response->getStatus()->getDescription() .
" " .
$response->getStatus()->getDetails()
);
}
// We'll get one WorkflowResult for each input we used above. Because of one input, we have here one WorkflowResult
$results = $response->getResults()[0];
// Each model we have in the workflow will produce one output
foreach ($results->getOutputs() as $output) {
$model = $output->getModel();
echo "Predicted concepts for the model '{$model->getId()}'" . "\n";
$convertDataToJSONString = $output->getData()->serializeToJsonString();
echo $convertDataToJSONString . "\n";
}
// Uncomment this line to print the raw output
// print_r($results);
?>
# Send one raw-text input to the "Language-Understanding" workflow owned by user
# "clarifai" in the "main" app. Replace YOUR_PAT_HERE with your Personal Access Token.
curl -X POST "https://api.clarifai.com/v2/users/clarifai/apps/main/workflows/Language-Understanding/results" \
-H "authorization: Key YOUR_PAT_HERE" \
-H "content-type: application/json" \
-d '{
"inputs": [
{
"data": {
"text": {
"raw": "This is a test text for testing"
}
}
}
]
}'
Text Output Example
Predicted concepts for the model `multilingual-text-embedding`
embeddings {
vector: 0.0255603846
vector: 0.0256410129
vector: 0.0107929539
vector: 0.030718796
vector: 0.0150386961
vector: -0.0226166341
vector: -0.0263089519
vector: 0.00326520158
vector: 0.0102104917
vector: -0.044386141
vector: -0.0195556302
vector: 0.00893216766
vector: 0.015003683
vector: -0.0130465208
vector: -0.0104514267
vector: 0.0217112433
vector: -0.0362542719
vector: -0.00232617464
vector: 0.0566842183
vector: -0.0483675748
vector: -0.010495048
vector: 0.0236451961
vector: -0.0139718978
vector: -0.0204272885
vector: 0.00862451177
vector: 0.0366721936
vector: 0.0400005206
vector: -0.0113559328
vector: -0.0424929187
vector: -0.0034513874
vector: -0.0322748311
vector: -0.00985202659
vector: -0.012448323
vector: -0.0394972041
vector: 0.0184240248
vector: 0.000438908
vector: -0.0020233281
vector: 0.0129011944
vector: 0.0250889361
vector: -0.0152506949
vector: 0.0270063598
vector: -0.0264613442
vector: 0.0258420501
vector: -0.0289224759
vector: -0.0264949258
vector: 0.0334232263
vector: -0.0338085629
vector: 0.0334763639
vector: -0.03762725
vector: 0.00782276411
vector: -0.00409373501
vector: 0.0120968325
vector: -0.0106019555
vector: -0.0269890483
vector: -0.072263509
vector: -0.012660739
vector: 0.0308089778
vector: 0.0298142675
vector: -0.0146902641
vector: 0.0218613446
vector: 0.0323576666
vector: -0.0436149947
vector: -0.0174307581
vector: 0.084372744
vector: 0.0181334354
vector: -0.0199910812
vector: 0.0123974094
vector: -0.0193333048
vector: 0.0339736864
vector: -0.0253562946
vector: 0.00288021402
vector: 0.0169731714
vector: -0.0200157464
vector: -0.0637308881
vector: 7.40556279e-005
vector: 0.00873292703
vector: -0.0170422606
vector: 0.0415907614
vector: -0.00372848427
vector: 0.0341904387
vector: 0.0267014429
vector: -0.0209374689
vector: -0.0327630155
vector: -0.0457439
vector: 0.00402727
vector: -0.00840501208
vector: 0.0180109739
vector: -0.00936154835
vector: -0.0110226534
vector: -0.00436874479
vector: 0.0163043
vector: 0.0262424368
vector: 0.0101943593
vector: -0.0400694683
vector: -0.00884656888
vector: -0.0427878574
vector: -0.0641586557
vector: -0.0212010648
vector: -0.00159631309
vector: 0.0310680382
vector: -0.0647975504
vector: -0.023672644
vector: 0.0459937714
vector: -0.00774210179
vector: 0.000529117067
vector: -0.0292916577
vector: -0.0145822288
vector: 0.00338335894
vector: -0.0156141864
vector: -0.00935915578
vector: 0.0299793016
vector: -0.00355648622
vector: 0.0202946737
vector: 0.0302724876
vector: 0.00297537982
vector: 0.0380662605
vector: 0.0350826
vector: 0.0141671756
vector: 0.0307802558
vector: 0.0251820423
vector: -0.04314005
vector: 0.0967362
vector: 0.0179795
vector: -0.0144064706
vector: 0.0614442118
vector: 0.0418301858
vector: 0.0298902467
vector: -0.00762633048
vector: 0.00442519924
vector: 0.000885691727
vector: -0.0406515226
vector: -0.0188011918
vector: 0.0137273492
vector: 0.00622383412
vector: 0.0335491821
vector: 0.00737484824
vector: 0.0139906872
vector: 0.0109717203
vector: 0.010755497
vector: 0.0112457387
vector: -0.00598319899
vector: 0.0259225015
vector: -0.000439705298
vector: -0.017860774
vector: -0.0371661447
vector: -0.0182125829
vector: -0.000586743816
vector: 0.0030561774
vector: 0.00668949727
vector: -0.0123718204
vector: -0.0365500264
vector: 0.0134482831
vector: -0.0129413838
vector: -0.00557328854
vector: 0.0504806265
vector: 0.0707531124
vector: 0.0188564844
vector: 0.0097397631
vector: -0.0409301817
vector: 0.00957701914
vector: -0.0113784261
vector: 0.0362381637
vector: 0.00238611782
vector: -0.0190066174
vector: -0.0160514135
vector: -0.0437530763
vector: -0.00567471469
vector: -0.0242700893
vector: 0.0125929378
vector: -0.00250009913
vector: -0.0128744598
vector: -0.0602364838
vector: -0.0363118686
vector: 0.0310818329
vector: -0.0280554648
vector: 0.00686237263
vector: 0.051632829
vector: -0.0259241946
vector: 0.000656295859
vector: -0.0104174372
vector: 0.0143860672
vector: -0.0219465848
vector: -0.0635990351
vector: 0.00397105515
vector: -0.0171896238
vector: -0.000487698242
vector: 0.0557192415
vector: 0.0290173776
vector: -0.0216272268
vector: 0.0197027903
vector: 0.00293333107
vector: -0.0256508272
vector: 0.0145954657
vector: 0.0147802
vector: 0.0199410208
vector: 0.0362814479
vector: 0.0114512853
vector: 0.0516074747
vector: -0.00228457758
vector: -0.0247946437
vector: 0.0531231686
vector: 0.0412405729
vector: -0.00312332762
vector: -0.0384900719
vector: 0.0265060011
vector: 0.0188652594
vector: 0.0254785381
vector: -0.0120410575
vector: -0.055106923
vector: -0.041226916
vector: -0.0110674649
vector: -0.00518939504
vector: 0.0258076955
vector: -4.47058883e-005
vector: 0.00584268896
vector: 0.0291903764
vector: 0.0544795282
vector: -0.0141743645
vector: 0.0383740328
vector: -0.0609933324
vector: 0.0196725428
vector: -0.0131683592
vector: 0.0928284079
vector: 0.0304285903
vector: -0.0431602523
vector: -0.000314288016
vector: 0.0481073223
vector: -0.03109896
vector: 0.0305516273
vector: -0.0531298704
vector: -0.0364271179
vector: 0.0249502454
vector: -0.035180334
vector: -0.00412273454
vector: 0.0286418777
vector: 0.00197291095
vector: -0.0143354721
vector: 0.0143140703
vector: 0.044354897
vector: 0.0567986146
vector: 0.0701035857
vector: 0.010885776
vector: -0.00677968934
vector: -0.0355549529
vector: 0.0214009341
vector: -0.0396741442
vector: 0.0010890587
vector: 0.0230288804
vector: 0.0160423983
vector: 0.00770209916
vector: -0.00134117063
vector: 0.00584157603
vector: -0.0506436527
vector: 0.0167286471
vector: -0.0331318229
vector: 0.0315938741
vector: -0.0301273353
vector: 0.00558987679
vector: 0.00451010419
vector: -0.0279915575
vector: -0.0209878609
vector: -0.0199963469
vector: -0.000246938725
vector: -0.0405185111
vector: 0.00242121704
vector: 0.00407472113
vector: -0.00968741067
vector: -0.038693171
vector: -0.0187584516
vector: 0.0341708027
vector: -0.0132718869
vector: 0.00288945087
vector: 0.0163044799
vector: -0.0546093583
vector: -0.0190316942
vector: 0.0211417172
vector: 0.0336544812
vector: -0.00693607656
vector: 0.0127230892
vector: 0.0135776838
vector: -0.0617968142
vector: -0.00195178052
vector: 0.0471284464
vector: -0.0395634659
vector: 0.0295374077
vector: -0.0362583138
vector: 0.0599530078
vector: -0.0492380969
vector: 0.0120025882
vector: -0.0152681777
vector: 0.00973533373
vector: 0.0677181706
vector: 0.0222762376
vector: -0.0271166284
vector: 0.0360873379
vector: -0.0109270047
vector: 0.00856078602
vector: 0.0292164441
vector: -0.0243707
vector: -0.0110178702
vector: 0.0125099961
vector: -0.000995316426
vector: 0.00649002707
vector: -0.0221561305
vector: 0.0468515716
vector: 0.00950729568
vector: -0.0216504335
vector: -0.0135809742
vector: -0.587396502
vector: -0.0500078537
vector: 0.0219854936
vector: -0.01205255
vector: 0.0175891258
vector: -0.0113249039
vector: 0.0286584757
vector: 0.00650032377
vector: -0.00326811569
vector: 0.022440603
vector: -0.0182182249
vector: 0.0761179253
vector: -0.0167741235
vector: 0.0361539535
vector: -0.0113675604
vector: -0.00895621907
vector: -0.0258418024
vector: 0.0181844737
vector: 0.0115667935
vector: 0.0318501815
vector: 0.00093767792
vector: 0.00287587265
vector: 0.0216217488
vector: 0.0230962206
vector: -0.0256820396
vector: 0.0218300279
vector: -0.0249342
vector: 0.0561292693
vector: -0.0231751911
vector: 0.016221609
vector: 0.00636604
vector: -0.0150087075
vector: -0.00729617896
vector: 0.031151155
vector: -0.0164434742
vector: -0.00316140847
vector: 0.0480747186
vector: -0.00684076874
vector: -0.0348987654
vector: 0.0300664771
vector: -0.0080304658
vector: -0.00318912556
vector: 0.0232989155
vector: -0.0248846486
vector: -0.074139826
vector: -0.017566992
vector: 0.00341211492
vector: -0.00683950912
vector: 0.0451601073
vector: -0.0306811985
vector: -0.0506822355
vector: 0.0148111014
vector: -0.0168116689
vector: 0.0245187134
vector: -0.0385940857
vector: -0.00666388543
vector: 0.00905002933
vector: 0.0306002442
vector: 0.0104561793
vector: -0.0616600476
vector: -0.000910751347
vector: -0.0383770578
vector: -0.0319519192
vector: -0.0191159304
vector: 0.00798001699
vector: 0.0162921902
vector: -0.00722851697
vector: -0.00984974951
vector: 0.00443824846
vector: 0.0458599813
vector: 0.00428895839
vector: -0.016468009
vector: 0.00984302443
vector: -0.0848148167
vector: 0.0138867963
vector: -0.0347199
vector: -0.012926463
vector: -0.0195842963
vector: 0.00821305159
vector: 0.0116449213
vector: -0.0596969128
vector: 0.0139209842
vector: 0.0124949655
vector: 0.0564839914
vector: 0.073116228
vector: 0.0054912949
vector: 0.008373552
vector: 0.049428314
vector: 0.00387572078
vector: 0.00328427833
vector: 0.0574303158
vector: -0.0401720814
vector: 0.00543978252
vector: 0.0125507237
vector: -0.0223911144
vector: 0.0530512854
vector: -0.0193119962
vector: -0.056736242
vector: 0.028417591
vector: 0.0039758184
vector: 0.0603403524
vector: 0.00940856431
vector: -0.00775589608
vector: -0.00202059839
vector: -0.0325067
vector: 0.00763982581
vector: -0.0420214422
vector: -0.0248172414
vector: 0.0121375453
vector: -0.0279675424
vector: -0.0139899682
vector: 0.0103907259
vector: 0.00846104417
vector: 0.000477065099
vector: -0.0172488894
vector: 0.0307512395
vector: -0.0159618128
vector: -0.0448177345
vector: -0.0378222652
vector: -0.00430452963
vector: -0.0221919119
vector: -0.0052908631
vector: -0.0408164859
vector: -0.0093395263
vector: -0.0526061207
vector: -0.0033937979
vector: -0.0111951698
vector: -0.018287722
vector: 0.0332923383
vector: 0.0202295
vector: -0.0026297241
vector: -0.0169538446
vector: -0.000453287736
vector: 0.0429562218
vector: -0.000680702738
vector: 0.0273506679
vector: -0.0683761761
vector: 0.0667696
vector: 0.0106630186
vector: -0.0298528746
vector: -0.000488598191
vector: -0.0440973416
vector: 0.0534471236
vector: -0.0181944631
vector: -0.00166536344
vector: -0.0655773953
vector: 0.0274203978
vector: 0.00751716364
vector: 0.012719023
vector: -0.0299365614
vector: 0.0668713
vector: 0.0176321361
vector: 0.00121316453
vector: 0.0211630538
vector: -0.0457814448
vector: 0.0290366914
vector: 0.0472914241
vector: 1.481e-005
vector: -0.0201600771
vector: 0.0741583481
vector: -0.0177378468
vector: -0.018208934
vector: -0.0424425118
vector: -0.0322192535
vector: 0.0212140493
vector: -0.0333384909
vector: -0.0089503089
vector: 0.00865053944
vector: -0.0114420308
vector: -0.0323484875
vector: -0.0319988914
vector: -0.0404753834
vector: -0.0135618644
vector: 0.0243302248
vector: -0.00248846621
vector: -0.00764315343
vector: 0.0407035202
vector: 0.0072440342
vector: -0.00446285773
vector: -0.00628219312
vector: 0.00204555667
vector: -0.0171564016
vector: 0.00325665134
vector: -0.0169214662
vector: -0.00940683484
vector: 0.0141208908
vector: 0.00711128581
vector: -0.0218882859
vector: -0.0365912281
vector: -0.0138345128
vector: 0.028417252
vector: -0.0632902831
vector: 0.0563294031
vector: -0.0139016444
vector: 0.0173191428
vector: 0.0109188249
vector: -0.00881743524
vector: 0.0335570648
vector: 0.0302902944
vector: 0.00306223263
vector: 0.0300811697
vector: 0.0165142342
vector: -0.0220398791
vector: -0.0190337524
vector: -0.0218247
vector: -0.00892979838
vector: 0.00296077109
vector: -0.00667882059
vector: 0.0369906649
vector: 0.00189560978
vector: 0.0496911258
vector: -0.0371349975
vector: -0.0232151151
vector: 0.0153016197
vector: -0.00322092674
vector: -0.0189977195
vector: -0.0241388399
vector: -0.0209848098
vector: -0.00240087532
vector: -0.0097488109
vector: 0.0277912915
vector: 0.0129486732
vector: -0.0418183915
vector: -0.0172175094
vector: 0.014063701
vector: 0.0179245677
vector: 0.0329503492
vector: -0.0451413542
vector: 0.0375629142
vector: -0.00336613436
vector: 0.0837872624
vector: 0.0155244442
vector: -0.0319327973
vector: 0.0177341402
vector: -0.0694914684
vector: -0.0230065882
vector: -0.0349770822
vector: -0.000637284247
vector: -0.0171240233
vector: -0.000385974447
vector: -0.0154445628
vector: -0.00813626312
vector: -0.0018971978
vector: -0.0101503106
vector: -0.0577919446
vector: 0.00862083118
vector: 0.0169755798
vector: -0.0585522167
vector: -0.0303873625
vector: 0.0278049447
vector: 0.000132163666
vector: 0.0184809174
vector: 0.0111429971
vector: 0.0186991747
vector: 0.0336764
vector: 0.00854624715
vector: 0.0118956529
vector: 5.09967458e-006
vector: -0.0374510773
vector: 0.0125189032
vector: 0.0146504
vector: 0.0372200981
vector: -0.0323317759
vector: 0.0184809044
vector: -0.0637908429
vector: 0.00348871434
vector: 0.0116535509
vector: -0.0170193538
vector: -0.00596837653
vector: 0.00763026299
vector: -0.0396812037
vector: 0.00393839693
vector: 0.0243215691
vector: 0.0736935437
vector: 0.0649354607
vector: -0.0693660229
vector: -0.0173935127
vector: 0.0645925924
vector: 0.0218239073
vector: 0.0443394184
vector: 0.00201784237
vector: 0.000589759089
vector: 0.0140056768
vector: 0.0208582152
vector: -0.0178501364
vector: 0.0198773723
vector: 0.0119620683
vector: -0.00607993035
vector: 0.0137936343
vector: 0.015486001
vector: 0.0217939913
vector: 0.000516918313
vector: -0.0155286901
vector: -0.0236418
vector: 0.00602063863
vector: -0.0117947338
vector: -0.0382487215
vector: -0.0253913198
vector: 0.0540902093
vector: -0.00069479784
vector: -0.0030842768
vector: 0.00678593758
vector: -0.0158268567
vector: -0.0301091801
vector: -0.047721222
vector: 0.0486889854
vector: -0.0031820375
vector: -0.00780140189
vector: -0.0473188572
vector: -0.0377694
vector: 0.00300174044
vector: 0.00725157047
vector: 0.0334912054
vector: -0.0190392211
vector: -0.00786152
vector: -0.00393901
vector: 0.0346906558
vector: -0.0048984047
vector: -0.0289619621
vector: -0.0132993627
vector: -0.0260517057
vector: -0.0194860194
vector: -0.00903460104
vector: -0.0720684156
vector: -0.0326718502
vector: -0.0178757478
vector: 0.0116493832
vector: -0.0249003451
vector: -0.012765849
vector: -0.0367143154
vector: 0.0253556
vector: -0.000750295934
vector: 0.00800141
vector: -0.0399938263
vector: -0.0156591497
vector: 0.00355092739
vector: 0.00527952937
vector: -0.00371489814
vector: 0.00979584455
vector: 0.0128817623
vector: -0.0257944819
vector: 0.00532698631
vector: -0.0054745716
vector: -0.0402425155
vector: 0.0023749
vector: -0.0191466119
vector: -0.000185457975
vector: 0.0123729743
vector: -0.0285796933
vector: 0.027248418
vector: -0.00682593649
vector: -0.027824128
vector: -0.0483085215
vector: 0.0509723
vector: 0.0150561249
vector: -0.0671003759
vector: -0.0302604195
vector: 0.0335228
vector: -0.00811030716
vector: -0.0476540774
vector: -0.0483691692
vector: 0.0257905629
vector: 0.0216702223
vector: 0.0756625161
vector: -0.0114407977
vector: 0.0250642728
vector: 0.0492128
vector: 0.0104585467
vector: 0.0354731493
vector: 0.0342974328
vector: -0.0315436497
vector: -0.0416372307
vector: 0.0251945127
vector: 0.0550534874
vector: -0.00769103458
vector: -0.0269156434
vector: 0.0629401803
vector: 0.00849406514
vector: 0.0277529527
vector: 0.00905152876
vector: -0.0214481559
vector: -0.0336390026
vector: 0.0172805414
vector: -0.0254873466
vector: 0.00181326456
vector: -0.00262851967
vector: 0.00212677
vector: 0.021096956
vector: -0.0310921967
vector: 0.0090319775
vector: -0.0244406015
vector: -0.00450366875
vector: 0.0024087145
vector: -0.021715302
vector: -0.0187266618
vector: 0.00639256975
vector: 0.00592296245
vector: -0.0126604829
vector: -0.00361264497
vector: 0.018905757
vector: 0.0180640183
vector: -0.0142810205
vector: 0.0201476328
vector: -0.0371763296
vector: -0.016901549
vector: 0.0173841435
vector: 0.0144714024
vector: 0.0371279158
vector: -0.0259519368
vector: -0.0135354148
vector: 0.0480304547
vector: 0.0241338
vector: 0.00995781645
vector: -0.0145506114
vector: -0.0132244434
vector: 0.0104087936
vector: -0.0019786309
vector: -0.010059624
vector: 0.0418220572
vector: 0.00906385668
vector: -0.0608912781
vector: 0.0269323979
vector: 0.0267148074
vector: 0.0114678359
vector: -0.00542484596
vector: 0.0605565161
vector: 0.00729974685
vector: 0.0284503475
vector: 0.000257256615
vector: -0.029183466
vector: 0.029569421
vector: 0.000915852608
vector: 0.0109864781
vector: 0.0160177927
vector: -0.0311278421
vector: -0.00568914367
vector: -0.045658119
vector: -0.0380534567
vector: 0.00769931311
vector: -0.0169286355
vector: -0.0414473452
vector: 0.0346236937
vector: 0.00468268059
vector: -0.000171230145
vector: -0.0555126853
vector: -0.0213408619
vector: -0.00562082976
vector: -0.042139709
vector: 0.00932554808
vector: 0.0544001274
vector: 0.0186972376
vector: 0.0127014797
vector: 0.0039651487
vector: 0.0317394622
vector: 0.00775914779
vector: -0.0223883521
vector: -0.0120635182
vector: 0.0196055751
vector: 0.0291586574
vector: 0.0337944
num_dimensions: 768
}
Predicted concepts for the model `multilingual-text-clustering`
clusters {
id: "9_17"
projection: 0.210836634
projection: -0.248410583
}
Raw Output Example
status {
code: SUCCESS
description: "Ok"
}
input {
id: "f58574ce6a304c39ba298ecb4f4eef5e"
data {
text {
raw: "This is a test text for testing"
url: "https://samples.clarifai.com/placeholder.gif"
}
}
}
outputs {
id: "0ff24540c786470e912984865c03efc0"
status {
code: SUCCESS
description: "Ok"
}
created_at {
seconds: 1700723375
nanos: 640229252
}
model {
id: "multilingual-text-embedding"
name: "multilingual-text-embedding"
created_at {
seconds: 1581694729
nanos: 522174000
}
modified_at {
seconds: 1655211303
nanos: 454205000
}
app_id: "main"
model_version {
id: "9b33adf15280465b857163ddaaacdcb1"
created_at {
seconds: 1606747915
nanos: 848030000
}
status {
code: MODEL_TRAINED
description: "Model is trained and ready"
}
visibility {
gettable: PUBLIC
}
app_id: "main"
user_id: "clarifai"
metadata {
}
}
user_id: "clarifai"
model_type_id: "text-embedder"
visibility {
gettable: PUBLIC
}
workflow_recommended {
}
}
data {
embeddings {
vector: 0.0255603846
vector: 0.0256410129
vector: 0.0107929539
vector: 0.030718796
vector: 0.0150386961
vector: -0.0226166341
vector: -0.0263089519
vector: 0.00326520158
vector: 0.0102104917
vector: -0.044386141
vector: -0.0195556302
vector: 0.00893216766
vector: 0.015003683
vector: -0.0130465208
vector: -0.0104514267
vector: 0.0217112433
vector: -0.0362542719
vector: -0.00232617464
vector: 0.0566842183
vector: -0.0483675748
vector: -0.010495048
vector: 0.0236451961
vector: -0.0139718978
vector: -0.0204272885
vector: 0.00862451177
vector: 0.0366721936
vector: 0.0400005206
vector: -0.0113559328
vector: -0.0424929187
vector: -0.0034513874
vector: -0.0322748311
vector: -0.00985202659
vector: -0.012448323
vector: -0.0394972041
vector: 0.0184240248
vector: 0.000438908
vector: -0.0020233281
vector: 0.0129011944
vector: 0.0250889361
vector: -0.0152506949
vector: 0.0270063598
vector: -0.0264613442
vector: 0.0258420501
vector: -0.0289224759
vector: -0.0264949258
vector: 0.0334232263
vector: -0.0338085629
vector: 0.0334763639
vector: -0.03762725
vector: 0.00782276411
vector: -0.00409373501
vector: 0.0120968325
vector: -0.0106019555
vector: -0.0269890483
vector: -0.072263509
vector: -0.012660739
vector: 0.0308089778
vector: 0.0298142675
vector: -0.0146902641
vector: 0.0218613446
vector: 0.0323576666
vector: -0.0436149947
vector: -0.0174307581
vector: 0.084372744
vector: 0.0181334354
vector: -0.0199910812
vector: 0.0123974094
vector: -0.0193333048
vector: 0.0339736864
vector: -0.0253562946
vector: 0.00288021402
vector: 0.0169731714
vector: -0.0200157464
vector: -0.0637308881
vector: 7.40556279e-005
vector: 0.00873292703
vector: -0.0170422606
vector: 0.0415907614
vector: -0.00372848427
vector: 0.0341904387
vector: 0.0267014429
vector: -0.0209374689
vector: -0.0327630155
vector: -0.0457439
vector: 0.00402727
vector: -0.00840501208
vector: 0.0180109739
vector: -0.00936154835
vector: -0.0110226534
vector: -0.00436874479
vector: 0.0163043
vector: 0.0262424368
vector: 0.0101943593
vector: -0.0400694683
vector: -0.00884656888
vector: -0.0427878574
vector: -0.0641586557
vector: -0.0212010648
vector: -0.00159631309
vector: 0.0310680382
vector: -0.0647975504
vector: -0.023672644
vector: 0.0459937714
vector: -0.00774210179
vector: 0.000529117067
vector: -0.0292916577
vector: -0.0145822288
vector: 0.00338335894
vector: -0.0156141864
vector: -0.00935915578
vector: 0.0299793016
vector: -0.00355648622
vector: 0.0202946737
vector: 0.0302724876
vector: 0.00297537982
vector: 0.0380662605
vector: 0.0350826
vector: 0.0141671756
vector: 0.0307802558
vector: 0.0251820423
vector: -0.04314005
vector: 0.0967362
vector: 0.0179795
vector: -0.0144064706
vector: 0.0614442118
vector: 0.0418301858
vector: 0.0298902467
vector: -0.00762633048
vector: 0.00442519924
vector: 0.000885691727
vector: -0.0406515226
vector: -0.0188011918
vector: 0.0137273492
vector: 0.00622383412
vector: 0.0335491821
vector: 0.00737484824
vector: 0.0139906872
vector: 0.0109717203
vector: 0.010755497
vector: 0.0112457387
vector: -0.00598319899
vector: 0.0259225015
vector: -0.000439705298
vector: -0.017860774
vector: -0.0371661447
vector: -0.0182125829
vector: -0.000586743816
vector: 0.0030561774
vector: 0.00668949727
vector: -0.0123718204
vector: -0.0365500264
vector: 0.0134482831
vector: -0.0129413838
vector: -0.00557328854
vector: 0.0504806265
vector: 0.0707531124
vector: 0.0188564844
vector: 0.0097397631
vector: -0.0409301817
vector: 0.00957701914
vector: -0.0113784261
vector: 0.0362381637
vector: 0.00238611782
vector: -0.0190066174
vector: -0.0160514135
vector: -0.0437530763
vector: -0.00567471469
vector: -0.0242700893
vector: 0.0125929378
vector: -0.00250009913
vector: -0.0128744598
vector: -0.0602364838
vector: -0.0363118686
vector: 0.0310818329
vector: -0.0280554648
vector: 0.00686237263
vector: 0.051632829
vector: -0.0259241946
vector: 0.000656295859
vector: -0.0104174372
vector: 0.0143860672
vector: -0.0219465848
vector: -0.0635990351
vector: 0.00397105515
vector: -0.0171896238
vector: -0.000487698242
vector: 0.0557192415
vector: 0.0290173776
vector: -0.0216272268
vector: 0.0197027903
vector: 0.00293333107
vector: -0.0256508272
vector: 0.0145954657
vector: 0.0147802
vector: 0.0199410208
vector: 0.0362814479
vector: 0.0114512853
vector: 0.0516074747
vector: -0.00228457758
vector: -0.0247946437
vector: 0.0531231686
vector: 0.0412405729
vector: -0.00312332762
vector: -0.0384900719
vector: 0.0265060011
vector: 0.0188652594
vector: 0.0254785381
vector: -0.0120410575
vector: -0.055106923
vector: -0.041226916
vector: -0.0110674649
vector: -0.00518939504
vector: 0.0258076955
vector: -4.47058883e-005
vector: 0.00584268896
vector: 0.0291903764
vector: 0.0544795282
vector: -0.0141743645
vector: 0.0383740328
vector: -0.0609933324
vector: 0.0196725428
vector: -0.0131683592
vector: 0.0928284079
vector: 0.0304285903
vector: -0.0431602523
vector: -0.000314288016
vector: 0.0481073223
vector: -0.03109896
vector: 0.0305516273
vector: -0.0531298704
vector: -0.0364271179
vector: 0.0249502454
vector: -0.035180334
vector: -0.00412273454
vector: 0.0286418777
vector: 0.00197291095
vector: -0.0143354721
vector: 0.0143140703
vector: 0.044354897
vector: 0.0567986146
vector: 0.0701035857
vector: 0.010885776
vector: -0.00677968934
vector: -0.0355549529
vector: 0.0214009341
vector: -0.0396741442
vector: 0.0010890587
vector: 0.0230288804
vector: 0.0160423983
vector: 0.00770209916
vector: -0.00134117063
vector: 0.00584157603
vector: -0.0506436527
vector: 0.0167286471
vector: -0.0331318229
vector: 0.0315938741
vector: -0.0301273353
vector: 0.00558987679
vector: 0.00451010419
vector: -0.0279915575
vector: -0.0209878609
vector: -0.0199963469
vector: -0.000246938725
vector: -0.0405185111
vector: 0.00242121704
vector: 0.00407472113
vector: -0.00968741067
vector: -0.038693171
vector: -0.0187584516
vector: 0.0341708027
vector: -0.0132718869
vector: 0.00288945087
vector: 0.0163044799
vector: -0.0546093583
vector: -0.0190316942
vector: 0.0211417172
vector: 0.0336544812
vector: -0.00693607656
vector: 0.0127230892
vector: 0.0135776838
vector: -0.0617968142
vector: -0.00195178052
vector: 0.0471284464
vector: -0.0395634659
vector: 0.0295374077
vector: -0.0362583138
vector: 0.0599530078
vector: -0.0492380969
vector: 0.0120025882
vector: -0.0152681777
vector: 0.00973533373
vector: 0.0677181706
vector: 0.0222762376
vector: -0.0271166284
vector: 0.0360873379
vector: -0.0109270047
vector: 0.00856078602
vector: 0.0292164441
vector: -0.0243707
vector: -0.0110178702
vector: 0.0125099961
vector: -0.000995316426
vector: 0.00649002707
vector: -0.0221561305
vector: 0.0468515716
vector: 0.00950729568
vector: -0.0216504335
vector: -0.0135809742
vector: -0.587396502
vector: -0.0500078537
vector: 0.0219854936
vector: -0.01205255
vector: 0.0175891258
vector: -0.0113249039
vector: 0.0286584757
vector: 0.00650032377
vector: -0.00326811569
vector: 0.022440603
vector: -0.0182182249
vector: 0.0761179253
vector: -0.0167741235
vector: 0.0361539535
vector: -0.0113675604
vector: -0.00895621907
vector: -0.0258418024
vector: 0.0181844737
vector: 0.0115667935
vector: 0.0318501815
vector: 0.00093767792
vector: 0.00287587265
vector: 0.0216217488
vector: 0.0230962206
vector: -0.0256820396
vector: 0.0218300279
vector: -0.0249342
vector: 0.0561292693
vector: -0.0231751911
vector: 0.016221609
vector: 0.00636604
vector: -0.0150087075
vector: -0.00729617896
vector: 0.031151155
vector: -0.0164434742
vector: -0.00316140847
vector: 0.0480747186
vector: -0.00684076874
vector: -0.0348987654
vector: 0.0300664771
vector: -0.0080304658
vector: -0.00318912556
vector: 0.0232989155
vector: -0.0248846486
vector: -0.074139826
vector: -0.017566992
vector: 0.00341211492
vector: -0.00683950912
vector: 0.0451601073
vector: -0.0306811985
vector: -0.0506822355
vector: 0.0148111014
vector: -0.0168116689
vector: 0.0245187134
vector: -0.0385940857
vector: -0.00666388543
vector: 0.00905002933
vector: 0.0306002442
vector: 0.0104561793
vector: -0.0616600476
vector: -0.000910751347
vector: -0.0383770578
vector: -0.0319519192
vector: -0.0191159304
vector: 0.00798001699
vector: 0.0162921902
vector: -0.00722851697
vector: -0.00984974951
vector: 0.00443824846
vector: 0.0458599813
vector: 0.00428895839
vector: -0.016468009
vector: 0.00984302443
vector: -0.0848148167
vector: 0.0138867963
vector: -0.0347199
vector: -0.012926463
vector: -0.0195842963
vector: 0.00821305159
vector: 0.0116449213
vector: -0.0596969128
vector: 0.0139209842
vector: 0.0124949655
vector: 0.0564839914
vector: 0.073116228
vector: 0.0054912949
vector: 0.008373552
vector: 0.049428314
vector: 0.00387572078
vector: 0.00328427833
vector: 0.0574303158
vector: -0.0401720814
vector: 0.00543978252
vector: 0.0125507237
vector: -0.0223911144
vector: 0.0530512854
vector: -0.0193119962
vector: -0.056736242
vector: 0.028417591
vector: 0.0039758184
vector: 0.0603403524
vector: 0.00940856431
vector: -0.00775589608
vector: -0.00202059839
vector: -0.0325067
vector: 0.00763982581
vector: -0.0420214422
vector: -0.0248172414
vector: 0.0121375453
vector: -0.0279675424
vector: -0.0139899682
vector: 0.0103907259
vector: 0.00846104417
vector: 0.000477065099
vector: -0.0172488894
vector: 0.0307512395
vector: -0.0159618128
vector: -0.0448177345
vector: -0.0378222652
vector: -0.00430452963
vector: -0.0221919119
vector: -0.0052908631
vector: -0.0408164859
vector: -0.0093395263
vector: -0.0526061207
vector: -0.0033937979
vector: -0.0111951698
vector: -0.018287722
vector: 0.0332923383
vector: 0.0202295
vector: -0.0026297241
vector: -0.0169538446
vector: -0.000453287736
vector: 0.0429562218
vector: -0.000680702738
vector: 0.0273506679
vector: -0.0683761761
vector: 0.0667696
vector: 0.0106630186
vector: -0.0298528746
vector: -0.000488598191
vector: -0.0440973416
vector: 0.0534471236
vector: -0.0181944631
vector: -0.00166536344
vector: -0.0655773953
vector: 0.0274203978
vector: 0.00751716364
vector: 0.012719023
vector: -0.0299365614
vector: 0.0668713
vector: 0.0176321361
vector: 0.00121316453
vector: 0.0211630538
vector: -0.0457814448
vector: 0.0290366914
vector: 0.0472914241
vector: 1.481e-005
vector: -0.0201600771
vector: 0.0741583481
vector: -0.0177378468
vector: -0.018208934
vector: -0.0424425118
vector: -0.0322192535
vector: 0.0212140493
vector: -0.0333384909
vector: -0.0089503089
vector: 0.00865053944
vector: -0.0114420308
vector: -0.0323484875
vector: -0.0319988914
vector: -0.0404753834
vector: -0.0135618644
vector: 0.0243302248
vector: -0.00248846621
vector: -0.00764315343
vector: 0.0407035202
vector: 0.0072440342
vector: -0.00446285773
vector: -0.00628219312
vector: 0.00204555667
vector: -0.0171564016
vector: 0.00325665134
vector: -0.0169214662
vector: -0.00940683484
vector: 0.0141208908
vector: 0.00711128581
vector: -0.0218882859
vector: -0.0365912281
vector: -0.0138345128
vector: 0.028417252
vector: -0.0632902831
vector: 0.0563294031
vector: -0.0139016444
vector: 0.0173191428
vector: 0.0109188249
vector: -0.00881743524
vector: 0.0335570648
vector: 0.0302902944
vector: 0.00306223263
vector: 0.0300811697
vector: 0.0165142342
vector: -0.0220398791
vector: -0.0190337524
vector: -0.0218247
vector: -0.00892979838
vector: 0.00296077109
vector: -0.00667882059
vector: 0.0369906649
vector: 0.00189560978
vector: 0.0496911258
vector: -0.0371349975
vector: -0.0232151151
vector: 0.0153016197
vector: -0.00322092674
vector: -0.0189977195
vector: -0.0241388399
vector: -0.0209848098
vector: -0.00240087532
vector: -0.0097488109
vector: 0.0277912915
vector: 0.0129486732
vector: -0.0418183915
vector: -0.0172175094
vector: 0.014063701
vector: 0.0179245677
vector: 0.0329503492
vector: -0.0451413542
vector: 0.0375629142
vector: -0.00336613436
vector: 0.0837872624
vector: 0.0155244442
vector: -0.0319327973
vector: 0.0177341402
vector: -0.0694914684
vector: -0.0230065882
vector: -0.0349770822
vector: -0.000637284247
vector: -0.0171240233
vector: -0.000385974447
vector: -0.0154445628
vector: -0.00813626312
vector: -0.0018971978
vector: -0.0101503106
vector: -0.0577919446
vector: 0.00862083118
vector: 0.0169755798
vector: -0.0585522167
vector: -0.0303873625
vector: 0.0278049447
vector: 0.000132163666
vector: 0.0184809174
vector: 0.0111429971
vector: 0.0186991747
vector: 0.0336764
vector: 0.00854624715
vector: 0.0118956529
vector: 5.09967458e-006
vector: -0.0374510773
vector: 0.0125189032
vector: 0.0146504
vector: 0.0372200981
vector: -0.0323317759
vector: 0.0184809044
vector: -0.0637908429
vector: 0.00348871434
vector: 0.0116535509
vector: -0.0170193538
vector: -0.00596837653
vector: 0.00763026299
vector: -0.0396812037
vector: 0.00393839693
vector: 0.0243215691
vector: 0.0736935437
vector: 0.0649354607
vector: -0.0693660229
vector: -0.0173935127
vector: 0.0645925924
vector: 0.0218239073
vector: 0.0443394184
vector: 0.00201784237
vector: 0.000589759089
vector: 0.0140056768
vector: 0.0208582152
vector: -0.0178501364
vector: 0.0198773723
vector: 0.0119620683
vector: -0.00607993035
vector: 0.0137936343
vector: 0.015486001
vector: 0.0217939913
vector: 0.000516918313
vector: -0.0155286901
vector: -0.0236418
vector: 0.00602063863
vector: -0.0117947338
vector: -0.0382487215
vector: -0.0253913198
vector: 0.0540902093
vector: -0.00069479784
vector: -0.0030842768
vector: 0.00678593758
vector: -0.0158268567
vector: -0.0301091801
vector: -0.047721222
vector: 0.0486889854
vector: -0.0031820375
vector: -0.00780140189
vector: -0.0473188572
vector: -0.0377694
vector: 0.00300174044
vector: 0.00725157047
vector: 0.0334912054
vector: -0.0190392211
vector: -0.00786152
vector: -0.00393901
vector: 0.0346906558
vector: -0.0048984047
vector: -0.0289619621
vector: -0.0132993627
vector: -0.0260517057
vector: -0.0194860194
vector: -0.00903460104
vector: -0.0720684156
vector: -0.0326718502
vector: -0.0178757478
vector: 0.0116493832
vector: -0.0249003451
vector: -0.012765849
vector: -0.0367143154
vector: 0.0253556
vector: -0.000750295934
vector: 0.00800141
vector: -0.0399938263
vector: -0.0156591497
vector: 0.00355092739
vector: 0.00527952937
vector: -0.00371489814
vector: 0.00979584455
vector: 0.0128817623
vector: -0.0257944819
vector: 0.00532698631
vector: -0.0054745716
vector: -0.0402425155
vector: 0.0023749
vector: -0.0191466119
vector: -0.000185457975
vector: 0.0123729743
vector: -0.0285796933
vector: 0.027248418
vector: -0.00682593649
vector: -0.027824128
vector: -0.0483085215
vector: 0.0509723
vector: 0.0150561249
vector: -0.0671003759
vector: -0.0302604195
vector: 0.0335228
vector: -0.00811030716
vector: -0.0476540774
vector: -0.0483691692
vector: 0.0257905629
vector: 0.0216702223
vector: 0.0756625161
vector: -0.0114407977
vector: 0.0250642728
vector: 0.0492128
vector: 0.0104585467
vector: 0.0354731493
vector: 0.0342974328
vector: -0.0315436497
vector: -0.0416372307
vector: 0.0251945127
vector: 0.0550534874
vector: -0.00769103458
vector: -0.0269156434
vector: 0.0629401803
vector: 0.00849406514
vector: 0.0277529527
vector: 0.00905152876
vector: -0.0214481559
vector: -0.0336390026
vector: 0.0172805414
vector: -0.0254873466
vector: 0.00181326456
vector: -0.00262851967
vector: 0.00212677
vector: 0.021096956
vector: -0.0310921967
vector: 0.0090319775
vector: -0.0244406015
vector: -0.00450366875
vector: 0.0024087145
vector: -0.021715302
vector: -0.0187266618
vector: 0.00639256975
vector: 0.00592296245
vector: -0.0126604829
vector: -0.00361264497
vector: 0.018905757
vector: 0.0180640183
vector: -0.0142810205
vector: 0.0201476328
vector: -0.0371763296
vector: -0.016901549
vector: 0.0173841435
vector: 0.0144714024
vector: 0.0371279158
vector: -0.0259519368
vector: -0.0135354148
vector: 0.0480304547
vector: 0.0241338
vector: 0.00995781645
vector: -0.0145506114
vector: -0.0132244434
vector: 0.0104087936
vector: -0.0019786309
vector: -0.010059624
vector: 0.0418220572
vector: 0.00906385668
vector: -0.0608912781
vector: 0.0269323979
vector: 0.0267148074
vector: 0.0114678359
vector: -0.00542484596
vector: 0.0605565161
vector: 0.00729974685
vector: 0.0284503475
vector: 0.000257256615
vector: -0.029183466
vector: 0.029569421
vector: 0.000915852608
vector: 0.0109864781
vector: 0.0160177927
vector: -0.0311278421
vector: -0.00568914367
vector: -0.045658119
vector: -0.0380534567
vector: 0.00769931311
vector: -0.0169286355
vector: -0.0414473452
vector: 0.0346236937
vector: 0.00468268059
vector: -0.000171230145
vector: -0.0555126853
vector: -0.0213408619
vector: -0.00562082976
vector: -0.042139709
vector: 0.00932554808
vector: 0.0544001274
vector: 0.0186972376
vector: 0.0127014797
vector: 0.0039651487
vector: 0.0317394622
vector: 0.00775914779
vector: -0.0223883521
vector: -0.0120635182
vector: 0.0196055751
vector: 0.0291586574
vector: 0.0337944
num_dimensions: 768
}
}
}
outputs {
id: "ce36cd3d9aa645338238341bfbdfcf45"
status {
code: SUCCESS
description: "Ok"
}
created_at {
seconds: 1700723375
nanos: 640238148
}
model {
id: "multilingual-text-clustering"
name: "multilingual-text-clustering"
created_at {
seconds: 1607379316
nanos: 936028000
}
modified_at {
seconds: 1657111032
nanos: 332505000
}
app_id: "main"
model_version {
id: "f3f0dbe5e9ec4072ae4aa2794021982b"
created_at {
seconds: 1607365607
nanos: 249885000
}
status {
code: MODEL_TRAINED
description: "Model is trained and ready"
}
visibility {
gettable: PUBLIC
}
app_id: "main"
user_id: "clarifai"
metadata {
}
}
user_id: "clarifai"
model_type_id: "clusterer"
visibility {
gettable: PUBLIC
}
workflow_recommended {
}
}
data {
clusters {
id: "9_17"
projection: 0.210836634
projection: -0.248410583
}
}
}
Audio
Let's illustrate how you would get the sentiment of an audio input using Clarifai's asr-sentiment workflow.
- Python
- JavaScript (REST)
- NodeJS
- Java
- PHP
- cURL
###########################################################################################
# In this section, we set the user authentication, user and app ID, workflow ID, and
# audio URL. Change these strings to run your own example.
##########################################################################################
# Your PAT (Personal Access Token) can be found in the Account's Security section
PAT = "YOUR_PAT_HERE"
USER_ID = "clarifai"
APP_ID = "main"
# Change these to make your own predictions
WORKFLOW_ID = "asr-sentiment"
AUDIO_URL = "https://samples.clarifai.com/negative_sentence_1.wav"
# Or, to use a local audio file, assign the location variable
# AUDIO_FILE_LOCATION = "YOUR_AUDIO_FILE_LOCATION_HERE"
##########################################################################
# YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
##########################################################################
from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2

channel = ClarifaiChannel.get_grpc_channel()
stub = service_pb2_grpc.V2Stub(channel)

metadata = (("authorization", "Key " + PAT),)

userDataObject = resources_pb2.UserAppIDSet(
    user_id=USER_ID, app_id=APP_ID
)  # The userDataObject is required when using a PAT

# To use a local audio file, uncomment the following lines
# with open(AUDIO_FILE_LOCATION, "rb") as f:
#     audio_bytes = f.read()

post_workflow_results_response = stub.PostWorkflowResults(
    service_pb2.PostWorkflowResultsRequest(
        user_app_id=userDataObject,
        workflow_id=WORKFLOW_ID,
        inputs=[
            resources_pb2.Input(
                data=resources_pb2.Data(
                    audio=resources_pb2.Audio(
                        url=AUDIO_URL,
                        # base64=audio_bytes
                    )
                )
            )
        ],
    ),
    metadata=metadata,
)
if post_workflow_results_response.status.code != status_code_pb2.SUCCESS:
    print(post_workflow_results_response.status)
    raise Exception(
        "Post workflow results failed, status: "
        + post_workflow_results_response.status.description
    )

# We'll get one WorkflowResult for each input we used above. Because of one input, we have here one WorkflowResult
results = post_workflow_results_response.results[0]

# Each model we have in the workflow will produce its output
for output in results.outputs:
    model = output.model
    print("Output for the model: `%s`" % model.id)
    for concept in output.data.concepts:
        print("\t%s %.2f" % (concept.name, concept.value))
    # Only print transcribed text when the model actually produced some (the ASR
    # model does; the sentiment classifier leaves the text field empty), matching
    # the guard used in the NodeJS and PHP examples
    if output.data.text.raw:
        print(output.data.text.raw)

# Uncomment this line to print the raw output
# print(results)
<!--index.html file-->
<script>
////////////////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, and
// audio URL. Change these strings to run your own example.
///////////////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
const PAT = "YOUR_PAT_HERE";
const USER_ID = "clarifai";
const APP_ID = "main";
// Change these to make your own predictions
const WORKFLOW_ID = "asr-sentiment";
const AUDIO_URL = "https://samples.clarifai.com/negative_sentence_1.wav";
///////////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
///////////////////////////////////////////////////////////////////////////////////
const raw = JSON.stringify({
"user_app_id": {
"user_id": USER_ID,
"app_id": APP_ID
},
"inputs": [
{
"data": {
"audio": {
"url": AUDIO_URL
}
}
}
]
});
const requestOptions = {
method: "POST",
headers: {
"Accept": "application/json",
"Authorization": "Key " + PAT
},
body: raw
};
fetch(`https://api.clarifai.com/v2/workflows/${WORKFLOW_ID}/results`, requestOptions)
.then(response => response.text())
.then(result => console.log(result))
.catch(error => console.log("error", error));
</script>
//index.js file
//////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, and
// audio URL. Change these strings to run your own example.
/////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
const PAT = "YOUR_PAT_HERE";
const USER_ID = "clarifai";
const APP_ID = "main";
// Change these to make your own predictions
const WORKFLOW_ID = "asr-sentiment";
const AUDIO_URL = "https://samples.clarifai.com/negative_sentence_1.wav";
// Or, to use a local audio file, assign the location variable
// const AUDIO_FILE_LOCATION = "YOUR_AUDIO_FILE_LOCATION_HERE";
/////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
/////////////////////////////////////////////////////////////////////////////
const { ClarifaiStub, grpc } = require("clarifai-nodejs-grpc");
const stub = ClarifaiStub.grpc();
// This will be used by every Clarifai endpoint call
const metadata = new grpc.Metadata();
metadata.set("authorization", "Key " + PAT);
// To use a local audio file, uncomment the following lines
// const fs = require("fs");
// const audioBytes = fs.readFileSync(AUDIO_FILE_LOCATION);
stub.PostWorkflowResults(
  {
    user_app_id: {
      "user_id": USER_ID,
      "app_id": APP_ID,
    },
    workflow_id: WORKFLOW_ID,
    inputs: [{
      data: {
        audio: {
          url: AUDIO_URL,
          // base64: audioBytes
        }
      }
    }],
  },
  metadata,
  (err, response) => {
    if (err) {
      throw new Error(err);
    }
    // 10000 is the numeric value of the SUCCESS status code
    if (response.status.code !== 10000) {
      throw new Error(
        "Post workflow results failed, status: " + response.status.description
      );
    }
    // We'll get one WorkflowResult for each input we used above. Because of one input, we have here
    // one WorkflowResult
    const results = response.results[0];
    // Each model we have in the workflow will produce its output
    for (const output of results.outputs) {
      const model = output.model;
      console.log("Output for the model: `" + model.id + "`");
      for (const concept of output.data.concepts) {
        console.log("\t" + concept.name + " " + concept.value);
      }
      // Only the ASR model populates the text field; skip it otherwise
      if (output.data.text) {
        console.log(output.data.text.raw);
      }
    }
  }
);
package com.clarifai.example;

import com.clarifai.channel.ClarifaiChannel;
import com.clarifai.credentials.ClarifaiCallCredentials;
import com.clarifai.grpc.api.*;
import com.clarifai.grpc.api.status.StatusCode;
import com.google.protobuf.ByteString;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class ClarifaiExample {
    ///////////////////////////////////////////////////////////////////////////////////
    // In this section, we set the user authentication, app ID, workflow ID, and
    // audio URL. Change these strings to run your own example.
    ///////////////////////////////////////////////////////////////////////////////////
    // Your PAT (Personal Access Token) can be found in the portal under Authentication
    static final String PAT = "YOUR_PAT_HERE";
    static final String USER_ID = "clarifai";
    static final String APP_ID = "main";
    // Change these to make your own predictions
    static final String WORKFLOW_ID = "asr-sentiment";
    static final String AUDIO_URL = "https://samples.clarifai.com/negative_sentence_1.wav";
    // Or, to use a local audio file, assign the location variable
    // static final String AUDIO_FILE_LOCATION = "YOUR_AUDIO_FILE_LOCATION_HERE";
    ///////////////////////////////////////////////////////////////////////////////////
    // YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
    ///////////////////////////////////////////////////////////////////////////////////
    public static void main(String[] args) throws IOException {
        V2Grpc.V2BlockingStub stub = V2Grpc.newBlockingStub(ClarifaiChannel.INSTANCE.getGrpcChannel())
            .withCallCredentials(new ClarifaiCallCredentials(PAT));
        PostWorkflowResultsResponse postWorkflowResultsResponse = stub.postWorkflowResults(
            PostWorkflowResultsRequest.newBuilder()
                .setUserAppId(UserAppIDSet.newBuilder().setUserId(USER_ID).setAppId(APP_ID))
                .setWorkflowId(WORKFLOW_ID)
                .addInputs(
                    Input.newBuilder().setData(
                        Data.newBuilder().setAudio(
                            Audio.newBuilder().setUrl(AUDIO_URL)
                            // To use a local audio file, uncomment the following lines
                            //Audio.newBuilder().setBase64(ByteString.copyFrom(Files.readAllBytes(
                            //    new File(AUDIO_FILE_LOCATION).toPath()
                            //)))
                        )
                    )
                )
                .build()
        );
        if (postWorkflowResultsResponse.getStatus().getCode() != StatusCode.SUCCESS) {
            throw new RuntimeException("Post workflow results failed, status: " + postWorkflowResultsResponse.getStatus());
        }
        // We'll get one WorkflowResult for each input we used above. Because of one input, we have here
        // one WorkflowResult
        WorkflowResult results = postWorkflowResultsResponse.getResults(0);
        // Each model we have in the workflow will produce its output
        for (Output output : results.getOutputsList()) {
            Model model = output.getModel();
            System.out.println("Output for the model: `" + model.getId() + "`");
            for (Concept concept : output.getData().getConceptsList()) {
                System.out.printf("%s %.2f%n", concept.getName(), concept.getValue());
            }
            // Only print transcribed text when the model actually produced some (the ASR
            // model does; the sentiment classifier leaves the text field empty). Protobuf
            // getters never return null — an unset string field yields "".
            String rawText = output.getData().getText().getRaw();
            if (!rawText.isEmpty()) {
                System.out.println(rawText);
            }
        }
    }
}
<?php
require __DIR__ . "/vendor/autoload.php";
///////////////////////////////////////////////////////////////////////////////////////////
// In this section, we set the user authentication, user and app ID, workflow ID, and
// audio URL. Change these strings to run your own example.
////////////////////////////////////////////////////////////////////////////////////////////
// Your PAT (Personal Access Token) can be found in the Account's Security section
$PAT = "YOUR_PAT_HERE";
$USER_ID = "clarifai";
$APP_ID = "main";
// Change these to make your own predictions
$WORKFLOW_ID = "asr-sentiment";
$AUDIO_URL = "https://samples.clarifai.com/negative_sentence_1.wav";
// Or, to use a local audio file, assign the location variable
// $AUDIO_FILE_LOCATION = "YOUR_AUDIO_FILE_LOCATION_HERE";
///////////////////////////////////////////////////////////////////////////////////
// YOU DO NOT NEED TO CHANGE ANYTHING BELOW THIS LINE TO RUN THIS EXAMPLE
///////////////////////////////////////////////////////////////////////////////////
use Clarifai\ClarifaiClient;
use Clarifai\Api\Data;
use Clarifai\Api\Audio;
use Clarifai\Api\Input;
use Clarifai\Api\PostWorkflowResultsRequest;
use Clarifai\Api\Status\StatusCode;
use Clarifai\Api\UserAppIDSet;
$client = ClarifaiClient::grpc();
$metadata = ["Authorization" => ["Key " . $PAT]];
$userDataObject = new UserAppIDSet([
    "user_id" => $USER_ID,
    "app_id" => $APP_ID,
]);
// To use a local audio file, uncomment the following line
//$audioData = file_get_contents($AUDIO_FILE_LOCATION);
// Let's make an RPC call to the Clarifai platform. It uses the opened gRPC client channel to communicate a
// request and then wait for the response
[$response, $status] = $client
    ->PostWorkflowResults(
        // The request object carries the request along with the request status and other metadata related to the request itself
        new PostWorkflowResultsRequest([
            "user_app_id" => $userDataObject,
            "workflow_id" => $WORKFLOW_ID,
            "inputs" => [
                new Input([
                    // The Input object wraps the Data object in order to meet the API specification
                    "data" => new Data([
                        // The Data object is constructed around the Audio object. It offers a container that has additional independent
                        // metadata. In this particular use case, no other metadata is needed to be specified
                        "audio" => new Audio([
                            // In the Clarifai platform, an audio input is defined by a special Audio object
                            "url" => $AUDIO_URL,
                            //"base64" => $audioData
                        ]),
                    ]),
                ]),
            ],
        ]),
        $metadata
    )
    ->wait();
// A response is returned and the first thing we do is check the status of it
// A successful response will have a status code of 0; otherwise, there is some error
if ($status->code !== 0) {
    throw new Exception("Error: {$status->details}");
}
// In addition to the RPC response status, there is a Clarifai API status that reports if the operation was a success or failure
// (not just that the communication was successful)
if ($response->getStatus()->getCode() != StatusCode::SUCCESS) {
    throw new Exception(
        "Failure response: " .
            $response->getStatus()->getDescription() .
            " " .
            $response->getStatus()->getDetails()
    );
}
// We'll get one WorkflowResult for each input we used above. Because of one input, we have here one WorkflowResult
$results = $response->getResults()[0];
// Each model we have in the workflow will produce its output
foreach ($results->getOutputs() as $output) {
    $model = $output->getModel();
    echo "Output for the model: `" . $model->getId() . "`" . "<br>";
    foreach ($output->getData()->getConcepts() as $concept) {
        echo $concept->getName() .
            " " .
            number_format($concept->getValue(), 2) .
            "<br>";
    }
    // Only the ASR model populates the text field; skip it otherwise
    $textData = $output->getData()->getText();
    if ($textData !== null) {
        echo $textData->getRaw() . "<br>";
    }
}
// Uncomment this line to print the raw output
// echo $results->serializeToJsonString();
curl --request POST "https://api.clarifai.com/v2/users/clarifai/apps/main/workflows/asr-sentiment/results" \
  --header "authorization: Key YOUR_PAT_HERE" \
  --header "content-type: application/json" \
  --data '{
    "inputs": [
      {
        "data": {
          "audio": {
            "url": "https://samples.clarifai.com/negative_sentence_1.wav"
          }
        }
      }
    ]
  }'
Text Output Example
Output for the model: `asr-wav2vec2-large-robust-ft-swbd-300h-english`
I AM NOT FLYING TO ENGLAND
Output for the model: `sentiment-analysis-twitter-roberta-base`
LABEL_0 0.92
LABEL_1 0.07
LABEL_2 0.01
Raw Output Example
status {
code: SUCCESS
description: "Ok"
}
input {
id: "c7b258c785614694bc1d9982e847e327"
data {
audio {
url: "https://samples.clarifai.com/negative_sentence_1.wav"
}
}
}
outputs {
id: "b562c938bb4545199a6908eabd8f6295"
status {
code: SUCCESS
description: "Ok"
}
created_at {
seconds: 1700762370
nanos: 800729365
}
model {
id: "asr-wav2vec2-large-robust-ft-swbd-300h-english"
name: "wav2vec2-large-robust-ft-swbd-300"
created_at {
seconds: 1636021464
nanos: 884891000
}
modified_at {
seconds: 1659644487
nanos: 107647000
}
app_id: "asr"
model_version {
id: "7adce5efc90744ed986fbd0bdc40000f"
created_at {
seconds: 1638786626
nanos: 104602000
}
status {
code: MODEL_TRAINED
description: "Model is trained and ready"
}
visibility {
gettable: PUBLIC
}
app_id: "asr"
user_id: "facebook"
metadata {
}
license: "Apache-2.0"
}
user_id: "facebook"
model_type_id: "audio-to-text"
visibility {
gettable: PUBLIC
}
workflow_recommended {
}
}
data {
text {
raw: "I AM NOT FLYING TO ENGLAND"
text_info {
encoding: "UnknownTextEnc"
}
}
}
}
outputs {
id: "1a3815e9c46b425e8247a4c491cfa0f2"
status {
code: SUCCESS
description: "Ok"
}
created_at {
seconds: 1700762370
nanos: 800742640
}
model {
id: "sentiment-analysis-twitter-roberta-base"
name: "sentiment-analysis-twitter-roberta-base"
created_at {
seconds: 1656525158
nanos: 299847000
}
modified_at {
seconds: 1659564125
nanos: 82152000
}
app_id: "text-classification"
model_version {
id: "f7f3df02b79d4080a0233ec1fb6404bd"
created_at {
seconds: 1656525158
nanos: 310142000
}
status {
code: MODEL_TRAINED
description: "Model is trained and ready"
}
visibility {
gettable: PUBLIC
}
app_id: "text-classification"
user_id: "erfan"
metadata {
fields {
key: "Model version logs zipped"
value {
string_value: "https://s3.amazonaws.com/clarifai-temp/prod/f7f3df02b79d4080a0233ec1fb6404bd.zip"
}
}
}
}
user_id: "erfan"
model_type_id: "text-classifier"
task: "text-classification"
visibility {
gettable: PUBLIC
}
workflow_recommended {
}
}
data {
concepts {
id: "LABEL_0"
name: "LABEL_0"
value: 0.91823113
app_id: "text-classification"
}
concepts {
id: "LABEL_1"
name: "LABEL_1"
value: 0.0743510351
app_id: "text-classification"
}
concepts {
id: "LABEL_2"
name: "LABEL_2"
value: 0.00741776684
app_id: "text-classification"
}
}
}