
Event Streams REST Producer v1.1.0

Scroll down for code samples, example requests and responses. Select a language for code samples from the tabs above or the mobile navigation menu.

REST API for producing messages to Event Streams.
Note: You must have Event Streams version 2019.1.1 or later to use the REST API.

Getting started
For an introduction to the REST Producer, including authentication for the version of Event Streams you are using, see the producer API.
To retrieve the base URL, run the following command: cloudctl es init
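
For example, here is a minimal sketch in Python of a produce call built on that base URL. The base URL, topic name, and message body below are placeholders, and authentication headers are omitted because the scheme depends on your Event Streams version; see the producer API documentation.

# A minimal sketch, not an official sample: the base URL and topic name are
# placeholders, and authentication headers are omitted (they depend on your
# Event Streams version).
import requests

base_url = "https://my-eventstreams-rest-proxy.example.com"  # from cloudctl es init
topic_name = "my-topic"

r = requests.post(
    f"{base_url}/topics/{topic_name}/records",
    headers={"Content-Type": "application/json", "Accept": "application/json"},
    data='{"order": 42}',  # the request body becomes the message value
)
print(r.status_code, r.json())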

Notes
The total length of the URI must not exceed 10KB.

Default

produceMessage

Code samples

# You can also use wget
curl -X POST /topics/{topic_name}/records \
  -H 'Content-Type: application/json' \
  -H 'Accept: application/json' \
  -d '{}'

POST /topics/{topic_name}/records HTTP/1.1

Content-Type: application/json
Accept: application/json

var headers = {
  'Content-Type':'application/json',
  'Accept':'application/json'
};

$.ajax({
  url: '/topics/{topic_name}/records',
  method: 'post',
  data: JSON.stringify({}),  // the request body becomes the message value
  headers: headers,
  success: function(data) {
    console.log(JSON.stringify(data));
  }
})

const fetch = require('node-fetch');
const inputBody = '{}';
const headers = {
  'Content-Type':'application/json',
  'Accept':'application/json'

};

fetch('/topics/{topic_name}/records',
{
  method: 'POST',
  body: inputBody,
  headers: headers
})
.then(function(res) {
    return res.json();
}).then(function(body) {
    console.log(body);
});

require 'rest-client'
require 'json'

headers = {
  'Content-Type' => 'application/json',
  'Accept' => 'application/json'
}

# The request body ('{}' here) becomes the message value
result = RestClient.post '/topics/{topic_name}/records', '{}', headers

p JSON.parse(result)

import requests

headers = {
  'Content-Type': 'application/json',
  'Accept': 'application/json'
}

# The request body ('{}' here) becomes the message value
r = requests.post('/topics/{topic_name}/records', data='{}', headers=headers)

print(r.json())

URL obj = new URL("/topics/{topic_name}/records");
HttpURLConnection con = (HttpURLConnection) obj.openConnection();
con.setRequestMethod("POST");
con.setRequestProperty("Content-Type", "application/json");
con.setRequestProperty("Accept", "application/json");
con.setDoOutput(true);
// The request body ("{}" here) becomes the message value
try (OutputStream os = con.getOutputStream()) {
    os.write("{}".getBytes("UTF-8"));
}
int responseCode = con.getResponseCode();
BufferedReader in = new BufferedReader(
    new InputStreamReader(con.getInputStream()));
String inputLine;
StringBuffer response = new StringBuffer();
while ((inputLine = in.readLine()) != null) {
    response.append(inputLine);
}
in.close();
System.out.println(response.toString());

package main

import (
       "bytes"
       "net/http"
)

func main() {

    headers := map[string][]string{
        "Content-Type": []string{"application/json"},
        "Accept": []string{"application/json"},
    }

    // The request body ("{}" here) becomes the message value
    jsonReq := []byte(`{}`)
    data := bytes.NewBuffer(jsonReq)
    req, err := http.NewRequest("POST", "/topics/{topic_name}/records", data)
    if err != nil {
        // handle error
    }
    req.Header = headers

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        // handle error
    }
    defer resp.Body.Close()
    // ...
}

POST /topics/{topic_name}/records

Produce a message.

Produce the body of the request as a message in Event Streams.

Body parameter

{}

Parameters

Name In Type Required Description
topic_name path string true The name of the topic you want to produce the messages to.
key query string false The record key to use when producing a Kafka message. If omitted, the default round-robin partitioner is used.
keyType query string false The type of the key, either text or binary. If binary, the supplied data must be base16 encoded.
headers query string false Comma-separated list of key:value pairs to be set as message properties. Each value must be base16 encoded, for example: /records?headers=colour:726463,size:666f726d696461626c65 (see the encoding sketch after this table).
schemaname query string false The name of the schema you want to use when producing messages.
schemaversion query string false The schema version you want to use when producing messages.
schemavalidation query boolean false Set to 'true' to enable validation of your message body against the provided schema (keys cannot be schema validated).
body body object true The message to be produced to the topic.
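
To illustrate the base16 encoding expected by keyType=binary and the headers parameter, here is a hedged sketch in Python. The key and header values are illustrative only; the encoded header values match the example in the table above, and the commented-out lines show where the optional schema parameters would go.

# A sketch of base16 (hex) encoding for the key and headers query parameters.
# The key and header values are illustrative only.
import binascii

def base16(value):
    return binascii.hexlify(value.encode("utf-8")).decode("ascii")

params = {
    "key": base16("order-42"),  # binary keys must be base16 encoded
    "keyType": "binary",
    # colour -> 726463 ("rdc"), size -> 666f726d696461626c65 ("formidable"),
    # matching the example in the table above
    "headers": "colour:{},size:{}".format(base16("rdc"), base16("formidable")),
    # "schemaname": "my-schema",      # optional schema parameters
    # "schemaversion": "1",
    # "schemavalidation": "true",
}
# Resulting query string:
#   ?key=6f726465722d3432&keyType=binary&headers=colour:726463,size:666f726d696461626c65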

Enumerated Values

Parameter Value
keyType text
keyType binary

Example responses

200 Response

{
  "metadata": {
    "topic": "string",
    "timestamp": "string",
    "partition": 0,
    "offset": 0
  },
  "Comment": "string"
}

Responses

Status Meaning Description Schema
200 OK Message successfully sent to Event Streams. successresponse
400 Bad Request Not a valid request. error_response
401 Unauthorized Unauthorized to produce to this topic. error_response
404 Not Found The topic or specified schema does not exist. If the Kafka setting auto.create.topics.enable is set to true (the default), Kafka automatically creates the topic with the default replication factor and number of partitions. If it is set to false, this error is returned when attempting to produce to a topic that does not exist. error_response
408 Request Timeout The request timed out producing to Event Streams. error_response
411 Length Required The request did not contain any data. error_response
500 Internal Server Error The request failed due to an internal server error. error_response
503 Service Unavailable The request failed due to Event Streams brokers being unavailable. error_response
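
A hedged sketch in Python of handling these responses, assuming the response bodies follow the successresponse and error_response schemas documented below:

def handle_produce_response(r):
    """r is the requests.Response returned by one of the produce calls above."""
    if r.status_code == 200:
        # Successful produce: print the RecordMetadata fields from successresponse
        meta = r.json()["metadata"]
        print("Produced to {} partition {} at offset {}".format(
            meta["topic"], meta["partition"], meta["offset"]))
    else:
        # Assumes the body follows the error_response schema; some failures
        # (for example from an intermediate proxy) may not return JSON
        err = r.json()
        print("Produce failed: HTTP {} - {}".format(err["error_code"], err["message"]))
        kafka_code = err.get("error", {}).get("kafka_error_code")
        if kafka_code is not None:
            print("Kafka error code: {}".format(kafka_code))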

Schemas

successresponse

{
  "metadata": {
    "topic": "string",
    "timestamp": "string",
    "partition": 0,
    "offset": 0
  },
  "Comment": "string"
}

Object returned on successful request.

Properties

Name Type Required Restrictions Description
metadata metadata true none The RecordMetadata object returned from Event Streams.
Comment string false none User-provided string explaining why a schema is deprecated. Ignored if schemas are not in use or if the schema used is active.

metadata

{
  "topic": "string",
  "timestamp": "string",
  "partition": 0,
  "offset": 0
}

The RecordMetadata object returned from Event Streams.

Properties

Name Type Required Restrictions Description
topic string true none The name of the topic the message was produced to.
timestamp string true none The timestamp of the message.
partition integer true none The partition the message was sent to.
offset integer true none The offset the message was sent to.

error_response

{
  "error_code": 0,
  "message": "string",
  "error": {
    "error_code": 0,
    "message": "string",
    "kafka_error_code": 0,
    "topic": "string",
    "incident": "string",
    "schema": "string"
  }
}

Error object representing the error that occurred while producing the message.

Properties

Name Type Required Restrictions Description
error_code integer true none HTTP Status code of the response.
message string true none Error message.
error object true none none
» error_code integer true none HTTP Status code of the response.
» message string true none Error message.
» kafka_error_code integer false none The error code returned by Apache Kafka if available.
» topic string false none The name of the topic that the request was performed against (if any).
» incident string false none Optional incident ID.
» schema string false none The name of the schema that the request was performed against (if any).