
Logs Pipelines

Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.

Notes:

These endpoints are only available for admin users. Make sure to use an application key created by an admin.

Grok parsing rules may affect JSON output and require that returned data be configured before being used in a request. For example, if you use the data returned by one request in another request body and you have a parsing rule that uses a regex pattern such as \s for spaces, you must configure all escaped spaces as %{space} before using them in the body data.
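
As an illustration (the rule names and patterns below are hypothetical, not from the API), a match rule sent back through the API could define a space support rule instead of carrying a raw \s:

{
  "grok": {
    "support_rules": "space \\s+",
    "match_rules": "my_rule %{word:user}%{space}%{number:code}"
  }
}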

GET https://api.ap1.datadoghq.com/api/v1/logs/config/pipeline-order
GET https://api.datadoghq.eu/api/v1/logs/config/pipeline-order
GET https://api.ddog-gov.com/api/v1/logs/config/pipeline-order
GET https://api.datadoghq.com/api/v1/logs/config/pipeline-order
GET https://api.us3.datadoghq.com/api/v1/logs/config/pipeline-order
GET https://api.us5.datadoghq.com/api/v1/logs/config/pipeline-order

Overview

Get the current order of your pipelines. This endpoint takes no JSON arguments.

Response

OK

Object containing the ordered list of pipeline IDs.

Field

Type

Description

pipeline_ids [required]

[string]

Ordered Array of <PIPELINE_ID> strings; the order of pipeline IDs in the array defines the overall Pipelines order for Datadog.

{
  "pipeline_ids": [
    "tags",
    "org_ids",
    "products"
  ]
}

Forbidden

Error response object.

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Too many requests

Error response object.

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Code example

# Curl command (use the endpoint for your Datadog site)
curl -X GET "https://api.datadoghq.com/api/v1/logs/config/pipeline-order" \
  -H "Accept: application/json" \
  -H "DD-API-KEY: ${DD_API_KEY}" \
  -H "DD-APPLICATION-KEY: ${DD_APP_KEY}"
"""
Get pipeline order returns "OK" response
"""

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.logs_pipelines_api import LogsPipelinesApi

configuration = Configuration()
with ApiClient(configuration) as api_client:
    api_instance = LogsPipelinesApi(api_client)
    response = api_instance.get_logs_pipeline_order()

    print(response)

Instructions

First install the library and its dependencies, then save the example to example.py, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" python3 "example.py"
# Get pipeline order returns "OK" response

require "datadog_api_client"
api_instance = DatadogAPIClient::V1::LogsPipelinesAPI.new
p api_instance.get_logs_pipeline_order()

Instructions

First install the library and its dependencies, then save the example to example.rb, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" ruby "example.rb"
// Get pipeline order returns "OK" response

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	ctx := datadog.NewDefaultContext(context.Background())
	configuration := datadog.NewConfiguration()
	apiClient := datadog.NewAPIClient(configuration)
	api := datadogV1.NewLogsPipelinesApi(apiClient)
	resp, r, err := api.GetLogsPipelineOrder(ctx)

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.GetLogsPipelineOrder`: %v\n", err)
		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
	}

	responseContent, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Fprintf(os.Stdout, "Response from `LogsPipelinesApi.GetLogsPipelineOrder`:\n%s\n", responseContent)
}

Instructions

First install the library and its dependencies, then save the example to main.go, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" go run "main.go"
// Get pipeline order returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsPipelinesOrder;

public class Example {
  public static void main(String[] args) {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);

    try {
      LogsPipelinesOrder result = apiInstance.getLogsPipelineOrder();
      System.out.println(result);
    } catch (ApiException e) {
      System.err.println("Exception when calling LogsPipelinesApi#getLogsPipelineOrder");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}

Instructions

First install the library and its dependencies, then save the example to Example.java, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" java "Example.java"
// Get pipeline order returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;

#[tokio::main]
async fn main() {
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.get_logs_pipeline_order().await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}

Instructions

First install the library and its dependencies, then save the example to src/main.rs, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" cargo run
/**
 * Get pipeline order returns "OK" response
 */

import { client, v1 } from "@datadog/datadog-api-client";

const configuration = client.createConfiguration();
const apiInstance = new v1.LogsPipelinesApi(configuration);

apiInstance
  .getLogsPipelineOrder()
  .then((data: v1.LogsPipelinesOrder) => {
    console.log(
      "API called successfully. Returned data: " + JSON.stringify(data)
    );
  })
  .catch((error: any) => console.error(error));

Instructions

First install the library and its dependencies, then save the example to example.ts, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" tsc "example.ts"

PUT https://api.ap1.datadoghq.com/api/v1/logs/config/pipeline-order
PUT https://api.datadoghq.eu/api/v1/logs/config/pipeline-order
PUT https://api.ddog-gov.com/api/v1/logs/config/pipeline-order
PUT https://api.datadoghq.com/api/v1/logs/config/pipeline-order
PUT https://api.us3.datadoghq.com/api/v1/logs/config/pipeline-order
PUT https://api.us5.datadoghq.com/api/v1/logs/config/pipeline-order

Overview

Update the order of your pipelines. Since logs are processed sequentially, reordering a pipeline can change the structure and content of the data processed by other pipelines and their processors.

Note: Using the PUT method updates your pipeline order by replacing the current order with the new one sent to your Datadog organization.

This endpoint requires the logs_write_pipelines permission.

Request

Body Data (required)

Object containing the new ordered list of pipeline IDs.

Field

Type

Description

pipeline_ids [required]

[string]

Ordered Array of <PIPELINE_ID> strings; the order of pipeline IDs in the array defines the overall Pipelines order for Datadog.

{
  "pipeline_ids": [
    "tags",
    "org_ids",
    "products"
  ]
}

Response

OK

Object containing the ordered list of pipeline IDs.

Field

Type

Description

pipeline_ids [required]

[string]

Ordered Array of <PIPELINE_ID> strings; the order of pipeline IDs in the array defines the overall Pipelines order for Datadog.

{
  "pipeline_ids": [
    "tags",
    "org_ids",
    "products"
  ]
}

Bad Request

Response returned by the Logs API when errors occur.

Field

Type

Description

error

object

Error returned by the Logs API

code

string

Code identifying the error

details

[object]

Additional error details

message

string

Error message

{
  "error": {
    "code": "string",
    "details": [],
    "message": "string"
  }
}

Forbidden

Error response object.

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Unprocessable Entity

Response returned by the Logs API when errors occur.

Field

Type

Description

error

object

Error returned by the Logs API

code

string

Code identifying the error

details

[object]

Additional error details

message

string

Error message

{
  "error": {
    "code": "string",
    "details": [],
    "message": "string"
  }
}

Too many requests

Error response object.

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Code example

# Curl command (use the endpoint for your Datadog site)
curl -X PUT "https://api.datadoghq.com/api/v1/logs/config/pipeline-order" \
  -H "Accept: application/json" \
  -H "Content-Type: application/json" \
  -H "DD-API-KEY: ${DD_API_KEY}" \
  -H "DD-APPLICATION-KEY: ${DD_APP_KEY}" \
  -d @- << EOF
{
  "pipeline_ids": [
    "tags",
    "org_ids",
    "products"
  ]
}
EOF
"""
Update pipeline order returns "OK" response
"""

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.logs_pipelines_api import LogsPipelinesApi
from datadog_api_client.v1.model.logs_pipelines_order import LogsPipelinesOrder

body = LogsPipelinesOrder(
    pipeline_ids=[
        "tags",
        "org_ids",
        "products",
    ],
)

configuration = Configuration()
with ApiClient(configuration) as api_client:
    api_instance = LogsPipelinesApi(api_client)
    response = api_instance.update_logs_pipeline_order(body=body)

    print(response)

Instructions

First install the library and its dependencies, then save the example to example.py, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" python3 "example.py"
# Update pipeline order returns "OK" response

require "datadog_api_client"
api_instance = DatadogAPIClient::V1::LogsPipelinesAPI.new

body = DatadogAPIClient::V1::LogsPipelinesOrder.new({
  pipeline_ids: [
    "tags",
    "org_ids",
    "products",
  ],
})
p api_instance.update_logs_pipeline_order(body)

Instructions

First install the library and its dependencies, then save the example to example.rb, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" ruby "example.rb"
// Update pipeline order returns "OK" response

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	body := datadogV1.LogsPipelinesOrder{
		PipelineIds: []string{
			"tags",
			"org_ids",
			"products",
		},
	}
	ctx := datadog.NewDefaultContext(context.Background())
	configuration := datadog.NewConfiguration()
	apiClient := datadog.NewAPIClient(configuration)
	api := datadogV1.NewLogsPipelinesApi(apiClient)
	resp, r, err := api.UpdateLogsPipelineOrder(ctx, body)

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.UpdateLogsPipelineOrder`: %v\n", err)
		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
	}

	responseContent, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Fprintf(os.Stdout, "Response from `LogsPipelinesApi.UpdateLogsPipelineOrder`:\n%s\n", responseContent)
}

Instructions

First install the library and its dependencies, then save the example to main.go, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" go run "main.go"
// Update pipeline order returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsPipelinesOrder;
import java.util.Arrays;

public class Example {
  public static void main(String[] args) {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);

    LogsPipelinesOrder body =
        new LogsPipelinesOrder().pipelineIds(Arrays.asList("tags", "org_ids", "products"));

    try {
      LogsPipelinesOrder result = apiInstance.updateLogsPipelineOrder(body);
      System.out.println(result);
    } catch (ApiException e) {
      System.err.println("Exception when calling LogsPipelinesApi#updateLogsPipelineOrder");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}

Instructions

First install the library and its dependencies, then save the example to Example.java, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" java "Example.java"
// Update pipeline order returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsPipelinesOrder;

#[tokio::main]
async fn main() {
    let body = LogsPipelinesOrder::new(vec![
        "tags".to_string(),
        "org_ids".to_string(),
        "products".to_string(),
    ]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.update_logs_pipeline_order(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}

Instructions

First install the library and its dependencies, then save the example to src/main.rs, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" cargo run
/**
 * Update pipeline order returns "OK" response
 */

import { client, v1 } from "@datadog/datadog-api-client";

const configuration = client.createConfiguration();
const apiInstance = new v1.LogsPipelinesApi(configuration);

const params: v1.LogsPipelinesApiUpdateLogsPipelineOrderRequest = {
  body: {
    pipelineIds: ["tags", "org_ids", "products"],
  },
};

apiInstance
  .updateLogsPipelineOrder(params)
  .then((data: v1.LogsPipelinesOrder) => {
    console.log(
      "API called successfully. Returned data: " + JSON.stringify(data)
    );
  })
  .catch((error: any) => console.error(error));

Instructions

First install the library and its dependencies, then save the example to example.ts, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" tsc "example.ts"

GET https://api.ap1.datadoghq.com/api/v1/logs/config/pipelines
GET https://api.datadoghq.eu/api/v1/logs/config/pipelines
GET https://api.ddog-gov.com/api/v1/logs/config/pipelines
GET https://api.datadoghq.com/api/v1/logs/config/pipelines
GET https://api.us3.datadoghq.com/api/v1/logs/config/pipelines
GET https://api.us5.datadoghq.com/api/v1/logs/config/pipelines

Overview

Get all pipelines from your organization. This endpoint takes no JSON arguments.

Response

OK

Array of pipeline ID strings.

Field

Type

Description

filter

object

Filter for logs.

query

string

The filter query.

id

string

ID of the pipeline.

is_enabled

boolean

Whether or not the pipeline is enabled.

is_read_only

boolean

Whether or not the pipeline can be edited.

name

string

Name of the pipeline.

processors

[ <oneOf>]

Ordered list of processors in this pipeline.

Option 1

object

Create custom grok rules to parse the full message or a specific attribute of your raw event. For more information, see the parsing section.

grok [required]

object

Set of rules for the grok parser.

match_rules [required]

string

List of match rules for the grok parser, separated by a new line.

support_rules

string

List of support rules for the grok parser, separated by a new line.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

samples

[string]

List of sample logs to test this grok parser.

source [required]

string

Name of the log attribute to parse.

default: message

type [required]

enum

Type of logs grok parser. Allowed enum values: grok-parser

default: grok-parser

Option 2

object

As Datadog receives logs, it timestamps them using the value(s) from any of these default attributes.

  • timestamp

  • date

  • _timestamp

  • Timestamp

  • eventTime

  • published_date

    If your logs put their dates in an attribute not in this list, use the log date Remapper Processor to define their date attribute as the official log timestamp. The recognized date formats are ISO8601, UNIX (the milliseconds EPOCH format), and RFC3164.

Note: If your logs don’t contain any of the default attributes and you haven’t defined your own date attribute, Datadog timestamps the logs with the date it received them.

If multiple log date remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs date remapper. Allowed enum values: date-remapper

default: date-remapper
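
A minimal sketch of a date remapper definition as it could appear in the processors array (the source attribute and processor name are illustrative):

{
  "type": "date-remapper",
  "name": "Define event_timestamp as the official date",
  "is_enabled": true,
  "sources": ["event_timestamp"]
}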

Option 3

object

Use this Processor if you want to assign some attributes as the official status.

Each incoming status value is mapped as follows.

  • Integers from 0 to 7 map to the Syslog severity standards
  • Strings beginning with emerg or f (case-insensitive) map to emerg (0)
  • Strings beginning with a (case-insensitive) map to alert (1)
  • Strings beginning with c (case-insensitive) map to critical (2)
  • Strings beginning with err (case-insensitive) map to error (3)
  • Strings beginning with w (case-insensitive) map to warning (4)
  • Strings beginning with n (case-insensitive) map to notice (5)
  • Strings beginning with i (case-insensitive) map to info (6)
  • Strings beginning with d, trace or verbose (case-insensitive) map to debug (7)
  • Strings beginning with o or matching OK or Success (case-insensitive) map to OK
  • All others map to info (6)

Note: If multiple log status remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs status remapper. Allowed enum values: status-remapper

default: status-remapper
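
A minimal sketch of a status remapper definition (the source attribute name is illustrative):

{
  "type": "status-remapper",
  "name": "Define level as the official status",
  "is_enabled": true,
  "sources": ["level"]
}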

Option 4

object

Use this processor if you want to assign one or more attributes as the official service.

Note: If multiple service remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs service remapper. Allowed enum values: service-remapper

default: service-remapper

Option 5

object

The message is a key attribute in Datadog. It is displayed in the message column of the Log Explorer and you can do full string search on it. Use this Processor to define one or more attributes as the official log message.

Note: If multiple log message remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: msg

type [required]

enum

Type of logs message remapper. Allowed enum values: message-remapper

default: message-remapper

Option 6

object

The remapper processor remaps any source attribute(s) or tag to another target attribute or tag. Constraints on the tag/attribute name are explained in the Tag Best Practice documentation. Some additional constraints are applied as : or , are not allowed in the target tag/attribute name.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

override_on_conflict

boolean

Override or not the target element if already set.

preserve_source

boolean

Remove or preserve the remapped source element.

source_type

string

Defines if the sources are from log attribute or tag.

default: attribute

sources [required]

[string]

Array of source attributes.

target [required]

string

Final attribute or tag name to remap the sources to.

target_format

enum

If the target_type of the remapper is attribute, try to cast the value to a new specific type. If the cast is not possible, the original type is kept. string, integer, or double are the possible types. If the target_type is tag, this parameter may not be specified. Allowed enum values: auto,string,integer,double

target_type

string

Defines if the final attribute or tag name is from log attribute or tag.

default: attribute

type [required]

enum

Type of logs attribute remapper. Allowed enum values: attribute-remapper

default: attribute-remapper
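
A minimal sketch of an attribute remapper definition (attribute names and the casting choice are illustrative):

{
  "type": "attribute-remapper",
  "name": "Map user to usr.name",
  "is_enabled": true,
  "sources": ["user"],
  "source_type": "attribute",
  "target": "usr.name",
  "target_type": "attribute",
  "target_format": "string",
  "preserve_source": false,
  "override_on_conflict": false
}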

Option 7

object

This processor extracts query parameters and other important parameters from a URL.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

normalize_ending_slashes

boolean

Normalize the ending slashes or not.

sources [required]

[string]

Array of source attributes.

default: http.url

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.url_details

type [required]

enum

Type of logs URL parser. Allowed enum values: url-parser

default: url-parser

Option 8

object

The User-Agent parser takes a User-Agent attribute and extracts the OS, browser, device, and other user data. It recognizes major bots like the Google Bot, Yahoo Slurp, and Bing.

is_enabled

boolean

Whether or not the processor is enabled.

is_encoded

boolean

Define if the source attribute is URL encoded or not.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: http.useragent

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.useragent_details

type [required]

enum

Type of logs User-Agent parser. Allowed enum values: user-agent-parser

default: user-agent-parser

Option 9

object

Use the Category Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log matching a provided search query. Use categories to create groups for an analytical view. For example, URL groups, machine groups, environments, and response time buckets.

Notes:

  • The syntax of the query is the one of Logs Explorer search bar. The query can be done on any log attribute or tag, whether it is a facet or not. Wildcards can also be used inside your query.
  • Once the log has matched one of the Processor queries, it stops. Make sure they are properly ordered in case a log could match several queries.
  • The names of the categories must be unique.
  • Once defined in the Category Processor, you can map categories to log status using the Log Status Remapper.

categories [required]

[object]

Array of filters to match or not a log and their corresponding name to assign a custom value to the log.

filter

object

Filter for logs.

query

string

The filter query.

name

string

Value to assign to the target attribute.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

target [required]

string

Name of the target attribute whose value is defined by the matching category.

type [required]

enum

Type of logs category processor. Allowed enum values: category-processor

default: category-processor
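
A minimal sketch of a category processor definition (the queries, category names, and target attribute are illustrative):

{
  "type": "category-processor",
  "name": "Categorize HTTP status codes",
  "is_enabled": true,
  "target": "http.status_category",
  "categories": [
    { "filter": { "query": "@http.status_code:[200 TO 299]" }, "name": "OK" },
    { "filter": { "query": "@http.status_code:[500 TO 599]" }, "name": "Error" }
  ]
}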

Option 10

object

Use the Arithmetic Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log with the result of the provided formula. This enables you to remap different time attributes with different units into a single attribute, or to compute operations on attributes within the same log.

The formula can use parentheses and the basic arithmetic operators -, +, *, /.

By default, the calculation is skipped if an attribute is missing. Select “Replace missing attribute by 0” to automatically populate missing attribute values with 0 to ensure that the calculation is done. An attribute is missing if it is not found in the log attributes, or if it cannot be converted to a number.

Notes:

  • The operator - needs to be space split in the formula as it can also be contained in attribute names.
  • If the target attribute already exists, it is overwritten by the result of the formula.
  • Results are rounded up to the 9th decimal. For example, if the result of the formula is 0.1234567891, the actual value stored for the attribute is 0.123456789.
  • If you need to scale a unit of measure, see Scale Filter.

expression [required]

string

Arithmetic operation between one or more log attributes.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, missing attributes in expression are replaced by 0; if false, the operation is skipped when an attribute is missing.

name

string

Name of the processor.

target [required]

string

Name of the attribute that contains the result of the arithmetic operation.

type [required]

enum

Type of logs arithmetic processor. Allowed enum values: arithmetic-processor

default: arithmetic-processor
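
A minimal sketch of an arithmetic processor definition (the attribute names and formula are illustrative):

{
  "type": "arithmetic-processor",
  "name": "Convert duration to seconds",
  "is_enabled": true,
  "expression": "duration_ms / 1000",
  "is_replace_missing": false,
  "target": "duration_s"
}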

Option 11

object

Use the string builder processor to add a new attribute (without spaces or special characters) to a log with the result of the provided template. This enables aggregation of different attributes or raw strings into a single attribute.

The template is defined by both raw text and blocks with the syntax %{attribute_path}.

Notes:

  • The processor only accepts attributes with values or an array of values in the blocks.
  • If an attribute cannot be used (object or array of object), it is replaced by an empty string or the entire operation is skipped depending on your selection.
  • If the target attribute already exists, it is overwritten by the result of the template.
  • Results of the template cannot exceed 256 characters.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, it replaces all missing attributes of template by an empty string. If false (default), skips the operation for missing attributes.

name

string

Name of the processor.

target [required]

string

The name of the attribute that contains the result of the template.

template [required]

string

A formula with one or more attributes and raw text.

type [required]

enum

Type of logs string builder processor. Allowed enum values: string-builder-processor

default: string-builder-processor
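
A minimal sketch of a string builder processor definition (the template and target attribute are illustrative):

{
  "type": "string-builder-processor",
  "name": "Build a request summary",
  "is_enabled": true,
  "template": "%{http.method} %{http.url_details.path}",
  "is_replace_missing": true,
  "target": "request_summary"
}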

Option 12

object

Nested Pipelines are pipelines within a pipeline. Use Nested Pipelines to split the processing into two steps. For example, first use a high-level filtering such as team and then a second level of filtering based on the integration, service, or any other tag or attribute.

A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline can only contain Processors.

filter

object

Filter for logs.

query

string

The filter query.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

processors

[object]

Ordered list of processors in this pipeline.

type [required]

enum

Type of logs pipeline processor. Allowed enum values: pipeline

default: pipeline
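
A minimal sketch of a nested pipeline definition (the filter query is illustrative; the inner processors array follows the same schema as the other options listed here):

{
  "type": "pipeline",
  "name": "Team A logs",
  "is_enabled": true,
  "filter": { "query": "team:a" },
  "processors": []
}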

Option 13

object

The GeoIP parser takes an IP address attribute and extracts if available the Continent, Country, Subdivision, and City information in the target attribute path.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: network.client.ip

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: network.client.geoip

type [required]

enum

Type of GeoIP parser. Allowed enum values: geo-ip-parser

default: geo-ip-parser

Option 14

object

Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in the processors mapping table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

default_lookup

string

Value to set the target attribute if the source value is not found in the list.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_table [required]

[string]

Mapping table of values for the source attribute and their associated target attribute values, formatted as ["source_key1,target_value1", "source_key2,target_value2"]

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list or the default_lookup if not found in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor
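
A minimal sketch of a lookup processor definition (attribute names and mapping values are illustrative):

{
  "type": "lookup-processor",
  "name": "Map service ID to service name",
  "is_enabled": true,
  "source": "service_id",
  "target": "service_name",
  "lookup_table": ["1,payments", "2,checkout"],
  "default_lookup": "unknown"
}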

Option 15

object

Note: Reference Tables are in public beta. Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in a Reference Table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_enrichment_table [required]

string

Name of the Reference Table for the source attribute and their associated target attribute values.

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 16

object

There are two ways to improve correlation between application traces and logs.

  1. Follow the documentation on how to inject a trace ID in the application logs and by default log integrations take care of all the rest of the setup.

  2. Use the Trace remapper processor to define a log attribute as its associated trace ID.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources

[string]

Array of source attributes.

default: dd.trace_id

type [required]

enum

Type of logs trace remapper. Allowed enum values: trace-id-remapper

default: trace-id-remapper

type

string

Type of pipeline.

{
  "filter": {
    "query": "source:python"
  },
  "id": "string",
  "is_enabled": false,
  "is_read_only": false,
  "name": "",
  "processors": [
    {
      "grok": {
        "match_rules": "rule_name_1 foo\nrule_name_2 bar\n",
        "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"
      },
      "is_enabled": false,
      "name": "string",
      "samples": [],
      "source": "message",
      "type": "grok-parser"
    }
  ],
  "type": "pipeline"
}

Forbidden

Error response object.

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Too many requests

Error response object.

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Code example

# Curl command (use the endpoint for your Datadog site)
curl -X GET "https://api.datadoghq.com/api/v1/logs/config/pipelines" \
  -H "Accept: application/json" \
  -H "DD-API-KEY: ${DD_API_KEY}" \
  -H "DD-APPLICATION-KEY: ${DD_APP_KEY}"
"""
Get all pipelines returns "OK" response
"""

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.logs_pipelines_api import LogsPipelinesApi

configuration = Configuration()
with ApiClient(configuration) as api_client:
    api_instance = LogsPipelinesApi(api_client)
    response = api_instance.list_logs_pipelines()

    print(response)

Instructions

First install the library and its dependencies, then save the example to example.py, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" python3 "example.py"
# Get all pipelines returns "OK" response

require "datadog_api_client"
api_instance = DatadogAPIClient::V1::LogsPipelinesAPI.new
p api_instance.list_logs_pipelines()

Instructions

First install the library and its dependencies, then save the example to example.rb, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" ruby "example.rb"
// Get all pipelines returns "OK" response

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	ctx := datadog.NewDefaultContext(context.Background())
	configuration := datadog.NewConfiguration()
	apiClient := datadog.NewAPIClient(configuration)
	api := datadogV1.NewLogsPipelinesApi(apiClient)
	resp, r, err := api.ListLogsPipelines(ctx)

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.ListLogsPipelines`: %v\n", err)
		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
	}

	responseContent, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Fprintf(os.Stdout, "Response from `LogsPipelinesApi.ListLogsPipelines`:\n%s\n", responseContent)
}

Instructions

First install the library and its dependencies, then save the example to main.go, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" go run "main.go"
// Get all pipelines returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsPipeline;
import java.util.List;

public class Example {
  public static void main(String[] args) {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);

    try {
      List<LogsPipeline> result = apiInstance.listLogsPipelines();
      System.out.println(result);
    } catch (ApiException e) {
      System.err.println("Exception when calling LogsPipelinesApi#listLogsPipelines");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}

Instructions

First install the library and its dependencies, then save the example to Example.java, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" java "Example.java"
// Get all pipelines returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;

#[tokio::main]
async fn main() {
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.list_logs_pipelines().await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}

Instructions

First install the library and its dependencies, then save the example to src/main.rs, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" cargo run
/**
 * Get all pipelines returns "OK" response
 */

import { client, v1 } from "@datadog/datadog-api-client";

const configuration = client.createConfiguration();
const apiInstance = new v1.LogsPipelinesApi(configuration);

apiInstance
  .listLogsPipelines()
  .then((data: v1.LogsPipeline[]) => {
    console.log(
      "API called successfully. Returned data: " + JSON.stringify(data)
    );
  })
  .catch((error: any) => console.error(error));

Instructions

First install the library and its dependencies, then save the example to example.ts, set DD_SITE to your Datadog site, and run the following command:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" tsc "example.ts"

POST https://api.ap1.datadoghq.com/api/v1/logs/config/pipelines
POST https://api.datadoghq.eu/api/v1/logs/config/pipelines
POST https://api.ddog-gov.com/api/v1/logs/config/pipelines
POST https://api.datadoghq.com/api/v1/logs/config/pipelines
POST https://api.us3.datadoghq.com/api/v1/logs/config/pipelines
POST https://api.us5.datadoghq.com/api/v1/logs/config/pipelines

Overview

Create a pipeline in your organization. This endpoint requires the logs_write_pipelines permission.

Request

Body Data (required)

Definition of the new pipeline.

Field

Type

Description

filter

object

Filter for logs.

query

string

The filter query.

id

string

ID of the pipeline.

is_enabled

boolean

Whether or not the pipeline is enabled.

is_read_only

boolean

Whether or not the pipeline can be edited.

name [required]

string

Name of the pipeline.

processors

[ <oneOf>]

Ordered list of processors in this pipeline.

Option 1

object

Create custom grok rules to parse the full message or a specific attribute of your raw event. For more information, see the parsing section.

grok [required]

object

Set of rules for the grok parser.

match_rules [required]

string

List of match rules for the grok parser, separated by a new line.

support_rules

string

List of support rules for the grok parser, separated by a new line.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

samples

[string]

List of sample logs to test this grok parser.

source [required]

string

Name of the log attribute to parse.

default: message

type [required]

enum

Type of logs grok parser. Allowed enum values: grok-parser

default: grok-parser

Option 2

object

As Datadog receives logs, it timestamps them using the value(s) from any of these default attributes.

  • timestamp

  • date

  • _timestamp

  • Timestamp

  • eventTime

  • published_date

    If your logs put their dates in an attribute not in this list, use the log date Remapper Processor to define their date attribute as the official log timestamp. The recognized date formats are ISO8601, UNIX (the milliseconds EPOCH format), and RFC3164.

Note: If your logs don’t contain any of the default attributes and you haven’t defined your own date attribute, Datadog timestamps the logs with the date it received them.

If multiple log date remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs date remapper. Allowed enum values: date-remapper

default: date-remapper

Option 3

object

Use this Processor if you want to assign some attributes as the official status.

Each incoming status value is mapped as follows.

  • Integers from 0 to 7 map to the Syslog severity standards
  • Strings beginning with emerg or f (case-insensitive) map to emerg (0)
  • Strings beginning with a (case-insensitive) map to alert (1)
  • Strings beginning with c (case-insensitive) map to critical (2)
  • Strings beginning with err (case-insensitive) map to error (3)
  • Strings beginning with w (case-insensitive) map to warning (4)
  • Strings beginning with n (case-insensitive) map to notice (5)
  • Strings beginning with i (case-insensitive) map to info (6)
  • Strings beginning with d, trace or verbose (case-insensitive) map to debug (7)
  • Strings beginning with o or matching OK or Success (case-insensitive) map to OK
  • All others map to info (6)

Note: If multiple log status remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs status remapper. Allowed enum values: status-remapper

default: status-remapper

Option 4

object

Use this processor if you want to assign one or more attributes as the official service.

Note: If multiple service remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs service remapper. Allowed enum values: service-remapper

default: service-remapper

Option 5

object

The message is a key attribute in Datadog. It is displayed in the message column of the Log Explorer and you can do full string search on it. Use this Processor to define one or more attributes as the official log message.

Note: If multiple log message remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: msg

type [required]

enum

Type of logs message remapper. Allowed enum values: message-remapper

default: message-remapper

Option 6

object

The remapper processor remaps any source attribute(s) or tag to another target attribute or tag. Constraints on the tag/attribute name are explained in the Tag Best Practice documentation. Some additional constraints are applied as : or , are not allowed in the target tag/attribute name.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

override_on_conflict

boolean

Override or not the target element if already set.

preserve_source

boolean

Remove or preserve the remapped source element.

source_type

string

Defines if the sources are from log attribute or tag.

default: attribute

sources [required]

[string]

Array of source attributes.

target [required]

string

Final attribute or tag name to remap the sources to.

target_format

enum

If the target_type of the remapper is attribute, try to cast the value to a new specific type. If the cast is not possible, the original type is kept. string, integer, or double are the possible types. If the target_type is tag, this parameter may not be specified. Allowed enum values: auto,string,integer,double

target_type

string

Defines if the final attribute or tag name is from log attribute or tag.

default: attribute

type [required]

enum

Type of logs attribute remapper. Allowed enum values: attribute-remapper

default: attribute-remapper

Option 7

object

This processor extracts query parameters and other important parameters from a URL.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

normalize_ending_slashes

boolean

Normalize the ending slashes or not.

sources [required]

[string]

Array of source attributes.

default: http.url

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.url_details

type [required]

enum

Type of logs URL parser. Allowed enum values: url-parser

default: url-parser

Option 8

object

The User-Agent parser takes a User-Agent attribute and extracts the OS, browser, device, and other user data. It recognizes major bots like the Google Bot, Yahoo Slurp, and Bing.

is_enabled

boolean

Whether or not the processor is enabled.

is_encoded

boolean

Define if the source attribute is URL encoded or not.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: http.useragent

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.useragent_details

type [required]

enum

Type of logs User-Agent parser. Allowed enum values: user-agent-parser

default: user-agent-parser

Option 9

object

Use the Category Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log matching a provided search query. Use categories to create groups for an analytical view. For example, URL groups, machine groups, environments, and response time buckets.

Notes:

  • The syntax of the query is the one of Logs Explorer search bar. The query can be done on any log attribute or tag, whether it is a facet or not. Wildcards can also be used inside your query.
  • Once the log has matched one of the Processor queries, it stops. Make sure they are properly ordered in case a log could match several queries.
  • The names of the categories must be unique.
  • Once defined in the Category Processor, you can map categories to log status using the Log Status Remapper.

categories [required]

[object]

Array of filters to match or not a log and their corresponding name to assign a custom value to the log.

filter

object

Filter for logs.

query

string

The filter query.

name

string

Value to assign to the target attribute.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

target [required]

string

Name of the target attribute whose value is defined by the matching category.

type [required]

enum

Type of logs category processor. Allowed enum values: category-processor

default: category-processor

Option 10

object

Use the Arithmetic Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log with the result of the provided formula. This enables you to remap different time attributes with different units into a single attribute, or to compute operations on attributes within the same log.

The formula can use parentheses and the basic arithmetic operators -, +, *, /.

By default, the calculation is skipped if an attribute is missing. Select “Replace missing attribute by 0” to automatically populate missing attribute values with 0 to ensure that the calculation is done. An attribute is missing if it is not found in the log attributes, or if it cannot be converted to a number.

Notes:

  • The operator - needs to be space split in the formula as it can also be contained in attribute names.
  • If the target attribute already exists, it is overwritten by the result of the formula.
  • Results are rounded up to the 9th decimal. For example, if the result of the formula is 0.1234567891, the actual value stored for the attribute is 0.123456789.
  • If you need to scale a unit of measure, see Scale Filter.

expression [required]

string

Arithmetic operation between one or more log attributes.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, missing attributes in expression are replaced by 0; if false, the operation is skipped when an attribute is missing.

name

string

Name of the processor.

target [required]

string

Name of the attribute that contains the result of the arithmetic operation.

type [required]

enum

Type of logs arithmetic processor. Allowed enum values: arithmetic-processor

default: arithmetic-processor

Option 11

object

Use the string builder processor to add a new attribute (without spaces or special characters) to a log with the result of the provided template. This enables aggregation of different attributes or raw strings into a single attribute.

The template is defined by both raw text and blocks with the syntax %{attribute_path}.

Notes:

  • The processor only accepts attributes with values or an array of values in the blocks.
  • If an attribute cannot be used (object or array of object), it is replaced by an empty string or the entire operation is skipped depending on your selection.
  • If the target attribute already exists, it is overwritten by the result of the template.
  • Results of the template cannot exceed 256 characters.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, it replaces all missing attributes of template by an empty string. If false (default), skips the operation for missing attributes.

name

string

Name of the processor.

target [required]

string

The name of the attribute that contains the result of the template.

template [required]

string

A formula with one or more attributes and raw text.

type [required]

enum

Type of logs string builder processor. Allowed enum values: string-builder-processor

default: string-builder-processor

Option 12

object

Nested Pipelines are pipelines within a pipeline. Use Nested Pipelines to split the processing into two steps. For example, first use a high-level filtering such as team and then a second level of filtering based on the integration, service, or any other tag or attribute.

A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline can only contain Processors.

filter

object

Filter for logs.

query

string

The filter query.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

processors

[object]

Ordered list of processors in this pipeline.

type [required]

enum

Type of logs pipeline processor. Allowed enum values: pipeline

default: pipeline

Option 13

object

The GeoIP parser takes an IP address attribute and extracts if available the Continent, Country, Subdivision, and City information in the target attribute path.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: network.client.ip

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: network.client.geoip

type [required]

enum

Type of GeoIP parser. Allowed enum values: geo-ip-parser

default: geo-ip-parser

Option 14

object

Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in the processors mapping table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

default_lookup

string

Value to set the target attribute if the source value is not found in the list.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_table [required]

[string]

Mapping table of values for the source attribute and their associated target attribute values, formatted as ["source_key1,target_value1", "source_key2,target_value2"]

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list or the default_lookup if not found in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 15

object

Note: Reference Tables are in public beta. Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in a Reference Table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_enrichment_table [required]

string

Name of the Reference Table for the source attribute and their associated target attribute values.

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 16

object

There are two ways to improve correlation between application traces and logs.

  1. Follow the documentation on how to inject a trace ID in the application logs and by default log integrations take care of all the rest of the setup.

  2. Use the Trace remapper processor to define a log attribute as its associated trace ID.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources

[string]

Array of source attributes.

default: dd.trace_id

type [required]

enum

Type of logs trace remapper. Allowed enum values: trace-id-remapper

default: trace-id-remapper

type

string

Type of pipeline.

{
  "filter": {
    "query": "source:python"
  },
  "is_enabled": false,
  "name": "",
  "processors": [
    {
      "grok": {
        "match_rules": "rule_name_1 foo\nrule_name_2 bar\n",
        "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"
      },
      "is_enabled": false,
      "name": "string",
      "samples": [],
      "source": "message",
      "type": "grok-parser"
    }
  ]
}

Response

OK

Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.

Note: These endpoints are only available for admin users. Make sure to use an application key created by an admin.

Expand All

Field

Type

Description

filter

object

Filter for logs.

query

string

The filter query.

id

string

ID of the pipeline.

is_enabled

boolean

Whether or not the pipeline is enabled.

is_read_only

boolean

Whether or not the pipeline can be edited.

name [required]

string

Name of the pipeline.

processors

[ <oneOf>]

Ordered list of processors in this pipeline.

Option 1

object

Create custom grok rules to parse the full message or a specific attribute of your raw event. For more information, see the parsing section.

grok [required]

object

Set of rules for the grok parser.

match_rules [required]

string

List of match rules for the grok parser, separated by a new line.

support_rules

string

List of support rules for the grok parser, separated by a new line.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

samples

[string]

List of sample logs to test this grok parser.

source [required]

string

Name of the log attribute to parse.

default: message

type [required]

enum

Type of logs grok parser. Allowed enum values: grok-parser

default: grok-parser
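
As a sketch, a grok parser that extracts a user and a duration from a plain-text message could be defined as follows (the rule name, sample, and attribute names are illustrative):

{
  "type": "grok-parser",
  "name": "Parse access logs",
  "is_enabled": true,
  "source": "message",
  "samples": ["john connected in 42ms"],
  "grok": {
    "match_rules": "connect_rule %{word:user} connected in %{number:duration}ms"
  }
}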

Option 2

object

As Datadog receives logs, it timestamps them using the value(s) from any of these default attributes.

  • timestamp

  • date

  • _timestamp

  • Timestamp

  • eventTime

  • published_date

    If your logs put their dates in an attribute not in this list, use the log date Remapper Processor to define their date attribute as the official log timestamp. The recognized date formats are ISO8601, UNIX (the milliseconds EPOCH format), and RFC3164.

Note: If your logs don’t contain any of the default attributes and you haven’t defined your own date attribute, Datadog timestamps the logs with the date it received them.

If multiple log date remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs date remapper. Allowed enum values: date-remapper

default: date-remapper
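
If your logs carry their timestamp in a custom attribute, a date remapper definition could look like the following (the attribute name published_at is illustrative):

{
  "type": "date-remapper",
  "name": "Define published_at as the official timestamp",
  "is_enabled": true,
  "sources": ["published_at"]
}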

Option 3

object

Use this Processor if you want to assign some attributes as the official status.

Each incoming status value is mapped as follows.

  • Integers from 0 to 7 map to the Syslog severity standards
  • Strings beginning with emerg or f (case-insensitive) map to emerg (0)
  • Strings beginning with a (case-insensitive) map to alert (1)
  • Strings beginning with c (case-insensitive) map to critical (2)
  • Strings beginning with err (case-insensitive) map to error (3)
  • Strings beginning with w (case-insensitive) map to warning (4)
  • Strings beginning with n (case-insensitive) map to notice (5)
  • Strings beginning with i (case-insensitive) map to info (6)
  • Strings beginning with d, trace or verbose (case-insensitive) map to debug (7)
  • Strings beginning with o or matching OK or Success (case-insensitive) map to OK
  • All others map to info (6)

Note: If multiple log status remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs status remapper. Allowed enum values: status-remapper

default: status-remapper

Option 4

object

Use this processor if you want to assign one or more attributes as the official service.

Note: If multiple service remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs service remapper. Allowed enum values: service-remapper

default: service-remapper

Option 5

object

The message is a key attribute in Datadog. It is displayed in the message column of the Log Explorer and you can do full string search on it. Use this Processor to define one or more attributes as the official log message.

Note: If multiple log message remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: msg

type [required]

enum

Type of logs message remapper. Allowed enum values: message-remapper

default: message-remapper

Option 6

object

The remapper processor remaps any source attribute(s) or tag to another target attribute or tag. Constraints on the tag/attribute name are explained in the Tag Best Practice documentation. Some additional constraints apply: : and , are not allowed in the target tag/attribute name.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

override_on_conflict

boolean

Whether to override the target element if it is already set.

preserve_source

boolean

Remove or preserve the remapped source element.

source_type

string

Defines if the sources are from log attribute or tag.

default: attribute

sources [required]

[string]

Array of source attributes.

target [required]

string

Final attribute or tag name to remap the sources to.

target_format

enum

If the target_type of the remapper is attribute, try to cast the value to a new specific type. If the cast is not possible, the original type is kept. string, integer, or double are the possible types. If the target_type is tag, this parameter may not be specified. Allowed enum values: auto,string,integer,double

target_type

string

Defines if the final attribute or tag name is from log attribute or tag.

default: attribute

type [required]

enum

Type of logs attribute remapper. Allowed enum values: attribute-remapper

default: attribute-remapper
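
For example, an attribute remapper that renames a custom attribute to a standard one could be defined as follows (the attribute names are illustrative):

{
  "type": "attribute-remapper",
  "name": "Remap user_id to usr.id",
  "is_enabled": true,
  "sources": ["user_id"],
  "source_type": "attribute",
  "target": "usr.id",
  "target_type": "attribute",
  "target_format": "string",
  "preserve_source": false,
  "override_on_conflict": false
}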

Option 7

object

This processor extracts query parameters and other important parameters from a URL.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

normalize_ending_slashes

boolean

Normalize the ending slashes or not.

sources [required]

[string]

Array of source attributes.

default: http.url

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.url_details

type [required]

enum

Type of logs URL parser. Allowed enum values: url-parser

default: url-parser
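
A URL parser using the default source and target attributes could be defined as follows (the processor name is illustrative):

{
  "type": "url-parser",
  "name": "Parse request URLs",
  "is_enabled": true,
  "sources": ["http.url"],
  "target": "http.url_details",
  "normalize_ending_slashes": false
}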

Option 8

object

The User-Agent parser takes a User-Agent attribute and extracts the OS, browser, device, and other user data. It recognizes major bots like the Google Bot, Yahoo Slurp, and Bing.

is_enabled

boolean

Whether or not the processor is enabled.

is_encoded

boolean

Define if the source attribute is URL encoded or not.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: http.useragent

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.useragent_details

type [required]

enum

Type of logs User-Agent parser. Allowed enum values: user-agent-parser

default: user-agent-parser

Option 9

object

Use the Category Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log matching a provided search query. Use categories to create groups for an analytical view. For example, URL groups, machine groups, environments, and response time buckets.

Notes:

  • The syntax of the query is that of the Logs Explorer search bar. The query can be done on any log attribute or tag, whether it is a facet or not. Wildcards can also be used inside your query.
  • Once the log has matched one of the Processor queries, it stops. Make sure they are properly ordered in case a log could match several queries.
  • The names of the categories must be unique.
  • Once defined in the Category Processor, you can map categories to log status using the Log Status Remapper.

categories [required]

[object]

Array of filters to match or not a log and their corresponding name to assign a custom value to the log.

filter

object

Filter for logs.

query

string

The filter query.

name

string

Value to assign to the target attribute.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

target [required]

string

Name of the target attribute whose value is defined by the matching category.

type [required]

enum

Type of logs category processor. Allowed enum values: category-processor

default: category-processor
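
As a sketch, a Category Processor that buckets logs by HTTP status code could be defined as follows (the target attribute and queries are illustrative):

{
  "type": "category-processor",
  "name": "Categorize HTTP status codes",
  "is_enabled": true,
  "target": "http.status_category",
  "categories": [
    {
      "filter": {
        "query": "@http.status_code:[200 TO 299]"
      },
      "name": "OK"
    },
    {
      "filter": {
        "query": "@http.status_code:[500 TO 599]"
      },
      "name": "server_error"
    }
  ]
}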

Option 10

object

Use the Arithmetic Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log with the result of the provided formula. This enables you to remap different time attributes with different units into a single attribute, or to compute operations on attributes within the same log.

The formula can use parentheses and the basic arithmetic operators -, +, *, /.

By default, the calculation is skipped if an attribute is missing. Select “Replace missing attribute by 0” to automatically populate missing attribute values with 0 to ensure that the calculation is done. An attribute is missing if it is not found in the log attributes, or if it cannot be converted to a number.

Notes:

  • The operator - needs to be space split in the formula as it can also be contained in attribute names.
  • If the target attribute already exists, it is overwritten by the result of the formula.
  • Results are rounded up to the 9th decimal. For example, if the result of the formula is 0.1234567891, the actual value stored for the attribute is 0.123456789.
  • If you need to scale a unit of measure, see Scale Filter.

expression [required]

string

Arithmetic operation between one or more log attributes.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, replaces all missing attributes of expression with 0; if false, skips the operation if an attribute is missing.

name

string

Name of the processor.

target [required]

string

Name of the attribute that contains the result of the arithmetic operation.

type [required]

enum

Type of logs arithmetic processor. Allowed enum values: arithmetic-processor

default: arithmetic-processor
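
For example, an Arithmetic Processor that computes a response time in seconds from two millisecond attributes could be defined as follows (the attribute names are illustrative):

{
  "type": "arithmetic-processor",
  "name": "Compute response time in seconds",
  "is_enabled": true,
  "expression": "(time_end - time_start) / 1000",
  "target": "response_time_s",
  "is_replace_missing": false
}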

Option 11

object

Use the string builder processor to add a new attribute (without spaces or special characters) to a log with the result of the provided template. This enables aggregation of different attributes or raw strings into a single attribute.

The template is defined by both raw text and blocks with the syntax %{attribute_path}.

Notes:

  • The processor only accepts attributes with values or an array of values in the blocks.
  • If an attribute cannot be used (object or array of object), it is replaced by an empty string or the entire operation is skipped depending on your selection.
  • If the target attribute already exists, it is overwritten by the result of the template.
  • Results of the template cannot exceed 256 characters.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, it replaces all missing attributes of template by an empty string. If false (default), skips the operation for missing attributes.

name

string

Name of the processor.

target [required]

string

The name of the attribute that contains the result of the template.

template [required]

string

A formula with one or more attributes and raw text.

type [required]

enum

Type of logs string builder processor. Allowed enum values: string-builder-processor

default: string-builder-processor
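
As a sketch, a string builder processor that combines two attributes into one summary attribute could be defined as follows (the attribute paths are illustrative):

{
  "type": "string-builder-processor",
  "name": "Build a request summary",
  "is_enabled": true,
  "template": "%{http.method} request to %{http.url_details.host}",
  "target": "http.request_summary",
  "is_replace_missing": true
}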

Option 12

object

Nested Pipelines are pipelines within a pipeline. Use Nested Pipelines to split the processing into two steps. For example, first use high-level filtering such as team, and then a second level of filtering based on the integration, service, or any other tag or attribute.

A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline can only contain Processors.

filter

object

Filter for logs.

query

string

The filter query.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

processors

[object]

Ordered list of processors in this pipeline.

type [required]

enum

Type of logs pipeline processor. Allowed enum values: pipeline

default: pipeline
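
For example, a nested pipeline that only processes logs from an illustrative web-store service and applies a single grok parser to them could be defined as follows:

{
  "type": "pipeline",
  "name": "Web store logs",
  "is_enabled": true,
  "filter": {
    "query": "service:web-store"
  },
  "processors": [
    {
      "type": "grok-parser",
      "name": "Parse web store messages",
      "is_enabled": true,
      "source": "message",
      "samples": [],
      "grok": {
        "match_rules": "store_rule %{word:user} purchased %{number:item_count} items"
      }
    }
  ]
}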

Option 13

object

The GeoIP parser takes an IP address attribute and extracts, if available, the Continent, Country, Subdivision, and City information into the target attribute path.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: network.client.ip

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: network.client.geoip

type [required]

enum

Type of GeoIP parser. Allowed enum values: geo-ip-parser

default: geo-ip-parser

Option 14

object

Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in the processors mapping table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

default_lookup

string

Value to set the target attribute if the source value is not found in the list.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_table [required]

[string]

Mapping table of values for the source attribute and their associated target attribute values, formatted as ["source_key1,target_value1", "source_key2,target_value2"]

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list or the default_lookup if not found in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 15

object

Note: Reference Tables are in public beta. Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in a Reference Table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_enrichment_table [required]

string

Name of the Reference Table for the source attribute and their associated target attribute values.

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 16

object

There are two ways to improve correlation between application traces and logs.

  1. Follow the documentation on how to inject a trace ID into the application logs; by default, log integrations take care of the rest of the setup.

  2. Use the Trace remapper processor to define a log attribute as its associated trace ID.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources

[string]

Array of source attributes.

default: dd.trace_id

type [required]

enum

Type of logs trace remapper. Allowed enum values: trace-id-remapper

default: trace-id-remapper

type

string

Type of pipeline.

{
  "filter": {
    "query": "source:python"
  },
  "id": "string",
  "is_enabled": false,
  "is_read_only": false,
  "name": "",
  "processors": [
    {
      "grok": {
        "match_rules": "rule_name_1 foo\nrule_name_2 bar\n",
        "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"
      },
      "is_enabled": false,
      "name": "string",
      "samples": [],
      "source": "message",
      "type": "grok-parser"
    }
  ],
  "type": "pipeline"
}

Bad Request

Response returned by the Logs API when errors occur.

Expand All

Field

Type

Description

error

object

Error returned by the Logs API

code

string

Code identifying the error

details

[object]

Additional error details

message

string

Error message

{
  "error": {
    "code": "string",
    "details": [],
    "message": "string"
  }
}

Forbidden

Error response object.

Expand All

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Too many requests

Error response object.

Expand All

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Code example

# Curl command
curl -X POST "https://api.datadoghq.com/api/v1/logs/config/pipelines" \
  -H "Accept: application/json" \
  -H "Content-Type: application/json" \
  -H "DD-API-KEY: ${DD_API_KEY}" \
  -H "DD-APPLICATION-KEY: ${DD_APP_KEY}" \
  -d @- << EOF
{
  "name": "",
  "processors": [
    {
      "grok": {
        "match_rules": "rule_name_1 foo\nrule_name_2 bar\n"
      }
    }
  ]
}
EOF
"""
Create a pipeline returns "OK" response
"""

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.logs_pipelines_api import LogsPipelinesApi
from datadog_api_client.v1.model.logs_filter import LogsFilter
from datadog_api_client.v1.model.logs_grok_parser import LogsGrokParser
from datadog_api_client.v1.model.logs_grok_parser_rules import LogsGrokParserRules
from datadog_api_client.v1.model.logs_grok_parser_type import LogsGrokParserType
from datadog_api_client.v1.model.logs_pipeline import LogsPipeline

body = LogsPipeline(
    filter=LogsFilter(
        query="source:python",
    ),
    name="",
    processors=[
        LogsGrokParser(
            grok=LogsGrokParserRules(
                match_rules="rule_name_1 foo\nrule_name_2 bar\n",
                support_rules="rule_name_1 foo\nrule_name_2 bar\n",
            ),
            is_enabled=False,
            samples=[],
            source="message",
            type=LogsGrokParserType.GROK_PARSER,
        ),
    ],
)

configuration = Configuration()
with ApiClient(configuration) as api_client:
    api_instance = LogsPipelinesApi(api_client)
    response = api_instance.create_logs_pipeline(body=body)

    print(response)

Instructions

First install the library and its dependencies and then save the example to example.py and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" python3 "example.py"
# Create a pipeline returns "OK" response

require "datadog_api_client"
api_instance = DatadogAPIClient::V1::LogsPipelinesAPI.new

body = DatadogAPIClient::V1::LogsPipeline.new({
  filter: DatadogAPIClient::V1::LogsFilter.new({
    query: "source:python",
  }),
  name: "",
  processors: [
    DatadogAPIClient::V1::LogsGrokParser.new({
      grok: DatadogAPIClient::V1::LogsGrokParserRules.new({
        match_rules: 'rule_name_1 foo\nrule_name_2 bar\n',
        support_rules: 'rule_name_1 foo\nrule_name_2 bar\n',
      }),
      is_enabled: false,
      samples: [],
      source: "message",
      type: DatadogAPIClient::V1::LogsGrokParserType::GROK_PARSER,
    }),
  ],
})
p api_instance.create_logs_pipeline(body)

Instructions

First install the library and its dependencies and then save the example to example.rb and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" ruby "example.rb"
// Create a pipeline returns "OK" response

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	body := datadogV1.LogsPipeline{
		Filter: &datadogV1.LogsFilter{
			Query: datadog.PtrString("source:python"),
		},
		Name: "",
		Processors: []datadogV1.LogsProcessor{
			datadogV1.LogsProcessor{
				LogsGrokParser: &datadogV1.LogsGrokParser{
					Grok: datadogV1.LogsGrokParserRules{
						MatchRules: `rule_name_1 foo
rule_name_2 bar
`,
						SupportRules: datadog.PtrString(`rule_name_1 foo
rule_name_2 bar
`),
					},
					IsEnabled: datadog.PtrBool(false),
					Samples:   []string{},
					Source:    "message",
					Type:      datadogV1.LOGSGROKPARSERTYPE_GROK_PARSER,
				}},
		},
	}
	ctx := datadog.NewDefaultContext(context.Background())
	configuration := datadog.NewConfiguration()
	apiClient := datadog.NewAPIClient(configuration)
	api := datadogV1.NewLogsPipelinesApi(apiClient)
	resp, r, err := api.CreateLogsPipeline(ctx, body)

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.CreateLogsPipeline`: %v\n", err)
		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
	}

	responseContent, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Fprintf(os.Stdout, "Response from `LogsPipelinesApi.CreateLogsPipeline`:\n%s\n", responseContent)
}

Instructions

First install the library and its dependencies and then save the example to main.go and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" go run "main.go"
// Create a pipeline returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsFilter;
import com.datadog.api.client.v1.model.LogsGrokParser;
import com.datadog.api.client.v1.model.LogsGrokParserRules;
import com.datadog.api.client.v1.model.LogsGrokParserType;
import com.datadog.api.client.v1.model.LogsPipeline;
import com.datadog.api.client.v1.model.LogsProcessor;
import java.util.Collections;

public class Example {
  public static void main(String[] args) {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);

    LogsPipeline body =
        new LogsPipeline()
            .filter(new LogsFilter().query("source:python"))
            .name("")
            .processors(
                Collections.singletonList(
                    new LogsProcessor(
                        new LogsGrokParser()
                            .grok(
                                new LogsGrokParserRules()
                                    .matchRules("""
rule_name_1 foo
rule_name_2 bar

""")
                                    .supportRules("""
rule_name_1 foo
rule_name_2 bar

"""))
                            .isEnabled(false)
                            .source("message")
                            .type(LogsGrokParserType.GROK_PARSER))));

    try {
      LogsPipeline result = apiInstance.createLogsPipeline(body);
      System.out.println(result);
    } catch (ApiException e) {
      System.err.println("Exception when calling LogsPipelinesApi#createLogsPipeline");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}

Instructions

First install the library and its dependencies and then save the example to Example.java and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" java "Example.java"
// Create a pipeline returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsFilter;
use datadog_api_client::datadogV1::model::LogsGrokParser;
use datadog_api_client::datadogV1::model::LogsGrokParserRules;
use datadog_api_client::datadogV1::model::LogsGrokParserType;
use datadog_api_client::datadogV1::model::LogsPipeline;
use datadog_api_client::datadogV1::model::LogsProcessor;

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsGrokParser(Box::new(
            LogsGrokParser::new(
                LogsGrokParserRules::new(
                    r#"rule_name_1 foo
rule_name_2 bar
"#
                    .to_string(),
                )
                .support_rules(
                    r#"rule_name_1 foo
rule_name_2 bar
"#
                    .to_string(),
                ),
                "message".to_string(),
                LogsGrokParserType::GROK_PARSER,
            )
            .is_enabled(false)
            .samples(vec![]),
        ))]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}

Instructions

First install the library and its dependencies and then save the example to src/main.rs and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" cargo run
/**
 * Create a pipeline returns "OK" response
 */

import { client, v1 } from "@datadog/datadog-api-client";

const configuration = client.createConfiguration();
const apiInstance = new v1.LogsPipelinesApi(configuration);

const params: v1.LogsPipelinesApiCreateLogsPipelineRequest = {
  body: {
    filter: {
      query: "source:python",
    },
    name: "",
    processors: [
      {
        grok: {
          matchRules: `rule_name_1 foo
rule_name_2 bar
`,
          supportRules: `rule_name_1 foo
rule_name_2 bar
`,
        },
        isEnabled: false,
        samples: [],
        source: "message",
        type: "grok-parser",
      },
    ],
  },
};

apiInstance
  .createLogsPipeline(params)
  .then((data: v1.LogsPipeline) => {
    console.log(
      "API called successfully. Returned data: " + JSON.stringify(data)
    );
  })
  .catch((error: any) => console.error(error));

Instructions

First install the library and its dependencies and then save the example to example.ts and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" tsc "example.ts"

GET https://api.ap1.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
https://api.datadoghq.eu/api/v1/logs/config/pipelines/{pipeline_id}
https://api.ddog-gov.com/api/v1/logs/config/pipelines/{pipeline_id}
https://api.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
https://api.us3.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
https://api.us5.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}

Overview

Get a specific pipeline from your organization. This endpoint takes no JSON arguments.

Arguments

Path parameters

Name

Type

Description

pipeline_id [required]

string

ID of the pipeline to get.

Response

OK

Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.

Note: These endpoints are only available for admin users. Make sure to use an application key created by an admin.

Expand All

Field

Type

Description

filter

object

Filter for logs.

query

string

The filter query.

id

string

ID of the pipeline.

is_enabled

boolean

Whether or not the pipeline is enabled.

is_read_only

boolean

Whether or not the pipeline can be edited.

name [required]

string

Name of the pipeline.

processors

[ <oneOf>]

Ordered list of processors in this pipeline.

Option 1

object

Create custom grok rules to parse the full message or a specific attribute of your raw event. For more information, see the parsing section.

grok [required]

object

Set of rules for the grok parser.

match_rules [required]

string

List of match rules for the grok parser, separated by a new line.

support_rules

string

List of support rules for the grok parser, separated by a new line.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

samples

[string]

List of sample logs to test this grok parser.

source [required]

string

Name of the log attribute to parse.

default: message

type [required]

enum

Type of logs grok parser. Allowed enum values: grok-parser

default: grok-parser

Option 2

object

As Datadog receives logs, it timestamps them using the value(s) from any of these default attributes.

  • timestamp

  • date

  • _timestamp

  • Timestamp

  • eventTime

  • published_date

    If your logs put their dates in an attribute not in this list, use the log date Remapper Processor to define their date attribute as the official log timestamp. The recognized date formats are ISO8601, UNIX (the milliseconds EPOCH format), and RFC3164.

Note: If your logs don’t contain any of the default attributes and you haven’t defined your own date attribute, Datadog timestamps the logs with the date it received them.

If multiple log date remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs date remapper. Allowed enum values: date-remapper

default: date-remapper

Option 3

object

Use this Processor if you want to assign some attributes as the official status.

Each incoming status value is mapped as follows.

  • Integers from 0 to 7 map to the Syslog severity standards
  • Strings beginning with emerg or f (case-insensitive) map to emerg (0)
  • Strings beginning with a (case-insensitive) map to alert (1)
  • Strings beginning with c (case-insensitive) map to critical (2)
  • Strings beginning with err (case-insensitive) map to error (3)
  • Strings beginning with w (case-insensitive) map to warning (4)
  • Strings beginning with n (case-insensitive) map to notice (5)
  • Strings beginning with i (case-insensitive) map to info (6)
  • Strings beginning with d, trace or verbose (case-insensitive) map to debug (7)
  • Strings beginning with o or matching OK or Success (case-insensitive) map to OK
  • All others map to info (6)

Note: If multiple log status remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs status remapper. Allowed enum values: status-remapper

default: status-remapper
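
For example, a status remapper that reads the severity from a custom level attribute could be defined as follows (the attribute name is illustrative):

{
  "type": "status-remapper",
  "name": "Define level as the official status",
  "is_enabled": true,
  "sources": ["level"]
}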

Option 4

object

Use this processor if you want to assign one or more attributes as the official service.

Note: If multiple service remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs service remapper. Allowed enum values: service-remapper

default: service-remapper

Option 5

object

The message is a key attribute in Datadog. It is displayed in the message column of the Log Explorer and you can do full string search on it. Use this Processor to define one or more attributes as the official log message.

Note: If multiple log message remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: msg

type [required]

enum

Type of logs message remapper. Allowed enum values: message-remapper

default: message-remapper

Option 6

object

The remapper processor remaps any source attribute(s) or tag to another target attribute or tag. Constraints on the tag/attribute name are explained in the Tag Best Practice documentation. Some additional constraints apply: : and , are not allowed in the target tag/attribute name.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

override_on_conflict

boolean

Whether to override the target element if it is already set.

preserve_source

boolean

Remove or preserve the remapped source element.

source_type

string

Defines if the sources are from log attribute or tag.

default: attribute

sources [required]

[string]

Array of source attributes.

target [required]

string

Final attribute or tag name to remap the sources to.

target_format

enum

If the target_type of the remapper is attribute, try to cast the value to a new specific type. If the cast is not possible, the original type is kept. string, integer, or double are the possible types. If the target_type is tag, this parameter may not be specified. Allowed enum values: auto,string,integer,double

target_type

string

Defines if the final attribute or tag name is from log attribute or tag.

default: attribute

type [required]

enum

Type of logs attribute remapper. Allowed enum values: attribute-remapper

default: attribute-remapper

Option 7

object

This processor extracts query parameters and other important parameters from a URL.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

normalize_ending_slashes

boolean

Normalize the ending slashes or not.

sources [required]

[string]

Array of source attributes.

default: http.url

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.url_details

type [required]

enum

Type of logs URL parser. Allowed enum values: url-parser

default: url-parser

Option 8

object

The User-Agent parser takes a User-Agent attribute and extracts the OS, browser, device, and other user data. It recognizes major bots like the Google Bot, Yahoo Slurp, and Bing.

is_enabled

boolean

Whether or not the processor is enabled.

is_encoded

boolean

Define if the source attribute is URL encoded or not.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: http.useragent

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.useragent_details

type [required]

enum

Type of logs User-Agent parser. Allowed enum values: user-agent-parser

default: user-agent-parser
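
A User-Agent parser using the default source and target attributes could be defined as follows (the processor name is illustrative):

{
  "type": "user-agent-parser",
  "name": "Parse user agents",
  "is_enabled": true,
  "is_encoded": false,
  "sources": ["http.useragent"],
  "target": "http.useragent_details"
}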

Option 9

object

Use the Category Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log matching a provided search query. Use categories to create groups for an analytical view. For example, URL groups, machine groups, environments, and response time buckets.

Notes:

  • The syntax of the query is that of the Logs Explorer search bar. The query can be done on any log attribute or tag, whether it is a facet or not. Wildcards can also be used inside your query.
  • Once the log has matched one of the Processor queries, it stops. Make sure they are properly ordered in case a log could match several queries.
  • The names of the categories must be unique.
  • Once defined in the Category Processor, you can map categories to log status using the Log Status Remapper.

categories [required]

[object]

Array of filters to match or not a log and their corresponding name to assign a custom value to the log.

filter

object

Filter for logs.

query

string

The filter query.

name

string

Value to assign to the target attribute.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

target [required]

string

Name of the target attribute whose value is defined by the matching category.

type [required]

enum

Type of logs category processor. Allowed enum values: category-processor

default: category-processor

Option 10

object

Use the Arithmetic Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log with the result of the provided formula. This enables you to remap different time attributes with different units into a single attribute, or to compute operations on attributes within the same log.

The formula can use parentheses and the basic arithmetic operators -, +, *, /.

By default, the calculation is skipped if an attribute is missing. Select “Replace missing attribute by 0” to automatically populate missing attribute values with 0 to ensure that the calculation is done. An attribute is missing if it is not found in the log attributes, or if it cannot be converted to a number.

Notes:

  • The operator - needs to be space split in the formula as it can also be contained in attribute names.
  • If the target attribute already exists, it is overwritten by the result of the formula.
  • Results are rounded up to the 9th decimal. For example, if the result of the formula is 0.1234567891, the actual value stored for the attribute is 0.123456789.
  • If you need to scale a unit of measure, see Scale Filter.

expression [required]

string

Arithmetic operation between one or more log attributes.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, replaces all missing attributes of expression with 0; if false, skips the operation if an attribute is missing.

name

string

Name of the processor.

target [required]

string

Name of the attribute that contains the result of the arithmetic operation.

type [required]

enum

Type of logs arithmetic processor. Allowed enum values: arithmetic-processor

default: arithmetic-processor

Option 11

object

Use the string builder processor to add a new attribute (without spaces or special characters) to a log with the result of the provided template. This enables aggregation of different attributes or raw strings into a single attribute.

The template is defined by both raw text and blocks with the syntax %{attribute_path}.

Notes:

  • The processor only accepts attributes with values or an array of values in the blocks.
  • If an attribute cannot be used (object or array of object), it is replaced by an empty string or the entire operation is skipped depending on your selection.
  • If the target attribute already exists, it is overwritten by the result of the template.
  • Results of the template cannot exceed 256 characters.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, it replaces all missing attributes of template by an empty string. If false (default), skips the operation for missing attributes.

name

string

Name of the processor.

target [required]

string

The name of the attribute that contains the result of the template.

template [required]

string

A formula with one or more attributes and raw text.

type [required]

enum

Type of logs string builder processor. Allowed enum values: string-builder-processor

default: string-builder-processor

Option 12

object

Nested Pipelines are pipelines within a pipeline. Use Nested Pipelines to split the processing into two steps. For example, first use high-level filtering such as team, and then a second level of filtering based on the integration, service, or any other tag or attribute.

A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline can only contain Processors.

filter

object

Filter for logs.

query

string

The filter query.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

processors

[object]

Ordered list of processors in this pipeline.

type [required]

enum

Type of logs pipeline processor. Allowed enum values: pipeline

default: pipeline

Option 13

object

The GeoIP parser takes an IP address attribute and extracts, if available, the Continent, Country, Subdivision, and City information into the target attribute path.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: network.client.ip

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: network.client.geoip

type [required]

enum

Type of GeoIP parser. Allowed enum values: geo-ip-parser

default: geo-ip-parser

Option 14

object

Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in the processors mapping table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

default_lookup

string

Value to set the target attribute if the source value is not found in the list.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_table [required]

[string]

Mapping table of values for the source attribute and their associated target attribute values, formatted as ["source_key1,target_value1", "source_key2,target_value2"]

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list or the default_lookup if not found in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 15

object

Note: Reference Tables are in public beta. Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in a Reference Table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_enrichment_table [required]

string

Name of the Reference Table for the source attribute and their associated target attribute values.

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 16

object

There are two ways to improve correlation between application traces and logs.

  1. Follow the documentation on how to inject a trace ID into the application logs; by default, log integrations take care of the rest of the setup.

  2. Use the Trace remapper processor to define a log attribute as its associated trace ID.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources

[string]

Array of source attributes.

default: dd.trace_id

type [required]

enum

Type of logs trace remapper. Allowed enum values: trace-id-remapper

default: trace-id-remapper

type

string

Type of pipeline.

{
  "filter": {
    "query": "source:python"
  },
  "id": "string",
  "is_enabled": false,
  "is_read_only": false,
  "name": "",
  "processors": [
    {
      "grok": {
        "match_rules": "rule_name_1 foo\nrule_name_2 bar\n",
        "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"
      },
      "is_enabled": false,
      "name": "string",
      "samples": [],
      "source": "message",
      "type": "grok-parser"
    }
  ],
  "type": "pipeline"
}

Bad Request

Response returned by the Logs API when errors occur.

Expand All

Field

Type

Description

error

object

Error returned by the Logs API

code

string

Code identifying the error

details

[object]

Additional error details

message

string

Error message

{
  "error": {
    "code": "string",
    "details": [],
    "message": "string"
  }
}

Forbidden

Error response object.

Expand All

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Too many requests

Error response object.

Expand All

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Code example

# Path parameters
export pipeline_id="CHANGE_ME"
# Curl command
curl -X GET "https://api.datadoghq.com/api/v1/logs/config/pipelines/${pipeline_id}" \
  -H "Accept: application/json" \
  -H "DD-API-KEY: ${DD_API_KEY}" \
  -H "DD-APPLICATION-KEY: ${DD_APP_KEY}"
"""
Get a pipeline returns "OK" response
"""

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.logs_pipelines_api import LogsPipelinesApi

configuration = Configuration()
with ApiClient(configuration) as api_client:
    api_instance = LogsPipelinesApi(api_client)
    response = api_instance.get_logs_pipeline(
        pipeline_id="pipeline_id",
    )

    print(response)

Instructions

First install the library and its dependencies and then save the example to example.py and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" python3 "example.py"
# Get a pipeline returns "OK" response

require "datadog_api_client"
api_instance = DatadogAPIClient::V1::LogsPipelinesAPI.new
p api_instance.get_logs_pipeline("pipeline_id")

Instructions

First install the library and its dependencies and then save the example to example.rb and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" ruby "example.rb"
// Get a pipeline returns "OK" response

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	ctx := datadog.NewDefaultContext(context.Background())
	configuration := datadog.NewConfiguration()
	apiClient := datadog.NewAPIClient(configuration)
	api := datadogV1.NewLogsPipelinesApi(apiClient)
	resp, r, err := api.GetLogsPipeline(ctx, "pipeline_id")

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.GetLogsPipeline`: %v\n", err)
		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
	}

	responseContent, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Fprintf(os.Stdout, "Response from `LogsPipelinesApi.GetLogsPipeline`:\n%s\n", responseContent)
}

Instructions

First install the library and its dependencies and then save the example to main.go and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" go run "main.go"
// Get a pipeline returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsPipeline;

public class Example {
  public static void main(String[] args) {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);

    try {
      LogsPipeline result = apiInstance.getLogsPipeline("pipeline_id");
      System.out.println(result);
    } catch (ApiException e) {
      System.err.println("Exception when calling LogsPipelinesApi#getLogsPipeline");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}

Instructions

First install the library and its dependencies and then save the example to Example.java and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" java "Example.java"
// Get a pipeline returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;

#[tokio::main]
async fn main() {
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.get_logs_pipeline("pipeline_id".to_string()).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}

Instructions

First install the library and its dependencies and then save the example to src/main.rs and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" cargo run
/**
 * Get a pipeline returns "OK" response
 */

import { client, v1 } from "@datadog/datadog-api-client";

const configuration = client.createConfiguration();
const apiInstance = new v1.LogsPipelinesApi(configuration);

const params: v1.LogsPipelinesApiGetLogsPipelineRequest = {
  pipelineId: "pipeline_id",
};

apiInstance
  .getLogsPipeline(params)
  .then((data: v1.LogsPipeline) => {
    console.log(
      "API called successfully. Returned data: " + JSON.stringify(data)
    );
  })
  .catch((error: any) => console.error(error));

Instructions

First install the library and its dependencies and then save the example to example.ts and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" tsc "example.ts"

DELETE https://api.ap1.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
https://api.datadoghq.eu/api/v1/logs/config/pipelines/{pipeline_id}
https://api.ddog-gov.com/api/v1/logs/config/pipelines/{pipeline_id}
https://api.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
https://api.us3.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
https://api.us5.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}

Overview

Delete a given pipeline from your organization. This endpoint takes no JSON arguments. This endpoint requires the logs_write_pipelines permission.

Arguments

Path parameters

Name

Type

Description

pipeline_id [required]

string

ID of the pipeline to delete.

Response

OK

Bad Request

Response returned by the Logs API when errors occur.

Expand All

Field

Type

Description

error

object

Error returned by the Logs API

code

string

Code identifying the error

details

[object]

Additional error details

message

string

Error message

{
  "error": {
    "code": "string",
    "details": [],
    "message": "string"
  }
}

Forbidden

Error response object.

Expand All

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Too many requests

Error response object.

Expand All

Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Code example

# Path parameters
export pipeline_id="CHANGE_ME"
# Curl command
curl -X DELETE "https://api.datadoghq.com/api/v1/logs/config/pipelines/${pipeline_id}" \
  -H "Accept: application/json" \
  -H "DD-API-KEY: ${DD_API_KEY}" \
  -H "DD-APPLICATION-KEY: ${DD_APP_KEY}"
"""
Delete a pipeline returns "OK" response
"""

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.logs_pipelines_api import LogsPipelinesApi

configuration = Configuration()
with ApiClient(configuration) as api_client:
    api_instance = LogsPipelinesApi(api_client)
    api_instance.delete_logs_pipeline(
        pipeline_id="pipeline_id",
    )

Instructions

First install the library and its dependencies and then save the example to example.py and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" python3 "example.py"
# Delete a pipeline returns "OK" response

require "datadog_api_client"
api_instance = DatadogAPIClient::V1::LogsPipelinesAPI.new
p api_instance.delete_logs_pipeline("pipeline_id")

Instructions

First install the library and its dependencies and then save the example to example.rb and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" ruby "example.rb"
// Delete a pipeline returns "OK" response

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	ctx := datadog.NewDefaultContext(context.Background())
	configuration := datadog.NewConfiguration()
	apiClient := datadog.NewAPIClient(configuration)
	api := datadogV1.NewLogsPipelinesApi(apiClient)
	r, err := api.DeleteLogsPipeline(ctx, "pipeline_id")

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.DeleteLogsPipeline`: %v\n", err)
		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
	}
}

Instructions

First install the library and its dependencies and then save the example to main.go and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" go run "main.go"
// Delete a pipeline returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;

public class Example {
  public static void main(String[] args) {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);

    try {
      apiInstance.deleteLogsPipeline("pipeline_id");
    } catch (ApiException e) {
      System.err.println("Exception when calling LogsPipelinesApi#deleteLogsPipeline");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}

Instructions

First install the library and its dependencies and then save the example to Example.java and run the following commands:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" java "Example.java"
// Delete a pipeline returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;

#[tokio::main]
async fn main() {
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.delete_logs_pipeline("pipeline_id".to_string()).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}

Instructions

First install the library and its dependencies, then save the example to src/main.rs and run the following commands, replacing datadoghq.com with your Datadog site if needed:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" cargo run
/**
 * Delete a pipeline returns "OK" response
 */

import { client, v1 } from "@datadog/datadog-api-client";

const configuration = client.createConfiguration();
const apiInstance = new v1.LogsPipelinesApi(configuration);

const params: v1.LogsPipelinesApiDeleteLogsPipelineRequest = {
  pipelineId: "pipeline_id",
};

apiInstance
  .deleteLogsPipeline(params)
  .then((data: any) => {
    console.log(
      "API called successfully. Returned data: " + JSON.stringify(data)
    );
  })
  .catch((error: any) => console.error(error));

Instructions

First install the library and its dependencies, then save the example to example.ts and run the following commands, replacing datadoghq.com with your Datadog site if needed:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" tsc "example.ts"

PUT https://api.datadoghq.com/api/v1/logs/config/pipelines/{pipeline_id}
(also available at https://api.us3.datadoghq.com, https://api.us5.datadoghq.com, https://api.datadoghq.eu, https://api.ap1.datadoghq.com, and https://api.ddog-gov.com)

Overview

Update a given pipeline configuration to change its processors or their order.

Note: Using this method updates your pipeline's configuration by replacing the current configuration with the new one sent to your Datadog organization.

This endpoint requires the logs_write_pipelines permission.

Arguments

Path parameters

Name

Type

Description

pipeline_id [required]

string

ID of the pipeline to update.

Request

Body Data (required)

New definition of the pipeline.


Field

Type

Description

filter

object

Filter for logs.

query

string

The filter query.

id

string

ID of the pipeline.

is_enabled

boolean

Whether or not the pipeline is enabled.

is_read_only

boolean

Whether or not the pipeline can be edited.

name [required]

string

Name of the pipeline.

processors

[ <oneOf>]

Ordered list of processors in this pipeline.

Option 1

object

Create custom grok rules to parse the full message or a specific attribute of your raw event. For more information, see the parsing section.

grok [required]

object

Set of rules for the grok parser.

match_rules [required]

string

List of match rules for the grok parser, separated by a new line.

support_rules

string

List of support rules for the grok parser, separated by a new line.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

samples

[string]

List of sample logs to test this grok parser.

source [required]

string

Name of the log attribute to parse.

default: message

type [required]

enum

Type of logs grok parser. Allowed enum values: grok-parser

default: grok-parser

Option 2

object

As Datadog receives logs, it timestamps them using the value(s) from any of these default attributes.

  • timestamp

  • date

  • _timestamp

  • Timestamp

  • eventTime

  • published_date

    If your logs put their dates in an attribute not in this list, use the log date Remapper Processor to define their date attribute as the official log timestamp. The recognized date formats are ISO8601, UNIX (the milliseconds EPOCH format), and RFC3164.

Note: If your logs don’t contain any of the default attributes and you haven’t defined your own date attribute, Datadog timestamps the logs with the date it received them.

If multiple log date remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs date remapper. Allowed enum values: date-remapper

default: date-remapper
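
For illustration, a minimal date-remapper entry for the processors array, built only from the fields documented above, might look like this (the event_timestamp source is a placeholder attribute name):

{
  "type": "date-remapper",
  "name": "Define event_timestamp as the official timestamp",
  "is_enabled": true,
  "sources": [
    "event_timestamp"
  ]
}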

Option 3

object

Use this Processor if you want to assign some attributes as the official status.

Each incoming status value is mapped as follows.

  • Integers from 0 to 7 map to the Syslog severity standards
  • Strings beginning with emerg or f (case-insensitive) map to emerg (0)
  • Strings beginning with a (case-insensitive) map to alert (1)
  • Strings beginning with c (case-insensitive) map to critical (2)
  • Strings beginning with err (case-insensitive) map to error (3)
  • Strings beginning with w (case-insensitive) map to warning (4)
  • Strings beginning with n (case-insensitive) map to notice (5)
  • Strings beginning with i (case-insensitive) map to info (6)
  • Strings beginning with d, trace or verbose (case-insensitive) map to debug (7)
  • Strings beginning with o or matching OK or Success (case-insensitive) map to OK
  • All others map to info (6)

Note: If multiple log status remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs status remapper. Allowed enum values: status-remapper

default: status-remapper
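
For illustration, a minimal status-remapper entry using only the fields documented above might look like this (the level source is a placeholder attribute name):

{
  "type": "status-remapper",
  "name": "Use level as the official status",
  "is_enabled": true,
  "sources": [
    "level"
  ]
}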

Option 4

object

Use this processor if you want to assign one or more attributes as the official service.

Note: If multiple service remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs service remapper. Allowed enum values: service-remapper

default: service-remapper

Option 5

object

The message is a key attribute in Datadog. It is displayed in the message column of the Log Explorer and you can do full string search on it. Use this Processor to define one or more attributes as the official log message.

Note: If multiple log message remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: msg

type [required]

enum

Type of logs message remapper. Allowed enum values: message-remapper

default: message-remapper

Option 6

object

The remapper processor remaps any source attribute(s) or tag to another target attribute or tag. Constraints on the tag/attribute name are explained in the Tag Best Practice documentation. Some additional constraints are applied, as : or , are not allowed in the target tag/attribute name.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

override_on_conflict

boolean

Override or not the target element if already set.

preserve_source

boolean

Remove or preserve the remapped source element.

source_type

string

Defines if the sources are from log attribute or tag.

default: attribute

sources [required]

[string]

Array of source attributes.

target [required]

string

Final attribute or tag name to remap the sources to.

target_format

enum

If the target_type of the remapper is attribute, try to cast the value to a new specific type. If the cast is not possible, the original type is kept. string, integer, or double are the possible types. If the target_type is tag, this parameter may not be specified. Allowed enum values: auto,string,integer,double

target_type

string

Defines if the final attribute or tag name is from log attribute or tag.

default: attribute

type [required]

enum

Type of logs attribute remapper. Allowed enum values: attribute-remapper

default: attribute-remapper
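
For illustration, an attribute-remapper entry that renames a user attribute to usr.name (placeholder names) might look like this, using only the fields documented above:

{
  "type": "attribute-remapper",
  "name": "Remap user to usr.name",
  "is_enabled": true,
  "sources": [
    "user"
  ],
  "source_type": "attribute",
  "target": "usr.name",
  "target_type": "attribute",
  "target_format": "string",
  "preserve_source": false,
  "override_on_conflict": false
}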

Option 7

object

This processor extracts query parameters and other important parameters from a URL.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

normalize_ending_slashes

boolean

Normalize the ending slashes or not.

sources [required]

[string]

Array of source attributes.

default: http.url

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.url_details

type [required]

enum

Type of logs URL parser. Allowed enum values: url-parser

default: url-parser

Option 8

object

The User-Agent parser takes a User-Agent attribute and extracts the OS, browser, device, and other user data. It recognizes major bots like the Google Bot, Yahoo Slurp, and Bing.

is_enabled

boolean

Whether or not the processor is enabled.

is_encoded

boolean

Define if the source attribute is URL encoded or not.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: http.useragent

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.useragent_details

type [required]

enum

Type of logs User-Agent parser. Allowed enum values: user-agent-parser

default: user-agent-parser

Option 9

object

Use the Category Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log matching a provided search query. Use categories to create groups for an analytical view. For example, URL groups, machine groups, environments, and response time buckets.

Notes:

  • The syntax of the query is the one of the Logs Explorer search bar. The query can be done on any log attribute or tag, whether it is a facet or not. Wildcards can also be used inside your query.
  • Once the log has matched one of the Processor queries, it stops. Make sure they are properly ordered in case a log could match several queries.
  • The names of the categories must be unique.
  • Once defined in the Category Processor, you can map categories to log status using the Log Status Remapper.

categories [required]

[object]

Array of filters to match or not a log and their corresponding name to assign a custom value to the log.

filter

object

Filter for logs.

query

string

The filter query.

name

string

Value to assign to the target attribute.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

target [required]

string

Name of the target attribute whose value is defined by the matching category.

type [required]

enum

Type of logs category processor. Allowed enum values: category-processor

default: category-processor
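
For illustration, a category-processor entry that buckets HTTP status codes (placeholder queries and target name) might look like this, using only the fields documented above:

{
  "type": "category-processor",
  "name": "Categorize HTTP status codes",
  "is_enabled": true,
  "target": "http.status_category",
  "categories": [
    {
      "filter": {
        "query": "@http.status_code:[200 TO 299]"
      },
      "name": "OK"
    },
    {
      "filter": {
        "query": "@http.status_code:[400 TO 499]"
      },
      "name": "client_error"
    },
    {
      "filter": {
        "query": "@http.status_code:[500 TO 599]"
      },
      "name": "server_error"
    }
  ]
}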

Option 10

object

Use the Arithmetic Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log with the result of the provided formula. This enables you to remap different time attributes with different units into a single attribute, or to compute operations on attributes within the same log.

The formula can use parentheses and the basic arithmetic operators -, +, *, /.

By default, the calculation is skipped if an attribute is missing. Select “Replace missing attribute by 0” to automatically populate missing attribute values with 0 to ensure that the calculation is done. An attribute is missing if it is not found in the log attributes, or if it cannot be converted to a number.

Notes:

  • The operator - needs to be space split in the formula as it can also be contained in attribute names.
  • If the target attribute already exists, it is overwritten by the result of the formula.
  • Results are rounded up to the 9th decimal. For example, if the result of the formula is 0.1234567891, the actual value stored for the attribute is 0.123456789.
  • If you need to scale a unit of measure, see Scale Filter.

expression [required]

string

Arithmetic operation between one or more log attributes.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, it replaces all missing attributes of expression by 0; if false, it skips the operation if an attribute is missing.

name

string

Name of the processor.

target [required]

string

Name of the attribute that contains the result of the arithmetic operation.

type [required]

enum

Type of logs arithmetic processor. Allowed enum values: arithmetic-processor

default: arithmetic-processor
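
For illustration, an arithmetic-processor entry that sums two durations and converts the result to milliseconds (placeholder attribute names) might look like this:

{
  "type": "arithmetic-processor",
  "name": "Compute total request time in ms",
  "is_enabled": true,
  "expression": "(time_connect + time_request) * 1000",
  "is_replace_missing": false,
  "target": "request.total_time_ms"
}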

Option 11

object

Use the string builder processor to add a new attribute (without spaces or special characters) to a log with the result of the provided template. This enables aggregation of different attributes or raw strings into a single attribute.

The template is defined by both raw text and blocks with the syntax %{attribute_path}.

Notes:

  • The processor only accepts attributes with values or an array of values in the blocks.
  • If an attribute cannot be used (object or array of object), it is replaced by an empty string or the entire operation is skipped depending on your selection.
  • If the target attribute already exists, it is overwritten by the result of the template.
  • Results of the template cannot exceed 256 characters.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, it replaces all missing attributes of template by an empty string. If false (default), skips the operation for missing attributes.

name

string

Name of the processor.

target [required]

string

The name of the attribute that contains the result of the template.

template [required]

string

A formula with one or more attributes and raw text.

type [required]

enum

Type of logs string builder processor. Allowed enum values: string-builder-processor

default: string-builder-processor
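
For illustration, a string-builder-processor entry that concatenates the HTTP method and URL path into a single attribute (placeholder attribute names) might look like this:

{
  "type": "string-builder-processor",
  "name": "Build a request summary",
  "is_enabled": true,
  "template": "%{http.method} %{http.url_details.path}",
  "is_replace_missing": true,
  "target": "http.request_summary"
}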

Option 12

object

Nested Pipelines are pipelines within a pipeline. Use Nested Pipelines to split the processing into two steps. For example, first use a high-level filtering such as team and then a second level of filtering based on the integration, service, or any other tag or attribute.

A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline can only contain Processors.

filter

object

Filter for logs.

query

string

The filter query.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

processors

[object]

Ordered list of processors in this pipeline.

type [required]

enum

Type of logs pipeline processor. Allowed enum values: pipeline

default: pipeline
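
For illustration, a nested pipeline entry that filters on a service and applies a single processor (placeholder values) might look like this:

{
  "type": "pipeline",
  "name": "Nginx logs",
  "is_enabled": true,
  "filter": {
    "query": "service:nginx"
  },
  "processors": [
    {
      "type": "status-remapper",
      "name": "Remap level to the official status",
      "is_enabled": true,
      "sources": [
        "level"
      ]
    }
  ]
}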

Option 13

object

The GeoIP parser takes an IP address attribute and extracts, if available, the Continent, Country, Subdivision, and City information into the target attribute path.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: network.client.ip

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: network.client.geoip

type [required]

enum

Type of GeoIP parser. Allowed enum values: geo-ip-parser

default: geo-ip-parser

Option 14

object

Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in the processors mapping table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

default_lookup

string

Value to set the target attribute if the source value is not found in the list.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_table [required]

[string]

Mapping table of values for the source attribute and their associated target attribute values, formatted as ["source_key1,target_value1", "source_key2,target_value2"]

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list or the default_lookup if not found in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor
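
For illustration, a lookup-processor entry that maps a numeric service ID to a service name (placeholder values), following the "source_key,target_value" format described above, might look like this:

{
  "type": "lookup-processor",
  "name": "Map service_id to a service name",
  "is_enabled": true,
  "source": "service_id",
  "target": "service_name",
  "lookup_table": [
    "1,payments",
    "2,checkout"
  ],
  "default_lookup": "unknown"
}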

Option 15

object

Note: Reference Tables are in public beta. Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in a Reference Table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_enrichment_table [required]

string

Name of the Reference Table for the source attribute and their associated target attribute values.

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 16

object

There are two ways to improve correlation between application traces and logs.

  1. Follow the documentation on how to inject a trace ID in the application logs and by default log integrations take care of all the rest of the setup.

  2. Use the Trace remapper processor to define a log attribute as its associated trace ID.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources

[string]

Array of source attributes.

default: dd.trace_id

type [required]

enum

Type of logs trace remapper. Allowed enum values: trace-id-remapper

default: trace-id-remapper

type

string

Type of pipeline.

{
  "filter": {
    "query": "source:python"
  },
  "is_enabled": false,
  "name": "",
  "processors": [
    {
      "grok": {
        "match_rules": "rule_name_1 foo\nrule_name_2 bar\n",
        "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"
      },
      "is_enabled": false,
      "name": "string",
      "samples": [],
      "source": "message",
      "type": "grok-parser"
    }
  ]
}

Response

OK

Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.

Note: These endpoints are only available for admin users. Make sure to use an application key created by an admin.


Field

Type

Description

filter

object

Filter for logs.

query

string

The filter query.

id

string

ID of the pipeline.

is_enabled

boolean

Whether or not the pipeline is enabled.

is_read_only

boolean

Whether or not the pipeline can be edited.

name [required]

string

Name of the pipeline.

processors

[ <oneOf>]

Ordered list of processors in this pipeline.

Option 1

object

Create custom grok rules to parse the full message or a specific attribute of your raw event. For more information, see the parsing section.

grok [required]

object

Set of rules for the grok parser.

match_rules [required]

string

List of match rules for the grok parser, separated by a new line.

support_rules

string

List of support rules for the grok parser, separated by a new line.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

samples

[string]

List of sample logs to test this grok parser.

source [required]

string

Name of the log attribute to parse.

default: message

type [required]

enum

Type of logs grok parser. Allowed enum values: grok-parser

default: grok-parser

Option 2

object

As Datadog receives logs, it timestamps them using the value(s) from any of these default attributes.

  • timestamp

  • date

  • _timestamp

  • Timestamp

  • eventTime

  • published_date

    If your logs put their dates in an attribute not in this list, use the log date Remapper Processor to define their date attribute as the official log timestamp. The recognized date formats are ISO8601, UNIX (the milliseconds EPOCH format), and RFC3164.

Note: If your logs don’t contain any of the default attributes and you haven’t defined your own date attribute, Datadog timestamps the logs with the date it received them.

If multiple log date remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs date remapper. Allowed enum values: date-remapper

default: date-remapper

Option 3

object

Use this Processor if you want to assign some attributes as the official status.

Each incoming status value is mapped as follows.

  • Integers from 0 to 7 map to the Syslog severity standards
  • Strings beginning with emerg or f (case-insensitive) map to emerg (0)
  • Strings beginning with a (case-insensitive) map to alert (1)
  • Strings beginning with c (case-insensitive) map to critical (2)
  • Strings beginning with err (case-insensitive) map to error (3)
  • Strings beginning with w (case-insensitive) map to warning (4)
  • Strings beginning with n (case-insensitive) map to notice (5)
  • Strings beginning with i (case-insensitive) map to info (6)
  • Strings beginning with d, trace or verbose (case-insensitive) map to debug (7)
  • Strings beginning with o or matching OK or Success (case-insensitive) map to OK
  • All others map to info (6)

Note: If multiple log status remapper processors can be applied to a given log, only the first one (according to the pipelines order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs status remapper. Allowed enum values: status-remapper

default: status-remapper

Option 4

object

Use this processor if you want to assign one or more attributes as the official service.

Note: If multiple service remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

type [required]

enum

Type of logs service remapper. Allowed enum values: service-remapper

default: service-remapper

Option 5

object

The message is a key attribute in Datadog. It is displayed in the message column of the Log Explorer and you can do full string search on it. Use this Processor to define one or more attributes as the official log message.

Note: If multiple log message remapper processors can be applied to a given log, only the first one (according to the pipeline order) is taken into account.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: msg

type [required]

enum

Type of logs message remapper. Allowed enum values: message-remapper

default: message-remapper

Option 6

object

The remapper processor remaps any source attribute(s) or tag to another target attribute or tag. Constraints on the tag/attribute name are explained in the Tag Best Practice documentation. Some additional constraints are applied, as : or , are not allowed in the target tag/attribute name.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

override_on_conflict

boolean

Override or not the target element if already set.

preserve_source

boolean

Remove or preserve the remapped source element.

source_type

string

Defines if the sources are from log attribute or tag.

default: attribute

sources [required]

[string]

Array of source attributes.

target [required]

string

Final attribute or tag name to remap the sources to.

target_format

enum

If the target_type of the remapper is attribute, try to cast the value to a new specific type. If the cast is not possible, the original type is kept. string, integer, or double are the possible types. If the target_type is tag, this parameter may not be specified. Allowed enum values: auto,string,integer,double

target_type

string

Defines if the final attribute or tag name is from log attribute or tag.

default: attribute

type [required]

enum

Type of logs attribute remapper. Allowed enum values: attribute-remapper

default: attribute-remapper

Option 7

object

This processor extracts query parameters and other important parameters from a URL.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

normalize_ending_slashes

boolean

Normalize the ending slashes or not.

sources [required]

[string]

Array of source attributes.

default: http.url

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.url_details

type [required]

enum

Type of logs URL parser. Allowed enum values: url-parser

default: url-parser

Option 8

object

The User-Agent parser takes a User-Agent attribute and extracts the OS, browser, device, and other user data. It recognizes major bots like the Google Bot, Yahoo Slurp, and Bing.

is_enabled

boolean

Whether or not the processor is enabled.

is_encoded

boolean

Define if the source attribute is URL encoded or not.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: http.useragent

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: http.useragent_details

type [required]

enum

Type of logs User-Agent parser. Allowed enum values: user-agent-parser

default: user-agent-parser

Option 9

object

Use the Category Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log matching a provided search query. Use categories to create groups for an analytical view. For example, URL groups, machine groups, environments, and response time buckets.

Notes:

  • The syntax of the query is the one of the Logs Explorer search bar. The query can be done on any log attribute or tag, whether it is a facet or not. Wildcards can also be used inside your query.
  • Once the log has matched one of the Processor queries, it stops. Make sure they are properly ordered in case a log could match several queries.
  • The names of the categories must be unique.
  • Once defined in the Category Processor, you can map categories to log status using the Log Status Remapper.

categories [required]

[object]

Array of filters to match or not a log and their corresponding name to assign a custom value to the log.

filter

object

Filter for logs.

query

string

The filter query.

name

string

Value to assign to the target attribute.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

target [required]

string

Name of the target attribute whose value is defined by the matching category.

type [required]

enum

Type of logs category processor. Allowed enum values: category-processor

default: category-processor

Option 10

object

Use the Arithmetic Processor to add a new attribute (without spaces or special characters in the new attribute name) to a log with the result of the provided formula. This enables you to remap different time attributes with different units into a single attribute, or to compute operations on attributes within the same log.

The formula can use parentheses and the basic arithmetic operators -, +, *, /.

By default, the calculation is skipped if an attribute is missing. Select “Replace missing attribute by 0” to automatically populate missing attribute values with 0 to ensure that the calculation is done. An attribute is missing if it is not found in the log attributes, or if it cannot be converted to a number.

Notes:

  • The operator - needs to be space split in the formula as it can also be contained in attribute names.
  • If the target attribute already exists, it is overwritten by the result of the formula.
  • Results are rounded up to the 9th decimal. For example, if the result of the formula is 0.1234567891, the actual value stored for the attribute is 0.123456789.
  • If you need to scale a unit of measure, see Scale Filter.

expression [required]

string

Arithmetic operation between one or more log attributes.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, it replaces all missing attributes of expression by 0; if false, it skips the operation if an attribute is missing.

name

string

Name of the processor.

target [required]

string

Name of the attribute that contains the result of the arithmetic operation.

type [required]

enum

Type of logs arithmetic processor. Allowed enum values: arithmetic-processor

default: arithmetic-processor

Option 11

object

Use the string builder processor to add a new attribute (without spaces or special characters) to a log with the result of the provided template. This enables aggregation of different attributes or raw strings into a single attribute.

The template is defined by both raw text and blocks with the syntax %{attribute_path}.

Notes:

  • The processor only accepts attributes with values or an array of values in the blocks.
  • If an attribute cannot be used (object or array of object), it is replaced by an empty string or the entire operation is skipped depending on your selection.
  • If the target attribute already exists, it is overwritten by the result of the template.
  • Results of the template cannot exceed 256 characters.

is_enabled

boolean

Whether or not the processor is enabled.

is_replace_missing

boolean

If true, it replaces all missing attributes of template by an empty string. If false (default), skips the operation for missing attributes.

name

string

Name of the processor.

target [required]

string

The name of the attribute that contains the result of the template.

template [required]

string

A formula with one or more attributes and raw text.

type [required]

enum

Type of logs string builder processor. Allowed enum values: string-builder-processor

default: string-builder-processor

Option 12

object

Nested Pipelines are pipelines within a pipeline. Use Nested Pipelines to split the processing into two steps. For example, first use a high-level filtering such as team and then a second level of filtering based on the integration, service, or any other tag or attribute.

A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline can only contain Processors.

filter

object

Filter for logs.

query

string

The filter query.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

processors

[object]

Ordered list of processors in this pipeline.

type [required]

enum

Type of logs pipeline processor. Allowed enum values: pipeline

default: pipeline

Option 13

object

The GeoIP parser takes an IP address attribute and extracts, if available, the Continent, Country, Subdivision, and City information into the target attribute path.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources [required]

[string]

Array of source attributes.

default: network.client.ip

target [required]

string

Name of the parent attribute that contains all the extracted details from the sources.

default: network.client.geoip

type [required]

enum

Type of GeoIP parser. Allowed enum values: geo-ip-parser

default: geo-ip-parser

Option 14

object

Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in the processors mapping table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

default_lookup

string

Value to set the target attribute if the source value is not found in the list.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_table [required]

[string]

Mapping table of values for the source attribute and their associated target attribute values, formatted as ["source_key1,target_value1", "source_key2,target_value2"]

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list or the default_lookup if not found in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 15

object

Note: Reference Tables are in public beta. Use the Lookup Processor to define a mapping between a log attribute and a human readable value saved in a Reference Table. For example, you can use the Lookup Processor to map an internal service ID into a human readable service name. Alternatively, you could also use it to check if the MAC address that just attempted to connect to the production environment belongs to your list of stolen machines.

is_enabled

boolean

Whether or not the processor is enabled.

lookup_enrichment_table [required]

string

Name of the Reference Table for the source attribute and their associated target attribute values.

name

string

Name of the processor.

source [required]

string

Source attribute used to perform the lookup.

target [required]

string

Name of the attribute that contains the corresponding value in the mapping list.

type [required]

enum

Type of logs lookup processor. Allowed enum values: lookup-processor

default: lookup-processor

Option 16

object

There are two ways to improve correlation between application traces and logs.

  1. Follow the documentation on how to inject a trace ID in the application logs and by default log integrations take care of all the rest of the setup.

  2. Use the Trace remapper processor to define a log attribute as its associated trace ID.

is_enabled

boolean

Whether or not the processor is enabled.

name

string

Name of the processor.

sources

[string]

Array of source attributes.

default: dd.trace_id

type [required]

enum

Type of logs trace remapper. Allowed enum values: trace-id-remapper

default: trace-id-remapper

type

string

Type of pipeline.

{
  "filter": {
    "query": "source:python"
  },
  "id": "string",
  "is_enabled": false,
  "is_read_only": false,
  "name": "",
  "processors": [
    {
      "grok": {
        "match_rules": "rule_name_1 foo\nrule_name_2 bar\n",
        "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"
      },
      "is_enabled": false,
      "name": "string",
      "samples": [],
      "source": "message",
      "type": "grok-parser"
    }
  ],
  "type": "pipeline"
}

Bad Request

Response returned by the Logs API when errors occur.


Field

Type

Description

error

object

Error returned by the Logs API

code

string

Code identifying the error

details

[object]

Additional error details

message

string

Error message

{
  "error": {
    "code": "string",
    "details": [],
    "message": "string"
  }
}

Forbidden

Error response object.


Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Too many requests

Error response object.


Field

Type

Description

errors [required]

[string]

Array of errors returned by the API.

{
  "errors": [
    "Bad Request"
  ]
}

Code example

# Path parameters
export pipeline_id="CHANGE_ME"
# Curl command (replace api.datadoghq.com with the endpoint for your Datadog site if needed: api.us3.datadoghq.com, api.us5.datadoghq.com, api.datadoghq.eu, api.ap1.datadoghq.com, or api.ddog-gov.com)
curl -X PUT "https://api.datadoghq.com/api/v1/logs/config/pipelines/${pipeline_id}" \
  -H "Accept: application/json" \
  -H "Content-Type: application/json" \
  -H "DD-API-KEY: ${DD_API_KEY}" \
  -H "DD-APPLICATION-KEY: ${DD_APP_KEY}" \
  -d @- << EOF
{
  "name": "",
  "processors": [
    {
      "grok": {
        "match_rules": "rule_name_1 foo\nrule_name_2 bar\n"
      }
    }
  ]
}
EOF
"""
Update a pipeline returns "OK" response
"""

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.logs_pipelines_api import LogsPipelinesApi
from datadog_api_client.v1.model.logs_filter import LogsFilter
from datadog_api_client.v1.model.logs_grok_parser import LogsGrokParser
from datadog_api_client.v1.model.logs_grok_parser_rules import LogsGrokParserRules
from datadog_api_client.v1.model.logs_grok_parser_type import LogsGrokParserType
from datadog_api_client.v1.model.logs_pipeline import LogsPipeline

body = LogsPipeline(
    filter=LogsFilter(
        query="source:python",
    ),
    name="",
    processors=[
        LogsGrokParser(
            grok=LogsGrokParserRules(
                match_rules="rule_name_1 foo\nrule_name_2 bar\n",
                support_rules="rule_name_1 foo\nrule_name_2 bar\n",
            ),
            is_enabled=False,
            samples=[],
            source="message",
            type=LogsGrokParserType.GROK_PARSER,
        ),
    ],
)

configuration = Configuration()
with ApiClient(configuration) as api_client:
    api_instance = LogsPipelinesApi(api_client)
    response = api_instance.update_logs_pipeline(pipeline_id="pipeline_id", body=body)

    print(response)

Instructions

First install the library and its dependencies, then save the example to example.py and run the following commands, replacing datadoghq.com with your Datadog site if needed:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" python3 "example.py"
# Update a pipeline returns "OK" response

require "datadog_api_client"
api_instance = DatadogAPIClient::V1::LogsPipelinesAPI.new

body = DatadogAPIClient::V1::LogsPipeline.new({
  filter: DatadogAPIClient::V1::LogsFilter.new({
    query: "source:python",
  }),
  name: "",
  processors: [
    DatadogAPIClient::V1::LogsGrokParser.new({
      grok: DatadogAPIClient::V1::LogsGrokParserRules.new({
        match_rules: 'rule_name_1 foo\nrule_name_2 bar\n',
        support_rules: 'rule_name_1 foo\nrule_name_2 bar\n',
      }),
      is_enabled: false,
      samples: [],
      source: "message",
      type: DatadogAPIClient::V1::LogsGrokParserType::GROK_PARSER,
    }),
  ],
})
p api_instance.update_logs_pipeline("pipeline_id", body)

Instructions

First install the library and its dependencies, then save the example to example.rb and run the following commands, replacing datadoghq.com with your Datadog site if needed:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" ruby "example.rb"
// Update a pipeline returns "OK" response

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	body := datadogV1.LogsPipeline{
		Filter: &datadogV1.LogsFilter{
			Query: datadog.PtrString("source:python"),
		},
		Name: "",
		Processors: []datadogV1.LogsProcessor{
			datadogV1.LogsProcessor{
				LogsGrokParser: &datadogV1.LogsGrokParser{
					Grok: datadogV1.LogsGrokParserRules{
						MatchRules: `rule_name_1 foo
rule_name_2 bar
`,
						SupportRules: datadog.PtrString(`rule_name_1 foo
rule_name_2 bar
`),
					},
					IsEnabled: datadog.PtrBool(false),
					Samples:   []string{},
					Source:    "message",
					Type:      datadogV1.LOGSGROKPARSERTYPE_GROK_PARSER,
				}},
		},
	}
	ctx := datadog.NewDefaultContext(context.Background())
	configuration := datadog.NewConfiguration()
	apiClient := datadog.NewAPIClient(configuration)
	api := datadogV1.NewLogsPipelinesApi(apiClient)
	resp, r, err := api.UpdateLogsPipeline(ctx, "pipeline_id", body)

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when calling `LogsPipelinesApi.UpdateLogsPipeline`: %v\n", err)
		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
	}

	responseContent, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Fprintf(os.Stdout, "Response from `LogsPipelinesApi.UpdateLogsPipeline`:\n%s\n", responseContent)
}

Instructions

First install the library and its dependencies, then save the example to main.go and run the following commands, replacing datadoghq.com with your Datadog site if needed:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" go run "main.go"
// Update a pipeline returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsFilter;
import com.datadog.api.client.v1.model.LogsGrokParser;
import com.datadog.api.client.v1.model.LogsGrokParserRules;
import com.datadog.api.client.v1.model.LogsGrokParserType;
import com.datadog.api.client.v1.model.LogsPipeline;
import com.datadog.api.client.v1.model.LogsProcessor;
import java.util.Collections;

public class Example {
  public static void main(String[] args) {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient);

    LogsPipeline body =
        new LogsPipeline()
            .filter(new LogsFilter().query("source:python"))
            .name("")
            .processors(
                Collections.singletonList(
                    new LogsProcessor(
                        new LogsGrokParser()
                            .grok(
                                new LogsGrokParserRules()
                                    .matchRules("""
rule_name_1 foo
rule_name_2 bar

""")
                                    .supportRules("""
rule_name_1 foo
rule_name_2 bar

"""))
                            .isEnabled(false)
                            .source("message")
                            .type(LogsGrokParserType.GROK_PARSER))));

    try {
      LogsPipeline result = apiInstance.updateLogsPipeline("pipeline_id", body);
      System.out.println(result);
    } catch (ApiException e) {
      System.err.println("Exception when calling LogsPipelinesApi#updateLogsPipeline");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}

Instructions

First install the library and its dependencies, then save the example to Example.java and run the following commands, replacing datadoghq.com with your Datadog site if needed:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" java "Example.java"
// Update a pipeline returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsFilter;
use datadog_api_client::datadogV1::model::LogsGrokParser;
use datadog_api_client::datadogV1::model::LogsGrokParserRules;
use datadog_api_client::datadogV1::model::LogsGrokParserType;
use datadog_api_client::datadogV1::model::LogsPipeline;
use datadog_api_client::datadogV1::model::LogsProcessor;

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsGrokParser(Box::new(
            LogsGrokParser::new(
                LogsGrokParserRules::new(
                    r#"rule_name_1 foo
rule_name_2 bar
"#
                    .to_string(),
                )
                .support_rules(
                    r#"rule_name_1 foo
rule_name_2 bar
"#
                    .to_string(),
                ),
                "message".to_string(),
                LogsGrokParserType::GROK_PARSER,
            )
            .is_enabled(false)
            .samples(vec![]),
        ))]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api
        .update_logs_pipeline("pipeline_id".to_string(), body)
        .await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}

Instructions

First install the library and its dependencies, then save the example to src/main.rs and run the following commands, replacing datadoghq.com with your Datadog site if needed:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" cargo run
/**
 * Update a pipeline returns "OK" response
 */

import { client, v1 } from "@datadog/datadog-api-client";

const configuration = client.createConfiguration();
const apiInstance = new v1.LogsPipelinesApi(configuration);

const params: v1.LogsPipelinesApiUpdateLogsPipelineRequest = {
  body: {
    filter: {
      query: "source:python",
    },
    name: "",
    processors: [
      {
        grok: {
          matchRules: `rule_name_1 foo
rule_name_2 bar
`,
          supportRules: `rule_name_1 foo
rule_name_2 bar
`,
        },
        isEnabled: false,
        samples: [],
        source: "message",
        type: "grok-parser",
      },
    ],
  },
  pipelineId: "pipeline_id",
};

apiInstance
  .updateLogsPipeline(params)
  .then((data: v1.LogsPipeline) => {
    console.log(
      "API called successfully. Returned data: " + JSON.stringify(data)
    );
  })
  .catch((error: any) => console.error(error));

Instructions

First install the library and its dependencies, then save the example to example.ts and run the following commands, replacing datadoghq.com with your Datadog site if needed:

DD_SITE="datadoghq.com" DD_API_KEY="<API-KEY>" DD_APP_KEY="<APP-KEY>" tsc "example.ts"
