1. Packages
  2. Mongodbatlas Provider
  3. API Docs
  4. StreamProcessor
MongoDB Atlas v3.30.0 published on Friday, Mar 21, 2025 by Pulumi

mongodbatlas.StreamProcessor

Explore with Pulumi AI

Example Usage

S

Coming soon!
Coming soon!
Coming soon!
Coming soon!
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.mongodbatlas.StreamInstance;
import com.pulumi.mongodbatlas.StreamInstanceArgs;
import com.pulumi.mongodbatlas.inputs.StreamInstanceDataProcessRegionArgs;
import com.pulumi.mongodbatlas.StreamConnection;
import com.pulumi.mongodbatlas.StreamConnectionArgs;
import com.pulumi.mongodbatlas.inputs.StreamConnectionDbRoleToExecuteArgs;
import com.pulumi.mongodbatlas.inputs.StreamConnectionAuthenticationArgs;
import com.pulumi.mongodbatlas.inputs.StreamConnectionSecurityArgs;
import com.pulumi.mongodbatlas.StreamProcessor;
import com.pulumi.mongodbatlas.StreamProcessorArgs;
import com.pulumi.mongodbatlas.inputs.StreamProcessorOptionsArgs;
import com.pulumi.mongodbatlas.inputs.StreamProcessorOptionsDlqArgs;
import com.pulumi.mongodbatlas.MongodbatlasFunctions;
import com.pulumi.mongodbatlas.inputs.GetStreamProcessorsArgs;
import com.pulumi.mongodbatlas.inputs.GetStreamProcessorArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new StreamInstance("example", StreamInstanceArgs.builder()
            .projectId(projectId)
            .instanceName("InstanceName")
            .dataProcessRegion(StreamInstanceDataProcessRegionArgs.builder()
                .region("VIRGINIA_USA")
                .cloud_provider("AWS")
                .build())
            .build());

        var example_sample = new StreamConnection("example-sample", StreamConnectionArgs.builder()
            .projectId(projectId)
            .instanceName(example.instanceName())
            .connectionName("sample_stream_solar")
            .type("Sample")
            .build());

        var example_cluster = new StreamConnection("example-cluster", StreamConnectionArgs.builder()
            .projectId(projectId)
            .instanceName(example.instanceName())
            .connectionName("ClusterConnection")
            .type("Cluster")
            .clusterName(clusterName)
            .dbRoleToExecute(StreamConnectionDbRoleToExecuteArgs.builder()
                .role("atlasAdmin")
                .type("BUILT_IN")
                .build())
            .build());

        var example_kafka = new StreamConnection("example-kafka", StreamConnectionArgs.builder()
            .projectId(projectId)
            .instanceName(example.instanceName())
            .connectionName("KafkaPlaintextConnection")
            .type("Kafka")
            .authentication(StreamConnectionAuthenticationArgs.builder()
                .mechanism("PLAIN")
                .username(kafkaUsername)
                .password(kafkaPassword)
                .build())
            .bootstrapServers("localhost:9092,localhost:9092")
            .config(Map.of("auto.offset.reset", "earliest"))
            .security(StreamConnectionSecurityArgs.builder()
                .protocol("PLAINTEXT")
                .build())
            .build());

        var stream_processor_sample_example = new StreamProcessor("stream-processor-sample-example", StreamProcessorArgs.builder()
            .projectId(projectId)
            .instanceName(example.instanceName())
            .processorName("sampleProcessorName")
            .pipeline(serializeJson(
                jsonArray(
                    jsonObject(
                        jsonProperty("$source", jsonObject(
                            jsonProperty("connectionName", mongodbatlasStreamConnection.example-sample().connectionName())
                        ))
                    ), 
                    jsonObject(
                        jsonProperty("$emit", jsonObject(
                            jsonProperty("connectionName", mongodbatlasStreamConnection.example-cluster().connectionName()),
                            jsonProperty("db", "sample"),
                            jsonProperty("coll", "solar"),
                            jsonProperty("timeseries", jsonObject(
                                jsonProperty("timeField", "_ts")
                            ))
                        ))
                    )
                )))
            .state("STARTED")
            .build());

        var stream_processor_cluster_to_kafka_example = new StreamProcessor("stream-processor-cluster-to-kafka-example", StreamProcessorArgs.builder()
            .projectId(projectId)
            .instanceName(example.instanceName())
            .processorName("clusterProcessorName")
            .pipeline(serializeJson(
                jsonArray(
                    jsonObject(
                        jsonProperty("$source", jsonObject(
                            jsonProperty("connectionName", mongodbatlasStreamConnection.example-cluster().connectionName())
                        ))
                    ), 
                    jsonObject(
                        jsonProperty("$emit", jsonObject(
                            jsonProperty("connectionName", mongodbatlasStreamConnection.example-kafka().connectionName()),
                            jsonProperty("topic", "topic_from_cluster")
                        ))
                    )
                )))
            .state("CREATED")
            .build());

        var stream_processor_kafka_to_cluster_example = new StreamProcessor("stream-processor-kafka-to-cluster-example", StreamProcessorArgs.builder()
            .projectId(projectId)
            .instanceName(example.instanceName())
            .processorName("kafkaProcessorName")
            .pipeline(serializeJson(
                jsonArray(
                    jsonObject(
                        jsonProperty("$source", jsonObject(
                            jsonProperty("connectionName", mongodbatlasStreamConnection.example-kafka().connectionName()),
                            jsonProperty("topic", "topic_source")
                        ))
                    ), 
                    jsonObject(
                        jsonProperty("$emit", jsonObject(
                            jsonProperty("connectionName", mongodbatlasStreamConnection.example-cluster().connectionName()),
                            jsonProperty("db", "kafka"),
                            jsonProperty("coll", "topic_source"),
                            jsonProperty("timeseries", jsonObject(
                                jsonProperty("timeField", "ts")
                            ))
                        ))
                    )
                )))
            .state("CREATED")
            .options(StreamProcessorOptionsArgs.builder()
                .dlq(StreamProcessorOptionsDlqArgs.builder()
                    .coll("exampleColumn")
                    .connectionName(mongodbatlasStreamConnection.example-cluster().connectionName())
                    .db("exampleDb")
                    .build())
                .build())
            .build());

        final var example-stream-processors = MongodbatlasFunctions.getStreamProcessors(GetStreamProcessorsArgs.builder()
            .projectId(projectId)
            .instanceName(example.instanceName())
            .build());

        final var example-stream-processor = MongodbatlasFunctions.getStreamProcessor(GetStreamProcessorArgs.builder()
            .projectId(projectId)
            .instanceName(example.instanceName())
            .processorName(stream_processor_sample_example.processorName())
            .build());

        ctx.export("streamProcessorsState", example_stream_processor.applyValue(example_stream_processor -> example_stream_processor.state()));
        ctx.export("streamProcessorsResults", example_stream_processors.applyValue(example_stream_processors -> example_stream_processors.results()));
    }
}
Copy
resources:
  example:
    type: mongodbatlas:StreamInstance
    properties:
      projectId: ${projectId}
      instanceName: InstanceName
      dataProcessRegion:
        region: VIRGINIA_USA
        # Fixed: the schema property is cloudProvider (camelCase), not cloud_provider.
        cloudProvider: AWS
  example-sample:
    type: mongodbatlas:StreamConnection
    properties:
      projectId: ${projectId}
      instanceName: ${example.instanceName}
      connectionName: sample_stream_solar
      type: Sample
  example-cluster:
    type: mongodbatlas:StreamConnection
    properties:
      projectId: ${projectId}
      instanceName: ${example.instanceName}
      connectionName: ClusterConnection
      type: Cluster
      clusterName: ${clusterName}
      dbRoleToExecute:
        role: atlasAdmin
        type: BUILT_IN
  example-kafka:
    type: mongodbatlas:StreamConnection
    properties:
      projectId: ${projectId}
      instanceName: ${example.instanceName}
      connectionName: KafkaPlaintextConnection
      type: Kafka
      authentication:
        mechanism: PLAIN
        username: ${kafkaUsername}
        password: ${kafkaPassword}
      bootstrapServers: localhost:9092,localhost:9092
      config:
        auto.offset.reset: earliest
      security:
        protocol: PLAINTEXT
  stream-processor-sample-example:
    type: mongodbatlas:StreamProcessor
    properties:
      projectId: ${projectId}
      instanceName: ${example.instanceName}
      processorName: sampleProcessorName
      pipeline:
        fn::toJSON:
          # Fixed: these interpolations were rendered as broken Go templates
          # ("%!s(MISSING)"); hyphenated resource names must be referenced with
          # the bracket-quoted form, as the outputs section already does.
          - $source:
              connectionName: ${["example-sample"].connectionName}
          - $emit:
              connectionName: ${["example-cluster"].connectionName}
              db: sample
              coll: solar
              timeseries:
                timeField: _ts
      state: STARTED
  stream-processor-cluster-to-kafka-example:
    type: mongodbatlas:StreamProcessor
    properties:
      projectId: ${projectId}
      instanceName: ${example.instanceName}
      processorName: clusterProcessorName
      pipeline:
        fn::toJSON:
          - $source:
              connectionName: ${["example-cluster"].connectionName}
          - $emit:
              connectionName: ${["example-kafka"].connectionName}
              topic: topic_from_cluster
      state: CREATED
  stream-processor-kafka-to-cluster-example:
    type: mongodbatlas:StreamProcessor
    properties:
      projectId: ${projectId}
      instanceName: ${example.instanceName}
      processorName: kafkaProcessorName
      pipeline:
        fn::toJSON:
          - $source:
              connectionName: ${["example-kafka"].connectionName}
              topic: topic_source
          - $emit:
              connectionName: ${["example-cluster"].connectionName}
              db: kafka
              coll: topic_source
              timeseries:
                timeField: ts
      state: CREATED
      options:
        dlq:
          coll: exampleColumn
          connectionName: ${["example-cluster"].connectionName}
          db: exampleDb
variables:
  example-stream-processors:
    fn::invoke:
      function: mongodbatlas:getStreamProcessors
      arguments:
        projectId: ${projectId}
        instanceName: ${example.instanceName}
  example-stream-processor:
    fn::invoke:
      function: mongodbatlas:getStreamProcessor
      arguments:
        projectId: ${projectId}
        instanceName: ${example.instanceName}
        processorName: ${["stream-processor-sample-example"].processorName}
outputs:
  # example making use of data sources
  streamProcessorsState: ${["example-stream-processor"].state}
  streamProcessorsResults: ${["example-stream-processors"].results}
Copy

Create StreamProcessor Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new StreamProcessor(name: string, args: StreamProcessorArgs, opts?: CustomResourceOptions);
@overload
def StreamProcessor(resource_name: str,
                    args: StreamProcessorArgs,
                    opts: Optional[ResourceOptions] = None)

@overload
def StreamProcessor(resource_name: str,
                    opts: Optional[ResourceOptions] = None,
                    instance_name: Optional[str] = None,
                    pipeline: Optional[str] = None,
                    processor_name: Optional[str] = None,
                    project_id: Optional[str] = None,
                    options: Optional[StreamProcessorOptionsArgs] = None,
                    state: Optional[str] = None)
func NewStreamProcessor(ctx *Context, name string, args StreamProcessorArgs, opts ...ResourceOption) (*StreamProcessor, error)
public StreamProcessor(string name, StreamProcessorArgs args, CustomResourceOptions? opts = null)
public StreamProcessor(String name, StreamProcessorArgs args)
public StreamProcessor(String name, StreamProcessorArgs args, CustomResourceOptions options)
type: mongodbatlas:StreamProcessor
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. StreamProcessorArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. StreamProcessorArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. StreamProcessorArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. StreamProcessorArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. StreamProcessorArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var streamProcessorResource = new Mongodbatlas.StreamProcessor("streamProcessorResource", new()
{
    InstanceName = "string",
    Pipeline = "string",
    ProcessorName = "string",
    ProjectId = "string",
    Options = new Mongodbatlas.Inputs.StreamProcessorOptionsArgs
    {
        Dlq = new Mongodbatlas.Inputs.StreamProcessorOptionsDlqArgs
        {
            Coll = "string",
            ConnectionName = "string",
            Db = "string",
        },
    },
    State = "string",
});
Copy
example, err := mongodbatlas.NewStreamProcessor(ctx, "streamProcessorResource", &mongodbatlas.StreamProcessorArgs{
	InstanceName:  pulumi.String("string"),
	Pipeline:      pulumi.String("string"),
	ProcessorName: pulumi.String("string"),
	ProjectId:     pulumi.String("string"),
	Options: &mongodbatlas.StreamProcessorOptionsArgs{
		Dlq: &mongodbatlas.StreamProcessorOptionsDlqArgs{
			Coll:           pulumi.String("string"),
			ConnectionName: pulumi.String("string"),
			Db:             pulumi.String("string"),
		},
	},
	State: pulumi.String("string"),
})
Copy
var streamProcessorResource = new StreamProcessor("streamProcessorResource", StreamProcessorArgs.builder()
    .instanceName("string")
    .pipeline("string")
    .processorName("string")
    .projectId("string")
    .options(StreamProcessorOptionsArgs.builder()
        .dlq(StreamProcessorOptionsDlqArgs.builder()
            .coll("string")
            .connectionName("string")
            .db("string")
            .build())
        .build())
    .state("string")
    .build());
Copy
stream_processor_resource = mongodbatlas.StreamProcessor("streamProcessorResource",
    instance_name="string",
    pipeline="string",
    processor_name="string",
    project_id="string",
    options={
        "dlq": {
            "coll": "string",
            "connection_name": "string",
            "db": "string",
        },
    },
    state="string")
Copy
const streamProcessorResource = new mongodbatlas.StreamProcessor("streamProcessorResource", {
    instanceName: "string",
    pipeline: "string",
    processorName: "string",
    projectId: "string",
    options: {
        dlq: {
            coll: "string",
            connectionName: "string",
            db: "string",
        },
    },
    state: "string",
});
Copy
type: mongodbatlas:StreamProcessor
properties:
    instanceName: string
    options:
        dlq:
            coll: string
            connectionName: string
            db: string
    pipeline: string
    processorName: string
    projectId: string
    state: string
Copy

StreamProcessor Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The StreamProcessor resource accepts the following input properties:

InstanceName This property is required. string
Human-readable label that identifies the stream instance.
Pipeline This property is required. string
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
ProcessorName This property is required. string
Human-readable label that identifies the stream processor.
ProjectId This property is required. string
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
Options StreamProcessorOptions
Optional configuration for the stream processor.
State string
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
InstanceName This property is required. string
Human-readable label that identifies the stream instance.
Pipeline This property is required. string
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
ProcessorName This property is required. string
Human-readable label that identifies the stream processor.
ProjectId This property is required. string
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
Options StreamProcessorOptionsArgs
Optional configuration for the stream processor.
State string
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
instanceName This property is required. String
Human-readable label that identifies the stream instance.
pipeline This property is required. String
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
processorName This property is required. String
Human-readable label that identifies the stream processor.
projectId This property is required. String
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
options StreamProcessorOptions
Optional configuration for the stream processor.
state String
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
instanceName This property is required. string
Human-readable label that identifies the stream instance.
pipeline This property is required. string
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
processorName This property is required. string
Human-readable label that identifies the stream processor.
projectId This property is required. string
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
options StreamProcessorOptions
Optional configuration for the stream processor.
state string
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
instance_name This property is required. str
Human-readable label that identifies the stream instance.
pipeline This property is required. str
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
processor_name This property is required. str
Human-readable label that identifies the stream processor.
project_id This property is required. str
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
options StreamProcessorOptionsArgs
Optional configuration for the stream processor.
state str
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
instanceName This property is required. String
Human-readable label that identifies the stream instance.
pipeline This property is required. String
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
processorName This property is required. String
Human-readable label that identifies the stream processor.
projectId This property is required. String
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
options Property Map
Optional configuration for the stream processor.
state String
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.

Outputs

All input properties are implicitly available as output properties. Additionally, the StreamProcessor resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
Stats string
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
Id string
The provider-assigned unique ID for this managed resource.
Stats string
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
id String
The provider-assigned unique ID for this managed resource.
stats String
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
id string
The provider-assigned unique ID for this managed resource.
stats string
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
id str
The provider-assigned unique ID for this managed resource.
stats str
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
id String
The provider-assigned unique ID for this managed resource.
stats String
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.

Look up Existing StreamProcessor Resource

Get an existing StreamProcessor resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: StreamProcessorState, opts?: CustomResourceOptions): StreamProcessor
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        instance_name: Optional[str] = None,
        options: Optional[StreamProcessorOptionsArgs] = None,
        pipeline: Optional[str] = None,
        processor_name: Optional[str] = None,
        project_id: Optional[str] = None,
        state: Optional[str] = None,
        stats: Optional[str] = None) -> StreamProcessor
func GetStreamProcessor(ctx *Context, name string, id IDInput, state *StreamProcessorState, opts ...ResourceOption) (*StreamProcessor, error)
public static StreamProcessor Get(string name, Input<string> id, StreamProcessorState? state, CustomResourceOptions? opts = null)
public static StreamProcessor get(String name, Output<String> id, StreamProcessorState state, CustomResourceOptions options)
resources:
  _:
    type: mongodbatlas:StreamProcessor
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
InstanceName string
Human-readable label that identifies the stream instance.
Options StreamProcessorOptions
Optional configuration for the stream processor.
Pipeline string
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
ProcessorName string
Human-readable label that identifies the stream processor.
ProjectId string
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
State string
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
Stats string
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
InstanceName string
Human-readable label that identifies the stream instance.
Options StreamProcessorOptionsArgs
Optional configuration for the stream processor.
Pipeline string
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
ProcessorName string
Human-readable label that identifies the stream processor.
ProjectId string
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
State string
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
Stats string
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
instanceName String
Human-readable label that identifies the stream instance.
options StreamProcessorOptions
Optional configuration for the stream processor.
pipeline String
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
processorName String
Human-readable label that identifies the stream processor.
projectId String
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
state String
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
stats String
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
instanceName string
Human-readable label that identifies the stream instance.
options StreamProcessorOptions
Optional configuration for the stream processor.
pipeline string
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
processorName string
Human-readable label that identifies the stream processor.
projectId string
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
state string
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
stats string
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
instance_name str
Human-readable label that identifies the stream instance.
options StreamProcessorOptionsArgs
Optional configuration for the stream processor.
pipeline str
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
processor_name str
Human-readable label that identifies the stream processor.
project_id str
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
state str
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
stats str
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
instanceName String
Human-readable label that identifies the stream instance.
options Property Map
Optional configuration for the stream processor.
pipeline String
Stream aggregation pipeline you want to apply to your streaming data. MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details see the Aggregation Pipelines Documentation
processorName String
Human-readable label that identifies the stream processor.
projectId String
Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
state String
The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the Stream Processor. Valid values are CREATED, STARTED or STOPPED. When a Stream Processor is created without specifying the state, it will default to CREATED state. NOTE When creating a stream processor, setting the state to STARTED can automatically start the stream processor.
stats String
The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.

Supporting Types

StreamProcessorOptions, StreamProcessorOptionsArgs

Dlq This property is required. StreamProcessorOptionsDlq
Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
Dlq This property is required. StreamProcessorOptionsDlq
Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
dlq This property is required. StreamProcessorOptionsDlq
Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
dlq This property is required. StreamProcessorOptionsDlq
Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
dlq This property is required. StreamProcessorOptionsDlq
Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
dlq This property is required. Property Map
Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.

StreamProcessorOptionsDlq, StreamProcessorOptionsDlqArgs

Coll This property is required. string
Name of the collection to use for the DLQ.
ConnectionName This property is required. string
Name of the connection to write DLQ messages to. Must be an Atlas connection.
Db This property is required. string
Name of the database to use for the DLQ.
Coll This property is required. string
Name of the collection to use for the DLQ.
ConnectionName This property is required. string
Name of the connection to write DLQ messages to. Must be an Atlas connection.
Db This property is required. string
Name of the database to use for the DLQ.
coll This property is required. String
Name of the collection to use for the DLQ.
connectionName This property is required. String
Name of the connection to write DLQ messages to. Must be an Atlas connection.
db This property is required. String
Name of the database to use for the DLQ.
coll This property is required. string
Name of the collection to use for the DLQ.
connectionName This property is required. string
Name of the connection to write DLQ messages to. Must be an Atlas connection.
db This property is required. string
Name of the database to use for the DLQ.
coll This property is required. str
Name of the collection to use for the DLQ.
connection_name This property is required. str
Name of the connection to write DLQ messages to. Must be an Atlas connection.
db This property is required. str
Name of the database to use for the DLQ.
coll This property is required. String
Name of the collection to use for the DLQ.
connectionName This property is required. String
Name of the connection to write DLQ messages to. Must be an Atlas connection.
db This property is required. String
Name of the database to use for the DLQ.

Import

Stream Processor resource can be imported using the Stream Instance name, Project ID and Stream Processor name, in the format INSTANCE_NAME-PROJECT_ID-PROCESSOR_NAME, e.g.

$ terraform import mongodbatlas_stream_processor.test yourInstanceName-6117ac2fe2a3d04ed27a987v-yourProcessorName

For more information see: MongoDB Atlas API - Stream Processor Documentation.

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
MongoDB Atlas pulumi/pulumi-mongodbatlas
License
Apache-2.0
Notes
This Pulumi package is based on the mongodbatlas Terraform Provider.